
import traceback
from typing import Any, NamedTuple, Optional

import torch
import torch.fx
from torch._dispatch.python import enable_python_dispatcher
from torch._guards import detect_fake_mode
from torch._subclasses.meta_utils import is_sparse_any
from torch.fx._compatibility import compatibility
from torch.fx.node import map_aggregate, Node

__all__ = ["TensorMetadata", "ShapeProp"]


@compatibility(is_backward_compatible=True)
class TensorMetadata(NamedTuple):
    # TensorMetadata is a structure containing pertinent information
    # about a tensor within a PyTorch program.

    # General tensor metadata
    shape: torch.Size
    dtype: torch.dtype
    requires_grad: bool
    stride: tuple[int, ...]
    memory_format: Optional[torch.memory_format]

    # Quantization metadata
    is_quantized: bool
    qparams: dict[str, Any]


def _extract_tensor_metadata(
    result: torch.Tensor, include_contiguity=True
) -> TensorMetadata:
    """
    Extract a TensorMetadata NamedTuple describing `result`.
    """
    shape = result.shape
    dtype = result.dtype
    requires_grad = result.requires_grad
    stride = result.stride() if not is_sparse_any(result) else ()

    memory_format = None
    if include_contiguity and not is_sparse_any(result):
        memory_formats = {
            torch.contiguous_format,
            torch.channels_last,
            torch.channels_last_3d,
        }
        for query_format in memory_formats:
            if result.is_contiguous(memory_format=query_format):
                memory_format = query_format
                break

    is_quantized = result.is_quantized
    qparams: dict[str, Any] = {}
    if is_quantized:
        qscheme = result.qscheme()
        qparams["qscheme"] = qscheme
        if qscheme in {torch.per_tensor_affine, torch.per_tensor_symmetric}:
            qparams["scale"] = result.q_scale()
            qparams["zero_point"] = result.q_zero_point()
        elif qscheme in {
            torch.per_channel_affine,
            torch.per_channel_affine_float_qparams,
            torch.per_channel_symmetric,
        }:
            # Per-channel scale/zero_point are tensors; store plain lists
            # for easier serialization downstream.
            qparams["scale"] = result.q_per_channel_scales().tolist()
            qparams["zero_point"] = result.q_per_channel_zero_points().tolist()
            qparams["axis"] = result.q_per_channel_axis()

    return TensorMetadata(
        shape, dtype, requires_grad, stride, memory_format, is_quantized, qparams
    )


@compatibility(is_backward_compatible=True)
class ShapeProp(torch.fx.Interpreter):
    """
Execute an FX graph Node-by-Node and
record the shape and type of the result
into the corresponding node.

Example:
     In this example, we record the shape
     and data type of a module given
     an example input ``torch.randn(50, D_in)``.
     We print the name, shape and dtype of each node.

    class TwoLayerNet(torch.nn.Module):
        def __init__(self, D_in, H, D_out):
            super().__init__()
            self.linear1 = torch.nn.Linear(D_in, H)
            self.linear2 = torch.nn.Linear(H, D_out)
        def forward(self, x):
            h_relu = self.linear1(x).clamp(min=0)
            y_pred = self.linear2(h_relu)
            return y_pred
    N, D_in, H, D_out = 64, 1000, 100, 10
    x = torch.randn(N, D_in)
    y = torch.randn(N, D_out)
    model = TwoLayerNet(D_in, H, D_out)
    gm = torch.fx.symbolic_trace(model)
    sample_input = torch.randn(50, D_in)
    ShapeProp(gm).propagate(sample_input)

    for node in gm.graph.nodes:
        print(node.name, node.meta['tensor_meta'].dtype,
            node.meta['tensor_meta'].shape)

    The output of this code is:

    x torch.float32 torch.Size([50, 1000])
    linear1 torch.float32 torch.Size([50, 100])
    clamp_1 torch.float32 torch.Size([50, 100])
    linear2 torch.float32 torch.Size([50, 10])
    output torch.float32 torch.Size([50, 10])

Args:
     module (GraphModule): The module to be executed
     fake_mode (FakeTensorMode): A fake mode for copying the gm

    """

    def __init__(self, gm, fake_mode=None):
        super().__init__(gm)
        if fake_mode is None:
            fake_mode = detect_fake_mode()
        if fake_mode is not None:
            from torch._dynamo.utils import deepcopy_to_fake_tensor

            # Fake execution is needed because the inputs may be fake tensors,
            # but tensor_meta must be recorded on the real module's nodes, so
            # keep a faked copy of the module purely for running nodes.
            self.fake_module = deepcopy_to_fake_tensor(self.module, fake_mode)
            self.fake_mode = fake_mode
        else:
            self.fake_module = None
            self.fake_mode = None

        self.real_module = self.module

    def run_node(self, n: Node) -> Any:
        from torch.fx.experimental.symbolic_shapes import (
            compute_unbacked_bindings,
            rebind_unbacked,
        )

        try:
            if self.fake_module is not None:
                # Temporarily swap in the faked module for execution.
                self.module = self.fake_module
            try:
                if self.fake_mode is not None:
                    with self.fake_mode, enable_python_dispatcher():
                        result = super().run_node(n)
                        rebind_unbacked(self.fake_mode.shape_env, n, result)
                else:
                    result = super().run_node(n)
            finally:
                self.module = self.real_module
        except Exception as e:
            traceback.print_exc()
            raise RuntimeError(
                f"ShapeProp error for: node={n.format_node()} with meta={n.meta}"
            ) from e

        found_tensor = False

        def extract_tensor_meta(obj):
            if isinstance(obj, torch.Tensor):
                nonlocal found_tensor
                found_tensor = True
                return _extract_tensor_metadata(obj)
            else:
                return obj

        meta = map_aggregate(result, extract_tensor_meta)
        if found_tensor:
            n.meta["tensor_meta"] = meta

        if self.fake_mode:
            if (shape_env := self.fake_mode.shape_env) and (
                symbol_to_path := compute_unbacked_bindings(shape_env, result)
            ):
                n.meta["unbacked_bindings"] = symbol_to_path

        n.meta["type"] = type(result)
        return result

    def propagate(self, *args):
        """
        Run `module` via interpretation and return the result and
        record the shape and type of each node.

        Args:
            *args (Tensor): the sample input.

        Returns:
            Any: The value returned from executing the Module
        """
        if self.fake_mode is not None:
            fake_args = [
                self.fake_mode.from_tensor(t) if isinstance(t, torch.Tensor) else t
                for t in args
            ]
        else:
            fake_args = args
        return super().run(*fake_args)
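

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, mirroring the example in the ShapeProp
# docstring): running this file directly propagates shapes through a small
# symbolically traced network and prints the recorded tensor_meta for every
# node. The two-layer network below is an assumption for demonstration, not
# part of this module's API.
if __name__ == "__main__":

    class _TwoLayerNet(torch.nn.Module):
        def __init__(self, d_in, h, d_out):
            super().__init__()
            self.linear1 = torch.nn.Linear(d_in, h)
            self.linear2 = torch.nn.Linear(h, d_out)

        def forward(self, x):
            return self.linear2(self.linear1(x).clamp(min=0))

    _gm = torch.fx.symbolic_trace(_TwoLayerNet(1000, 100, 10))
    ShapeProp(_gm).propagate(torch.randn(50, 1000))

    for _node in _gm.graph.nodes:
        _meta = _node.meta.get("tensor_meta")
        if _meta is not None:
            print(_node.name, _meta.dtype, _meta.shape)

    # _extract_tensor_metadata can also be called directly on a tensor; for a
    # plain strided tensor it records shape, dtype, stride and memory format,
    # with empty quantization parameters.
    print(_extract_tensor_metadata(torch.randn(2, 3)))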