import functools
from collections import namedtuple

import torch
import torch.utils._pytree as pytree


def autograd_kernel_indirection(custom_op):
    autograd_fallback = autograd_not_implemented(custom_op)

    def inner(*args, **kwargs):
        if custom_op._has_impl('autograd'):
            kernel = custom_op._get_impl('autograd').func
            return kernel(*args, **kwargs)
        # If only one of backward / save_for_backward was registered, point the
        # user at the missing half instead of silently falling back.
        if custom_op._has_impl('save_for_backward') or custom_op._has_impl('backward'):
            missing = 'save_for_backward' if custom_op._has_impl('backward') else 'backward'
            found = 'save_for_backward' if missing == 'backward' else 'backward'
            loc = custom_op._get_impl(found).location
            raise RuntimeError(
                f"We found a '{found}' registration for {custom_op} at {loc} "
                f"but were unable to find a '{missing}' registration. To use the "
                f"CustomOp API to register a backward formula, please provide us "
                f"both a backward function and a 'save for backward' function via "
                f"`impl_backward` and `impl_save_for_backward` respectively.")
        return autograd_fallback(*args, **kwargs)
    return inner


def autograd_not_implemented(custom_op):
    def kernel(*args, **kwargs):
        if torch.is_grad_enabled() and pytree.tree_any(
                lambda x: isinstance(x, torch.Tensor) and x.requires_grad,
                (args, kwargs)):
            raise RuntimeError("Autograd has not been implemented for operator")
        with torch._C._AutoDispatchBelowAutograd():
            return custom_op(*args, **kwargs)
    return kernel


def mark_non_differentiable(ctx, output, output_differentiability):
    if output_differentiability is not None:
        tuple_output = output if isinstance(output, tuple) else (output,)
        assert len(output_differentiability) == len(tuple_output)
        non_differentiable_tensors = []
        for idx, (differentiable, out) in enumerate(
                zip(output_differentiability, tuple_output)):
            if isinstance(out, torch.Tensor):
                if not differentiable:
                    non_differentiable_tensors.append(out)
                continue
            if isinstance(out, list):
                if not differentiable:
                    non_differentiable_tensors.extend(out)
                continue
            if differentiable:
                raise RuntimeError(
                    f"With output_differentiability={output_differentiability}. "
                    f"At idx {idx}, we received an object of type {type(out)} "
                    f"that is not a Tensor, so it cannot be marked as "
                    f"differentiable in output_differentiability.")
        if non_differentiable_tensors:
            ctx.mark_non_differentiable(*non_differentiable_tensors)


def construct_autograd_kernel(schema, output_differentiability, custom_op,
                              op_overload, save_for_backward_fn, backward_fn):
    def apply(*args):
        flat_args, spec = pytree.tree_flatten(args)
        out_spec = None

        def forward(ctx, *flat_args):
            ctx.set_materialize_grads(True)
            args = pytree.tree_unflatten(list(flat_args), spec)
            with torch._C._AutoDispatchBelowAutograd():
                output = op_overload(*args)
            # Record the arg types so backward can produce better errors.
            args_info = namedtuple_args(schema, pytree.tree_map(type, args))
            save_for_backward_fn_inputs = namedtuple_args(schema, args)
            to_save = save_for_backward_fn(save_for_backward_fn_inputs, output)
            save_pytree_for_backward(ctx, (to_save, args_info))
            mark_non_differentiable(ctx, output, output_differentiability)
            nonlocal out_spec
            flat_output, out_spec = pytree.tree_flatten(output)
            return tuple(flat_output)

        def backward(ctx, *flat_grad_output):
            assert out_spec is not None
            grads = pytree.tree_unflatten(list(flat_grad_output), out_spec)
            saved, args_info = unpack_saved(ctx)
            # The ctx handed to the user's backward is an empty placeholder.
            inner_ctx = object()
            if not isinstance(grads, tuple):
                grads = (grads,)
            grad_inputs_dict = backward_fn(inner_ctx, saved, *grads)
            # Massage grad_inputs_dict into a flat tuple acceptable by
            # autograd.Function.
            validate_grad_inputs_dict(grad_inputs_dict, custom_op, args_info)
            return grad_inputs_dict_to_flat_tuple(grad_inputs_dict, args_info)

        generated_cls = gen_autograd_function(
            custom_op._opname + '_customop', forward, backward)
        flat_output = generated_cls.apply(*flat_args)
        assert out_spec is not None
        return pytree.tree_unflatten(list(flat_output), out_spec)
    return apply


def gen_autograd_function(name, forward, backward):
    generated_cls = type(
        name,
        (torch.autograd.Function,),
        {'forward': staticmethod(forward), 'backward': staticmethod(backward)},
    )
    return generated_cls


@functools.lru_cache()
def namedtuple_args_cls(schema):
    attribs = [arg.name for arg in schema.arguments.flat_all]
    name = str(schema.name) + "_args"
    tuple_cls = namedtuple(name, attribs)
    return tuple_cls


def namedtuple_args(schema, args):
    assert isinstance(args, tuple)
    tuple_cls = namedtuple_args_cls(schema)
    return tuple_cls(*args)


def validate_grad_inputs_dict(grad_inputs_dict, forward_op, args_info):
    def error(what):
        backward = forward_op._get_impl('backward')
        raise RuntimeError(
            f"In the backward function defined for {forward_op} at "
            f"{backward.location} using the CustomOp API, {what}")

    if not isinstance(grad_inputs_dict, dict):
        error(f"expected the output of the backward function to be a dict but "
              f"got {type(grad_inputs_dict)}")

    expected_keys = {arg.name for arg in forward_op._schema.arguments.flat_all
                     if arg.type.is_tensor_like()}
    actual_keys = grad_inputs_dict.keys()
    if expected_keys != actual_keys:
        error(f"expected the returned grad_input dict to have keys "
              f"{expected_keys} but got {actual_keys}. The backward function "
              f"must return a gradient (can be None) for each arg to the "
              f"CustomOp that may be a Tensor or Sequence[Tensor]. Args declared "
              f"to be non-Tensor-like types should not appear in the grad_input dict")

    for name, grad in grad_inputs_dict.items():
        arg_info = getattr(args_info, name)
        if isinstance(arg_info, list):
            if not isinstance(grad, (tuple, list)):
                error(f"for input '{name}' expected the grad_input dict to hold "
                      f"a list of gradients but got object of type {type(grad)}.")
            if not len(grad) == len(arg_info):
                error(f"for input '{name}' expected the grad_input dict to hold "
                      f"a list of {len(arg_info)} gradients but got {len(grad)}")
            for idx, (g, info) in enumerate(zip(grad, arg_info)):
                if g is None:
                    continue
                if not isinstance(g, torch.Tensor):
                    error(f"for input '{name}' expected the grad_input dict to "
                          f"hold a list of None or Tensor gradients but got "
                          f"object of {type(g)} at index {idx}")
                if not issubclass(info, torch.Tensor):
                    error(f"for input '{name}', got a Tensor as the gradient for "
                          f"the {idx}-th value but expected None because the "
                          f"{idx}-th value was not a Tensor (it was type {info})")
            continue
        if grad is None:
            continue
        if not isinstance(grad, torch.Tensor):
            error(f"got object of type {type(grad)} as the gradient for input "
                  f"'{name}', but expected the gradient to be either None or a Tensor")
        if not issubclass(arg_info, torch.Tensor):
            error(f"got a Tensor as the gradient for input '{name}' but expected "
                  f"None as the gradient because input '{name}' was not a Tensor "
                  f"(it was type {arg_info}).")


def grad_inputs_dict_to_flat_tuple(grad_inputs_dict, args_info):
    result = []
    for name, arg_info in args_info._asdict().items():
        if name not in grad_inputs_dict:
            result.append(pytree.tree_map(lambda x: None, arg_info))
            continue
        result.append(grad_inputs_dict[name])
    return tuple(pytree.tree_leaves(result))


# Saves "stuff" (a pytree) onto the ctx object. Tensors go through
# ctx.save_for_backward (to avoid reference cycles); everything else is stored
# as plain attributes. Use unpack_saved to reverse this.
def save_pytree_for_backward(ctx, stuff):
    flat_stuff, spec = pytree.tree_flatten(stuff)
    num_elts = len(flat_stuff)
    tensor_idxs = [idx for idx, thing in enumerate(flat_stuff)
                   if isinstance(thing, torch.Tensor)]
    non_tensor_idxs = [idx for idx, thing in enumerate(flat_stuff)
                       if not isinstance(thing, torch.Tensor)]
    tensors = [thing for thing in flat_stuff if isinstance(thing, torch.Tensor)]
    non_tensors = [thing for thing in flat_stuff
                   if not isinstance(thing, torch.Tensor)]

    ctx.spec = spec
    ctx.num_elts = num_elts
    ctx.save_for_backward(*tensors)
    ctx.tensor_idxs = tensor_idxs
    ctx.saved_non_tensors = non_tensors
    ctx.non_tensor_idxs = non_tensor_idxs


# Inverse operation to save_pytree_for_backward.
def unpack_saved(ctx):
    flat_stuff = [None] * ctx.num_elts
    for tensor, idx in zip(ctx.saved_tensors, ctx.tensor_idxs):
        flat_stuff[idx] = tensor
    for non_tensor, idx in zip(ctx.saved_non_tensors, ctx.non_tensor_idxs):
        flat_stuff[idx] = non_tensor
    stuff = pytree.tree_unflatten(flat_stuff, ctx.spec)
    return stuff