# Autograd support for the torch._custom_op CustomOp API: glue that turns
# user-provided "save_for_backward" and "backward" functions into a
# torch.autograd.Function.
import functools
from collections import namedtuple

import torch
import torch.utils._pytree as pytree


def autograd_kernel_indirection(custom_op):
    autograd_fallback = autograd_not_implemented(custom_op)

    def inner(*args, **kwargs):
        if custom_op._has_impl('autograd'):
            kernel = custom_op._get_impl('autograd').func
            return kernel(*args, **kwargs)
        # The 'autograd' impl is generated from the user's 'backward' and
        # 'save_for_backward' registrations. If only one of the two was
        # provided, tell the user which one is missing.
        if custom_op._has_impl('save_for_backward') or custom_op._has_impl('backward'):
            missing = 'save_for_backward' if custom_op._has_impl('backward') else 'backward'
            found = 'save_for_backward' if missing == 'backward' else 'backward'
            loc = custom_op._get_impl(found).location
            raise RuntimeError(
                f"We found a '{found}' registration for {custom_op} at "
                f"{loc} but were unable to find a '{missing}' registration. "
                f"To use the CustomOp API to register a backward formula, "
                f"please provide us both a backward function and a "
                f"'save for backward' function via `impl_backward` and "
                f"`impl_save_for_backward` respectively.")
        return autograd_fallback(*args, **kwargs)
    return inner


def autograd_not_implemented(custom_op):
    def kernel(*args, **kwargs):
        if torch.is_grad_enabled() and pytree.tree_any(
                lambda x: isinstance(x, torch.Tensor) and x.requires_grad,
                (args, kwargs)):
            raise RuntimeError("Autograd has not been implemented for operator")
        with torch._C._AutoDispatchBelowAutograd():
            return custom_op(*args, **kwargs)
    return kernel


def mark_non_differentiable(ctx, output, output_differentiability):
    if output_differentiability is not None:
        if not isinstance(output, tuple):
            tuple_output = (output,)
        else:
            tuple_output = output
        assert len(output_differentiability) == len(tuple_output)
        non_differentiable_tensors = []
        for idx, (differentiable, out) in enumerate(
                zip(output_differentiability, tuple_output)):
            if isinstance(out, torch.Tensor):
                if not differentiable:
                    non_differentiable_tensors.append(out)
                continue
            if isinstance(out, list):
                if not differentiable:
                    non_differentiable_tensors.extend(out)
                continue
            if differentiable:
                raise RuntimeError(
                    f"With output_differentiability={output_differentiability}. "
                    f"At idx {idx}, we received an object of type {type(out)} "
                    f"that is not a Tensor, so it cannot be marked as "
                    f"differentiable in output_differentiability.")
        if non_differentiable_tensors:
            ctx.mark_non_differentiable(*non_differentiable_tensors)


def construct_autograd_kernel(
        schema,
        output_differentiability,
        custom_op,
        op_overload,
        save_for_backward_fn,
        backward_fn):

    def apply(*args):
        flat_args, spec = pytree.tree_flatten(args)
        out_spec = None

        def forward(ctx, *flat_args):
            ctx.set_materialize_grads(True)
            args = pytree.tree_unflatten(list(flat_args), spec)
            with torch._C._AutoDispatchBelowAutograd():
                output = op_overload(*args)

            # Record the types of the args so backward can produce better
            # error messages.
            args_info = namedtuple_args(schema, pytree.tree_map(type, args))

            save_for_backward_fn_inputs = namedtuple_args(schema, args)
            to_save = save_for_backward_fn(save_for_backward_fn_inputs, output)

            save_pytree_for_backward(ctx, (to_save, args_info))
            mark_non_differentiable(ctx, output, output_differentiability)

            nonlocal out_spec
            flat_output, out_spec = pytree.tree_flatten(output)
            return tuple(flat_output)

        def backward(ctx, *flat_grad_output):
            assert out_spec is not None
            grads = pytree.tree_unflatten(list(flat_grad_output), out_spec)
            saved, args_info = unpack_saved(ctx)
            # The ctx handed to the user's backward is currently empty; it
            # exists so more state can be attached to it in the future.
            inner_ctx = object()
            if not isinstance(grads, tuple):
                grads = (grads,)
            grad_inputs_dict = backward_fn(inner_ctx, saved, *grads)

            # Massage the grad_inputs_dict into the flat tuple that
            # autograd.Function expects.
            validate_grad_inputs_dict(grad_inputs_dict, custom_op, args_info)
            return grad_inputs_dict_to_flat_tuple(grad_inputs_dict, args_info)

        generated_cls = gen_autograd_function(
            custom_op._opname + '_customop', forward, backward)

        flat_output = generated_cls.apply(*flat_args)
        assert out_spec is not None
        return pytree.tree_unflatten(list(flat_output), out_spec)
    return apply


def gen_autograd_function(name, forward, backward):
    generated_cls = type(
        name,
        (torch.autograd.Function,),
        {
            'forward': staticmethod(forward),
            'backward': staticmethod(backward),
        }
    )
    return generated_cls


@functools.lru_cache
def namedtuple_args_cls(schema):
    attribs = [arg.name for arg in schema.arguments.flat_all]
    name = str(schema.name) + '_args'
    tuple_cls = namedtuple(name, attribs)
    return tuple_cls


def namedtuple_args(schema, args):
    assert isinstance(args, tuple)
    tuple_cls = namedtuple_args_cls(schema)
    return tuple_cls(*args)


def validate_grad_inputs_dict(grad_inputs_dict, forward_op, args_info):
    def error(what):
        backward = forward_op._get_impl('backward')
        raise RuntimeError(
            f"In the backward function defined for {forward_op} at "
            f"{backward.location} using the CustomOp API, {what}")

    if not isinstance(grad_inputs_dict, dict):
        error(f"expected the output of the backward function to be a dict "
              f"but got {type(grad_inputs_dict)}")

    expected_keys = {arg.name for arg in forward_op._schema.arguments.flat_all
                     if arg.type.is_tensor_like()}
    actual_keys = grad_inputs_dict.keys()
    if expected_keys != actual_keys:
        error(f"expected the returned grad_input dict to have keys "
              f"{expected_keys} but got {actual_keys}. The backward function "
              f"must return a gradient (can be None) for each arg to the "
              f"CustomOp that may be a Tensor or Sequence[Tensor]. Args "
              f"declared to be non-Tensor-like types should not appear in "
              f"the grad_input dict")

    for name, grad in grad_inputs_dict.items():
        arg_info = getattr(args_info, name)

        if isinstance(arg_info, list):
            if not isinstance(grad, (tuple, list)):
                error(f"for input '{name}' expected the grad_input dict to "
                      f"hold a list of gradients but got object of type "
                      f"{type(grad)}.")
            if not len(grad) == len(arg_info):
                error(f"for input '{name}' expected the grad_input dict to "
                      f"hold a list of {len(arg_info)} gradients but got "
                      f"{len(grad)}")
            for idx, (g, info) in enumerate(zip(grad, arg_info)):
                if g is None:
                    continue
                if not isinstance(g, torch.Tensor):
                    error(f"for input '{name}' expected the grad_input dict "
                          f"to hold a list of None or Tensor gradients but "
                          f"got object of {type(g)} at index {idx}")
                if not issubclass(info, torch.Tensor):
                    error(f"for input '{name}', got a Tensor as the gradient "
                          f"for the {idx}-th value but expected None because "
                          f"the {idx}-th value was not a Tensor (it was "
                          f"type {arg_info}")
            continue

        if grad is None:
            continue
        if not isinstance(grad, torch.Tensor):
            error(f"got object of type {type(grad)} as the gradient for "
                  f"input '{name}', but expected the gradient to be either "
                  f"None or a Tensor")
        if not issubclass(arg_info, torch.Tensor):
            error(f"got a Tensor as the gradient for input '{name}' but "
                  f"expected None as the gradient because input '{name}' "
                  f"was not a Tensor (it was type {arg_info}).")


def grad_inputs_dict_to_flat_tuple(grad_inputs_dict, args_info):
    result = []
    for name, arg_info in args_info._asdict().items():
        if name not in grad_inputs_dict:
            result.append(pytree.tree_map(lambda x: None, arg_info))
            continue
        result.append(grad_inputs_dict[name])
    return tuple(pytree.tree_leaves(result))


# Saves "stuff" (a pytree) onto the ctx object. Tensors go through
# ctx.save_for_backward (to avoid reference cycles); everything else is
# stashed directly on ctx. Use unpack_saved to reverse this.
def save_pytree_for_backward(ctx, stuff):
    flat_stuff, spec = pytree.tree_flatten(stuff)
    num_elts = len(flat_stuff)
    tensor_idxs = [idx for idx, thing in enumerate(flat_stuff)
                   if isinstance(thing, torch.Tensor)]
    non_tensor_idxs = [idx for idx, thing in enumerate(flat_stuff)
                       if not isinstance(thing, torch.Tensor)]
    tensors = [thing for thing in flat_stuff if isinstance(thing, torch.Tensor)]
    non_tensors = [thing for thing in flat_stuff
                   if not isinstance(thing, torch.Tensor)]

    ctx.spec = spec
    ctx.num_elts = num_elts
    ctx.save_for_backward(*tensors)
    ctx.tensor_idxs = tensor_idxs
    ctx.saved_non_tensors = non_tensors
    ctx.non_tensor_idxs = non_tensor_idxs


# Inverse operation to save_pytree_for_backward.
def unpack_saved(ctx):
    flat_stuff = [None] * ctx.num_elts
    for tensor, idx in zip(ctx.saved_tensors, ctx.tensor_idxs):
        flat_stuff[idx] = tensor
    for non_tensor, idx in zip(ctx.saved_non_tensors, ctx.non_tensor_idxs):
        flat_stuff[idx] = non_tensor
    stuff = pytree.tree_unflatten(flat_stuff, ctx.spec)
    return stuff
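

if __name__ == "__main__":
    # Illustrative sketch only, not part of the original module: it shows how
    # `gen_autograd_function` (defined above) assembles a
    # torch.autograd.Function subclass from plain forward/backward callables.
    # The names `_square_forward`, `_square_backward`, and `SquareFn` are
    # hypothetical and exist only for this example.
    def _square_forward(ctx, x):
        # Save the input so backward can compute d(x*x)/dx = 2x.
        ctx.save_for_backward(x)
        return x * x

    def _square_backward(ctx, grad_out):
        (x,) = ctx.saved_tensors
        return 2 * x * grad_out

    SquareFn = gen_autograd_function(
        "SquareFn_customop", _square_forward, _square_backward)

    inp = torch.randn(3, requires_grad=True)
    out = SquareFn.apply(inp)
    out.sum().backward()
    assert torch.allclose(inp.grad, 2 * inp.detach())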