o
    Zh'J                     @   sZ  d dl Z d dlZd dlZd dlZd dlZd dlmZ d dlmZmZm	Z	m
Z
 d dlZd dlmZ d dlmZmZmZmZmZ d dlmZmZmZmZmZ e
eef Ze
eeedf f Zdd	 Zd
ee	e  dedefddZde
eeedf f defddZ dededeg e!f defddZ"dedededeeee ee ef fddZ#d
ee dee dedefddZ$dd  Z%de
eeedf f d!eded"ededefd#d$Z&d%d& Z'd!ededdfd'd(Z(defd)d*Z)d+a*e+ Z,da-d,d- Z.d.d/ Z/d0d1 Z0d2d3 Z1d4d5 Z2d6d7 Z3d8d9 Z4d:d; Z5e j6d<d= Z7d>d? Z8d@dA Z9dBdC Z:dDdE Z;dS )F    N)partial)AnyCallableOptionalUnion)Tensor)_add_batch_dim_remove_batch_dim_vmap_decrement_nesting_vmap_increment_nestingis_batchedtensor)_broadcast_to_and_flattentree_flatten	tree_map_tree_unflattenTreeSpec.c                    s    dt   fdd}|S )Nzvtorch.func.{grad, vjp, jacrev, hessian} don't yet support saved tensor hooks. Please open an issue with your use case.c                     s@   t jj  | i |W  d    S 1 sw   Y  d S N)torchZautogradgraphZdisable_saved_tensors_hooks)argskwargsfmessage D/var/www/auris/lib/python3.10/site-packages/torch/_functorch/vmap.pyfn-   s   $z.doesnt_support_saved_tensors_hooks.<locals>.fn)	functoolswraps)r   r   r   r   r   "doesnt_support_saved_tensors_hooks'   s
   r   flat_in_dims	flat_argsreturnc                    sZ   dd t | |D  t dkrtd r)t fdd D r)td  d d S )	Nc                 S   s"   g | ]\}}|d ur| |qS r   )size.0in_dimargr   r   r   
<listcomp>9   s
    z0_validate_and_get_batch_size.<locals>.<listcomp>r   z/vmap: Expected at least one Tensor to vmap overc                 3   s    | ]	}| d  kV  qdS )r   Nr   )r%   r#   Zbatch_sizesr   r   	<genexpr>@   s    z/_validate_and_get_batch_size.<locals>.<genexpr>zTvmap: Expected all tensors to have the same size in the mapped dimension, got sizes z for the mapped dimension)ziplen
ValueErrorany)r    r!   r   r)   r   _validate_and_get_batch_size6   s   r/   batched_outputsc                 C   s   t | tr	t| S dS )N   )
isinstancetupler,   )r0   r   r   r   _num_outputsH   s   
r4   valuenum_elementserror_message_lambdac                 C   s.   t | ts
| f| S t| |krt| | S r   )r2   r3   r,   r-   )r5   r6   r7   r   r   r   	_as_tupleR   s
   


r8   in_dimsr   funcc           	      C   s  t | tst | tstdt| d|  dt|  dt|dkr,tdt| dt|\}}t| |}|d u rRtdt| d|  dt| d  d	| d	t	t
||D ]~\}\}}t |tsx|d urxtdt| d|  d
| dt |trt |tstdt| d|  d
| dt| d	|d ur||  k s|| krtdt| d|  d
| d|  d|  d|  d|d ur|dk r||  ||< qYt|||||fS )Nvmap(z
, in_dims=zv, ...)(<inputs>): expected `in_dims` to be int or a (potentially nested) tuple matching the structure of inputs, got: .r   z)(<inputs>): got no inputs. Maybe you forgot to add inputs, or you are trying to vmap over a function with no inputs. The latter is unsupported.zb, ...)(<inputs>): in_dims is not compatible with the structure of `inputs`. in_dims has structure r1   z but inputs has structure z, ...)(<inputs>): Got in_dim=zE for an input but in_dim must be either an integer dimension or None.z' for an input but the input is of type zT. We cannot vmap over non-Tensor arguments, please use None as the respective in_dimz> for some input, but that input is a Tensor of dimensionality z  so expected in_dim to satisfy -z <= in_dim < )r2   intr3   r-   	_get_nametyper,   r   r   	enumerater+   r   dimr/   )	r9   r   r:   r!   	args_specr    ir'   r&   r   r   r   _process_batched_inputs\   sn   

"rD   
vmap_levelc                    s"    fddt | |D }t||S )Nc                    s(   g | ]\}}|d u r|nt || qS r   )r   r$   rE   r   r   r(      s    z*_create_batched_inputs.<locals>.<listcomp>)r+   r   )r    r!   rE   rB   batched_inputsr   rF   r   _create_batched_inputs   s   

rH   c                 C   sp   |d u rt |tjrt|rtd|  d|  d|S t |tjs1td|  d|  dt| dt||||S )Nr;   z	, ...): `z5` can not return a BatchedTensor when out_dim is Nonez%` must only return Tensors, got type z3. Did you mean to set out_dims= to None for output?)r2   r   r   r   r-   r?   r	   )namebatched_outputrE   
batch_sizeout_dimr   r   r   _maybe_remove_batch_dim   s   rM   out_dimsrK   c           	         s   t | \}fdd}t| tjr7ttrg}n&ttr+tdkr+}nd u r3g}n|  nt}|d u rC|   fddt||D }t	|S )Nc                
      s.   t dt  d dtd  d d	)Nr;   , ..., out_dims=z`)(<inputs>): out_dims is not compatible with the structure of `outputs`. out_dims has structure r1   z but outputs has structure r<   )r-   r>   r   r   )r:   rN   output_specr   r   incompatible_error   s   
z+_unwrap_batched.<locals>.incompatible_errorr1   c                    s$   g | ]\}}t t| |qS r   )rM   r>   )r%   rJ   rL   )rK   r:   rE   r   r   r(      s    z#_unwrap_batched.<locals>.<listcomp>)
r   r2   r   r   r=   r3   r,   r   r+   r   )	r0   rN   rE   rK   r:   Zflat_batched_outputsrQ   flat_out_dimsZflat_outputsr   )rK   r:   rN   rP   rE   r   _unwrap_batched   s"   


rS   c                 C   s4   t | trd S | d u rd S tdt| d| d)Nr;   rO   z): `out_dims` must be an int, None or a python collection of ints representing where in the outputs the vmapped dimension should appear.)r2   r=   r-   r>   )xr:   rN   r   r   r   _check_int_or_none   s   
rU   c                 C   s&   t | trd S ttt|| d|  d S )N)r:   rN   )r2   r=   r   r   rU   )rN   r:   r   r   r   $_check_out_dims_is_int_or_int_pytree   s   
rV   c                 C   s6   t | dr| jS t| tjrdt| j dS t| S )N__name__zfunctools.partial(z, ...))hasattrrW   r2   r   r   r>   r:   repr)r:   r   r   r   r>      s
   
r>   Fc                     s  t rd S t t r	 W d    d S tjdddkrnda 	 W d    d S tjdddaddl	m
   fd	d
} | tjjjj | tjjjj | tjjjj | tjjjj | tjjjj | tjjjj | tjjjj | tjjjj da W d    d S 1 sw   Y  d S )NZPYTORCH_JIT1TatenZIMPLZFuncTorchBatchedr   decomposition_tablec                    s*   |  v rt |  |   d S td|  )Nz!could not find decomposition for )VMAP_DECOMPOSITIONS_LIBimplRuntimeError)decompr\   r   r   #_register_python_decomposition_vmap$  s   zElazy_load_decompositions.<locals>._register_python_decomposition_vmap)DECOMPOSITIONS_LOADEDDECOMPOSITIONS_LOCKosenvirongetr   ZlibraryLibraryr^   Ztorch._decompr]   opsr[   Zmse_loss_backwarddefaultZsmooth_l1_loss_backwardZhuber_loss_backwardZnll_loss_forwardZnll_loss2d_forwardZnll_loss_backwardZnll_loss2d_backwardaddr)rb   r   r\   r   lazy_load_decompositions  s6   
"rl   c                 O   sp   t   t||  t||| \}}}	}
|d ur*t|	|||}t| |||
||fi |S t| |||	|
||fi |S r   )rl   rV   rD   _get_chunked_inputs_chunked_vmap
_flat_vmap)r:   r9   rN   
randomness
chunk_sizer   r   rK   r    r!   rB   chunks_flat_argsr   r   r   	vmap_impl8  s<   
rs   c                 C   s4   | |  }}|g| }| | }|dkr| | |S )Nr   )append)Ztotal_elemsrq   Zn_chunkschunk_sizes	remainderr   r   r   get_chunk_sizesZ  s   

rw   c                    sN   |f |d urt ||}tt| t fddt| |D }t| }|S )Nc                 3   s8    | ]\}}|d ur|j  |dn|gt  V  qd S N)rA   )Ztensor_splitr,   )r%   tr&   Z
split_idxsr   r   r*   j  s    	
z&_get_chunked_inputs.<locals>.<genexpr>)rw   r3   	itertools
accumulater+   )r!   r    rK   rq   ru   Zflat_args_chunksrr   r   rz   r   rm   d  s   
	rm   c                 C   sH   g }d }| D ]}t |\}}|| |d u r|}qtt| }||fS r   )r   rt   listr+   )Zchunks_output_Zflat_chunks_outputarg_specoutputflat_outputZ	arg_specsflat_output_chunksr   r   r   _flatten_chunks_output|  s   
r   c                 C   sX   t | |}t|t|ksJ g }t|D ]\}}|tj|| |d d ||< q|S rx   )r   r,   r@   rt   r   cat)rN   r~   r   rR   r   idxrL   r   r   r   _concat_chunked_outputs  s   

r   c                 K   s   g }|dkr
t  nd }|D ]&}	t||	}
|
dkrq|d ur#t | |t| |
||	|||fi | qt|\}}~t|||}t||S )Nsamer   )	r   Zget_rng_stater/   Zset_rng_statert   ro   r   r   r   )r:   r    rr   rB   rN   rp   r   Zchunks_outputrsr!   rK   r   r~   r   r   r   r   rn     s2   


rn   c                 C   s   | dvrt d|  d S )N)errorZ	differentr   zLOnly allowed values for randomness are 'error', 'different', or 'same'. Got )r`   )rp   r   r   r   _check_randomness_arg  s
   r   c                 c   s(    zt | |}|V  W t  d S t  w r   )r   r
   )rK   rp   rE   r   r   r   vmap_increment_nesting  s
   
r   c                 K   sZ   t ||}t||||}	| |	i |}
t|
|||| W  d    S 1 s&w   Y  d S r   )r   rH   rS   )r:   rK   r    r!   rB   rN   rp   r   rE   rG   r0   r   r   r   ro     s   $ro   c                    s    fdd}|S )Nc                     sR   t  }t| |}|i |}t||W  d    S 1 s"w   Y  d S r   )r   wrap_batchedunwrap_batched)r   r   rE   rG   r0   rK   r:   r9   rp   r   r   inner  s
   $zrestore_vmap.<locals>.innerr   )r:   r9   rK   rp   r   r   r   r   restore_vmap  s   r   c                 C   s4   t | \}}t||}|d usJ t||||}|S r   )r   r   rH   )r   bdimslevelr!   specZ
flat_bdimsresultr   r   r   r     s
   
r   c                    sR   t | \}}t|dkr| dfS  fdd|D }t| \}}t||t||fS )Nr   r   c                    s0   g | ]}t |tjrtjj| n|d fqS r   )r2   r   r   Z_CZ
_functorchrS   )r%   r'   r   r   r   r(     s    
z"unwrap_batched.<locals>.<listcomp>)r   r,   r+   r   )r   r   r!   r   r   r   r   r   r   r   r     s   
r   )<
contextlibr   r{   re   	threadingr   typingr   r   r   r   r   r   Ztorch._C._functorchr   r	   r
   r   r   Ztorch.utils._pytreer   r   r   r   r   r=   r3   Z	in_dims_tZ
out_dims_tr   r}   r/   r4   strr8   rD   rH   rM   rS   rU   rV   r>   rc   Lockrd   r^   rl   rs   rw   rm   r   r   rn   r   contextmanagerr   ro   r   r   r   r   r   r   r   <module>   s   	

"




>

*,"
2

