import math
from collections import OrderedDict

import torch
from torch import Tensor, nn

from .utils import logging


logger = logging.get_logger(__name__)


class PytorchGELUTanh(nn.Module):
    """
    A fast C implementation of the tanh approximation of the GeLU activation function. See
    https://arxiv.org/abs/1606.08415.

    This implementation is equivalent to NewGELU and FastGELU but much faster. However, it is not an exact numerical
    match due to rounding errors.
    """

    def forward(self, input: Tensor) -> Tensor:
        return nn.functional.gelu(input, approximate="tanh")


class NewGELUActivation(nn.Module):
    """
    Implementation of the GELU activation function currently in Google BERT repo (identical to OpenAI GPT). Also see
    the Gaussian Error Linear Units paper: https://arxiv.org/abs/1606.08415
    """

    def forward(self, input: Tensor) -> Tensor:
        return 0.5 * input * (1.0 + torch.tanh(math.sqrt(2.0 / math.pi) * (input + 0.044715 * torch.pow(input, 3.0))))


class GELUActivation(nn.Module):
    """
    Original Implementation of the GELU activation function in Google BERT repo when initially created. For
    information: OpenAI GPT's GELU is slightly different (and gives slightly different results): 0.5 * x * (1 +
    torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) This is now written in C in nn.functional
    Also see the Gaussian Error Linear Units paper: https://arxiv.org/abs/1606.08415
    """

    def __init__(self, use_gelu_python: bool = False):
        super().__init__()
        if use_gelu_python:
            self.act = self._gelu_python
        else:
            self.act = nn.functional.gelu

    def _gelu_python(self, input: Tensor) -> Tensor:
        return input * 0.5 * (1.0 + torch.erf(input / math.sqrt(2.0)))

    def forward(self, input: Tensor) -> Tensor:
        return self.act(input)


class FastGELUActivation(nn.Module):
    """
    Applies GELU approximation that is slower than QuickGELU but more accurate. See: https://github.com/hendrycks/GELUs
    """

    def forward(self, input: Tensor) -> Tensor:
        return 0.5 * input * (1.0 + torch.tanh(input * 0.7978845608 * (1.0 + 0.044715 * input * input)))


class QuickGELUActivation(nn.Module):
    """
    Applies GELU approximation that is fast but somewhat inaccurate. See: https://github.com/hendrycks/GELUs
    """

    def forward(self, input: Tensor) -> Tensor:
        return input * torch.sigmoid(1.702 * input)


class ClippedGELUActivation(nn.Module):
    """
    Clip the range of possible GeLU outputs between [min, max]. This is especially useful for quantization purpose, as
    it allows mapping negatives values in the GeLU spectrum. For more information on this trick, please refer to
    https://arxiv.org/abs/2004.09602.

    Gaussian Error Linear Unit. Original Implementation of the gelu activation function in Google Bert repo when
    initially created.

    For information: OpenAI GPT's gelu is slightly different (and gives slightly different results): 0.5 * x * (1 +
    torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))). See https://arxiv.org/abs/1606.08415
    """

    def __init__(self, min: float, max: float):
        if min > max:
            raise ValueError(f"min should be < max (got min: {min}, max: {max})")

        super().__init__()
        self.min = min
        self.max = max

    def forward(self, x: Tensor) -> Tensor:
        return torch.clip(gelu(x), self.min, self.max)


class AccurateGELUActivation(nn.Module):
    """
    Applies GELU approximation that is faster than default and more accurate than QuickGELU. See:
    https://github.com/hendrycks/GELUs

    Implemented along with MEGA (Moving Average Equipped Gated Attention)
    """

    def __init__(self):
        super().__init__()
        self.precomputed_constant = math.sqrt(2 / math.pi)

    def forward(self, input: Tensor) -> Tensor:
        return 0.5 * input * (1 + torch.tanh(self.precomputed_constant * (input + 0.044715 * torch.pow(input, 3))))


class MishActivation(nn.Module):
    """
    See Mish: A Self-Regularized Non-Monotonic Activation Function (Misra., https://arxiv.org/abs/1908.08681). Also
    visit the official repository for the paper: https://github.com/digantamisra98/Mish
    """

    def __init__(self):
        super().__init__()
        self.act = nn.functional.mish

    def _mish_python(self, input: Tensor) -> Tensor:
        return input * torch.tanh(nn.functional.softplus(input))

    def forward(self, input: Tensor) -> Tensor:
        return self.act(input)


class LinearActivation(nn.Module):
    """
    Applies the linear activation function, i.e. forwarding input directly to output.
    """

    def forward(self, input: Tensor) -> Tensor:
        return input


class LaplaceActivation(nn.Module):
    """
    Applies elementwise activation based on Laplace function, introduced in MEGA as an attention activation. See
    https://arxiv.org/abs/2209.10655

    Inspired by squared relu, but with bounded range and gradient for better stability
    """

    def forward(self, input, mu=0.707107, sigma=0.282095):
        input = (input - mu).div(sigma * math.sqrt(2.0))
        return 0.5 * (1.0 + torch.erf(input))


class ReLUSquaredActivation(nn.Module):
    """
    Applies the relu^2 activation introduced in https://arxiv.org/abs/2109.08668v2
    """

    def forward(self, input):
        relu_applied = nn.functional.relu(input)
        squared = torch.square(relu_applied)
        return squared


class ClassInstantier(OrderedDict):
    # Instantiates the mapped class (with bundled kwargs, if any) on every lookup.
    def __getitem__(self, key):
        content = super().__getitem__(key)
        cls, kwargs = content if isinstance(content, tuple) else (content, {})
        return cls(**kwargs)


ACT2CLS = {
    "gelu": GELUActivation,
    "gelu_10": (ClippedGELUActivation, {"min": -10, "max": 10}),
    "gelu_fast": FastGELUActivation,
    "gelu_new": NewGELUActivation,
    "gelu_python": (GELUActivation, {"use_gelu_python": True}),
    "gelu_pytorch_tanh": PytorchGELUTanh,
    "gelu_accurate": AccurateGELUActivation,
    "laplace": LaplaceActivation,
    "leaky_relu": nn.LeakyReLU,
    "linear": LinearActivation,
    "mish": MishActivation,
    "quick_gelu": QuickGELUActivation,
    "relu": nn.ReLU,
    "relu2": ReLUSquaredActivation,
    "relu6": nn.ReLU6,
    "sigmoid": nn.Sigmoid,
    "silu": nn.SiLU,
    "swish": nn.SiLU,
    "tanh": nn.Tanh,
    "prelu": nn.PReLU,
}
ACT2FN = ClassInstantier(ACT2CLS)


def get_activation(activation_string):
    if activation_string in ACT2FN:
        return ACT2FN[activation_string]
    else:
        raise KeyError(f"function {activation_string} not found in ACT2FN mapping {list(ACT2FN.keys())}")


# For backwards compatibility with: from activations import gelu_python
gelu_python = get_activation("gelu_python")
gelu_new = get_activation("gelu_new")
gelu = get_activation("gelu")
gelu_fast = get_activation("gelu_fast")
quick_gelu = get_activation("quick_gelu")
silu = get_activation("silu")
mish = get_activation("mish")
linear_act = get_activation("linear")
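

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the upstream module): a minimal demo
# of the ACT2FN mapping and get_activation defined above, assuming only that
# torch is installed. Guarded so it never runs on import.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    # Look up an activation by its config-file name; ACT2FN instantiates the
    # class on each access.
    act = get_activation("gelu_new")
    x = torch.linspace(-3.0, 3.0, steps=7)
    print(act(x))

    # Entries mapped to a (class, kwargs) tuple are built with those kwargs,
    # e.g. "gelu_10" yields a GeLU clipped to [-10, 10].
    clipped = ACT2FN["gelu_10"]
    print(clipped(x))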