"""Flax mT5 model."""

import jax.numpy as jnp

from ...utils import logging
from ..t5.modeling_flax_t5 import FlaxT5EncoderModel, FlaxT5ForConditionalGeneration, FlaxT5Model
from .configuration_mt5 import MT5Config


logger = logging.get_logger(__name__)

_CONFIG_FOR_DOC = "T5Config"


def shift_tokens_right(input_ids: jnp.ndarray, pad_token_id: int, decoder_start_token_id: int) -> jnp.ndarray:
    """
    Shift input ids one token to the right.
    """
    shifted_input_ids = jnp.zeros_like(input_ids)
    shifted_input_ids = shifted_input_ids.at[:, 1:].set(input_ids[:, :-1])
    shifted_input_ids = shifted_input_ids.at[:, 0].set(decoder_start_token_id)

    shifted_input_ids = jnp.where(shifted_input_ids == -100, pad_token_id, shifted_input_ids)
    return shifted_input_ids


class FlaxMT5Model(FlaxT5Model):
    r"""
    This class overrides [`FlaxT5Model`]. Please check the superclass for the appropriate documentation alongside usage
    examples.

    Examples:

    ```python
    >>> from transformers import FlaxMT5Model, AutoTokenizer

    >>> model = FlaxMT5Model.from_pretrained("google/mt5-small")
    >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small")

    >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien."
    >>> summary = "Weiter Verhandlung in Syrien."
    >>> inputs = tokenizer(article, return_tensors="np")

    >>> decoder_input_ids = tokenizer(text_target=summary, return_tensors="np").input_ids

    >>> outputs = model(input_ids=inputs["input_ids"], decoder_input_ids=decoder_input_ids)
    >>> hidden_states = outputs.last_hidden_state
    ```"""

    model_type = "mt5"
    config_class = MT5Config


class FlaxMT5EncoderModel(FlaxT5EncoderModel):
    r"""
    This class overrides [`FlaxT5EncoderModel`]. Please check the superclass for the appropriate documentation
    alongside usage examples.

    Examples:

    ```python
    >>> from transformers import FlaxMT5EncoderModel, AutoTokenizer

    >>> model = FlaxMT5EncoderModel.from_pretrained("google/mt5-small")
    >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small")

    >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien."
    >>> inputs = tokenizer(article, return_tensors="np")

    >>> outputs = model(input_ids=inputs["input_ids"])
    >>> hidden_states = outputs.last_hidden_state
    ```"""

    model_type = "mt5"
    config_class = MT5Config


class FlaxMT5ForConditionalGeneration(FlaxT5ForConditionalGeneration):
    r"""
    This class overrides [`FlaxT5ForConditionalGeneration`]. Please check the superclass for the appropriate
    documentation alongside usage examples.

    Examples:

    ```python
    >>> from transformers import FlaxMT5ForConditionalGeneration, AutoTokenizer

    >>> model = FlaxMT5ForConditionalGeneration.from_pretrained("google/mt5-small")
    >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small")

    >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien."
    >>> summary = "Weiter Verhandlung in Syrien."
    >>> inputs = tokenizer(article, return_tensors="np")

    >>> decoder_input_ids = tokenizer(text_target=summary, return_tensors="np").input_ids

    >>> outputs = model(**inputs, decoder_input_ids=decoder_input_ids)
    >>> logits = outputs.logits
    ```"""

    model_type = "mt5"
    config_class = MT5Config
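

# Illustrative sketch of how `shift_tokens_right` is typically combined with the model
# at training time: labels are shifted one position to build `decoder_input_ids`. The
# checkpoint name and tokenizer calls mirror the docstring examples above; treat this
# as a sketch of the usual pattern rather than a verified recipe.
#
# >>> from transformers import AutoTokenizer
# >>> tokenizer = AutoTokenizer.from_pretrained("google/mt5-small")
# >>> model = FlaxMT5ForConditionalGeneration.from_pretrained("google/mt5-small")
# >>> inputs = tokenizer("UN Offizier sagt, dass weiter verhandelt werden muss in Syrien.", return_tensors="np")
# >>> labels = tokenizer(text_target="Weiter Verhandlung in Syrien.", return_tensors="np").input_ids
# >>> decoder_input_ids = shift_tokens_right(
# ...     labels, model.config.pad_token_id, model.config.decoder_start_token_id
# ... )
# >>> outputs = model(**inputs, decoder_input_ids=decoder_input_ids)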


__all__ = ["FlaxMT5EncoderModel", "FlaxMT5ForConditionalGeneration", "FlaxMT5Model"]