# Utilities for automatically generating docstrings for Transformers models and
# processors via the `@auto_docstring` decorator defined at the end of this module.

import inspect
import os
import textwrap
from pathlib import Path
from typing import List, Optional, Tuple, Union, get_args

import regex as re

from .doc import (
    MODELS_TO_PIPELINE,
    PIPELINE_TASKS_TO_SAMPLE_DOCSTRINGS,
    PT_SAMPLE_DOCSTRINGS,
    _prepare_output_docstrings,
)
from .generic import ModelOutput


PATH_TO_TRANSFORMERS = Path("src").resolve() / "transformers"

AUTODOC_FILES = [
    "configuration_*.py",
    "modeling_*.py",
    "tokenization_*.py",
    "processing_*.py",
    "image_processing_*_fast.py",
    "image_processing_*.py",
    "feature_extractor_*.py",
]

PLACEHOLDER_TO_AUTO_MODULE = {
    "image_processor_class": ("image_processing_auto", "IMAGE_PROCESSOR_MAPPING_NAMES"),
    "feature_extractor_class": ("feature_extraction_auto", "FEATURE_EXTRACTOR_MAPPING_NAMES"),
    "processor_class": ("processing_auto", "PROCESSOR_MAPPING_NAMES"),
    "config_class": ("configuration_auto", "CONFIG_MAPPING_NAMES"),
}

UNROLL_KWARGS_METHODS = {"preprocess"}
UNROLL_KWARGS_CLASSES = {"ImageProcessorFast"}

HARDCODED_CONFIG_FOR_MODELS = {
    "openai": "OpenAIGPTConfig",
    "x-clip": "XCLIPConfig",
    "kosmos2": "Kosmos2Config",
    "donut": "DonutSwinConfig",
    "esmfold": "EsmConfig",
}

_re_checkpoint = re.compile(r"\[(.+?)\]\((https://huggingface\.co/.+?)\)")


class ImageProcessorArgs:
    # Shared descriptions for fast image processor `preprocess()` arguments. Each attribute
    # is a {"description": ..., "shape": ...} entry; the descriptions below appear in this order:
    # images, videos, do_resize, size, default_to_square, resample, do_center_crop, crop_size,
    # do_rescale, rescale_factor, do_normalize, image_mean, image_std, do_convert_rgb,
    # return_tensors, data_format, input_data_format, device.
    Image to preprocess. Expects a single or batch of images with pixel values ranging from 0 to 255. If
    passing in images with pixel values between 0 and 1, set `do_rescale=False`.
    Ndescriptionshapez
    Video to preprocess. Expects a single or batch of videos with pixel values ranging from 0 to 255. If
    passing in videos with pixel values between 0 and 1, set `do_rescale=False`.
    z&
    Whether to resize the image.
    z>
    Describes the maximum input dimensions to the model.
    zP
    Whether to default to a square image when resizing, if size is an int.
    z
    Resampling filter to use if resizing the image. This can be one of the enum `PILImageResampling`. Only
    has an effect if `do_resize` is set to `True`.
    z+
    Whether to center crop the image.
    z@
    Size of the output image after applying `center_crop`.
    z'
    Whether to rescale the image.
    zR
    Rescale factor to rescale the image by if `do_rescale` is set to `True`.
    z)
    Whether to normalize the image.
    ze
    Image mean to use for normalization. Only has an effect if `do_normalize` is set to `True`.
    zw
    Image standard deviation to use for normalization. Only has an effect if `do_normalize` is set to
    `True`.
    z.
    Whether to convert the image to RGB.
    zU
    Returns stacked tensors if set to `pt`, otherwise returns a list of tensors.
    zc
    Only `ChannelDimension.FIRST` is supported. Added for compatibility with slow processors.
    a  
    The channel dimension format for the input image. If unset, the channel dimension format is inferred
    from the input image. Can be one of:
    - `"channels_first"` or `ChannelDimension.FIRST`: image in (num_channels, height, width) format.
    - `"channels_last"` or `ChannelDimension.LAST`: image in (height, width, num_channels) format.
    - `"none"` or `ChannelDimension.NONE`: image in (height, width) format.
    zf
    The device to process the images on. If unset, the device is inferred from the input images.


class ModelArgs:
    # Shared descriptions for model `forward()` arguments. Each attribute is a
    # {"description": ..., "shape": ...} entry; the descriptions below appear in this order:
    # labels, num_logits_to_keep, input_ids, input_values, attention_mask, head_mask,
    # cross_attn_head_mask, decoder_attention_mask, decoder_head_mask, encoder_hidden_states,
    # encoder_attention_mask, token_type_ids, position_ids, past_key_values, past_key_value,
    # inputs_embeds, decoder_input_ids, decoder_inputs_embeds, use_cache, output_attentions,
    # output_hidden_states, return_dict, cache_position, hidden_states, interpolate_pos_encoding,
    # position_embeddings, config, start_positions, end_positions, encoder_outputs,
    # output_router_logits, logits_to_keep, pixel_values, vision_feature_layer,
    # vision_feature_select_strategy, image_sizes, pixel_mask.
    Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
    config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
    (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
    z(of shape `(batch_size, sequence_length)`r   aC  
    Calculate logits for the last `num_logits_to_keep` tokens. If `0`, calculate logits for all
    `input_ids` (special case). Only last token logits are needed for generation, and calculating them only for that
    token can save memory, which becomes pretty significant for long sequences or large vocabulary size.
    Na"  
    Indices of input sequence tokens in the vocabulary. Padding will be ignored by default.

    Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
    [`PreTrainedTokenizer.__call__`] for details.

    [What are input IDs?](../glossary#input-ids)
    a  
    Float values of input raw speech waveform. Values can be obtained by loading a `.flac` or `.wav` audio file
    into an array of type `List[float]` or a `numpy.ndarray`, *e.g.* via the soundfile library (`pip install
    soundfile`). To prepare the array into `input_values`, the [`AutoProcessor`] should be used for padding and
    conversion into a tensor of type `torch.FloatTensor`. See [`{processor_class}.__call__`] for details.
    z
    Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

    - 1 for tokens that are **not masked**,
    - 0 for tokens that are **masked**.

    [What are attention masks?](../glossary#attention-mask)
    z
    Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:

    - 1 indicates the head is **not masked**,
    - 0 indicates the head is **masked**.
    z4of shape `(num_heads,)` or `(num_layers, num_heads)`z
    Mask to nullify selected heads of the cross-attention modules. Mask values selected in `[0, 1]`:

    - 1 indicates the head is **not masked**,
    - 0 indicates the head is **masked**.
    z"of shape `(num_layers, num_heads)`z
    Mask to avoid performing attention on certain token indices. By default, a causal mask will be used, to
    make sure the model can only look at previous inputs in order to predict the future.
    z/of shape `(batch_size, target_sequence_length)`z
    Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in `[0, 1]`:

    - 1 indicates the head is **not masked**,
    - 0 indicates the head is **masked**.
    z4of shape `(decoder_layers, decoder_attention_heads)`z
    Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention
    if the model is configured as a decoder.
    z5of shape `(batch_size, sequence_length, hidden_size)`a,  
    Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
    the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`:

    - 1 for tokens that are **not masked**,
    - 0 for tokens that are **masked**.
    a  
    Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0, 1]`:

    - 0 corresponds to a *sentence A* token,
    - 1 corresponds to a *sentence B* token.

    [What are token type IDs?](../glossary#token-type-ids)
    z
    Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, config.n_positions - 1]`.

    [What are position IDs?](../glossary#position-ids)
    a~  
    Pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention
    blocks) that can be used to speed up sequential decoding. This typically consists of the `past_key_values`
    returned by the model at a previous stage of decoding, when `use_cache=True` or `config.use_cache=True`.

    Two formats are allowed:
        - a [`~cache_utils.Cache`] instance, see our [kv cache guide](https://huggingface.co/docs/transformers/en/kv_cache);
        - Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of
        shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`). This is also known as the legacy
        cache format.

    The model will output the same cache format that is fed as input. If no `past_key_values` are passed, the
    legacy cache format will be returned.

    If `past_key_values` are used, the user can optionally input only the last `input_ids` (those that don't
    have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `input_ids`
    of shape `(batch_size, sequence_length)`.
    z2
    deprecated in favor of `past_key_values`
    a  
    Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
    is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
    model's internal embedding lookup matrix.
    a  
    Indices of decoder input sequence tokens in the vocabulary.

    Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
    [`PreTrainedTokenizer.__call__`] for details.

    [What are decoder input IDs?](../glossary#decoder-input-ids)
    a(  
    Optionally, instead of passing `decoder_input_ids` you can choose to directly pass an embedded
    representation. If `past_key_values` is used, optionally only the last `decoder_inputs_embeds` have to be
    input (see `past_key_values`). This is useful if you want more control over how to convert
    `decoder_input_ids` indices into associated vectors than the model's internal embedding lookup matrix.

    If `decoder_input_ids` and `decoder_inputs_embeds` are both unset, `decoder_inputs_embeds` takes the value
    of `inputs_embeds`.
    z<of shape `(batch_size, target_sequence_length, hidden_size)`z
    If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
    `past_key_values`).
    z
    Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
    tensors for more detail.
    z
    Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
    more detail.
    zU
    Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
    a  
    Indices depicting the position of the input sequence tokens in the sequence. Contrarily to `position_ids`,
    this tensor is not affected by padding. It is used to update the cache in the correct position and to infer
    the complete sequence length.
    zof shape `(sequence_length)`z9 input to the layer of shape `(batch, seq_len, embed_dim)zD
    Whether to interpolate the pre-trained position encodings.
    z
    Tuple containing the cosine and sine positional embeddings of shape `(batch_size, seq_len, head_dim)`,
    with `head_dim` being the embedding dimension of each attention head.
    a  
    Model configuration class with all the parameters of the model. Initializing with a config file does not
    load the weights associated with the model, only the configuration. Check out the
    [`~PreTrainedModel.from_pretrained`] method to load the model weights.
    a  
    Labels for position (index) of the start of the labelled span for computing the token classification loss.
    Positions are clamped to the length of the sequence (`sequence_length`). Positions outside of the sequence
    are not taken into account for computing the loss.
    zof shape `(batch_size,)`a  
    Labels for position (index) of the end of the labelled span for computing the token classification loss.
    Positions are clamped to the length of the sequence (`sequence_length`). Positions outside of the sequence
    are not taken into account for computing the loss.
    aB  
    Tuple consists of (`last_hidden_state`, *optional*: `hidden_states`, *optional*: `attentions`)
    `last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)` (*optional*) is a sequence of
    hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder.
    z
    Whether or not to return the logits of all the routers. They are useful for computing the router loss, and
    should not be returned during inference.
    a  
    If an `int`, compute logits for the last `logits_to_keep` tokens. If `0`, calculate logits for all
    `input_ids` (special case). Only last token logits are needed for generation, and calculating them only for that
    token can save memory, which becomes pretty significant for long sequences or large vocabulary size.
    If a `torch.Tensor`, must be 1D corresponding to the indices to keep in the sequence length dimension.
    This is useful when using packed tensor format (single dimension for batch and sequence length).
    a  
    The tensors corresponding to the input images. Pixel values can be obtained using
    [`{image_processor_class}`]. See [`{image_processor_class}.__call__`] for details ([`{processor_class}`] uses
    [`{image_processor_class}`] for processing images).
    z=of shape `(batch_size, num_channels, image_size, image_size)`z
    The index of the layer to select the vision feature. If multiple indices are provided,
    the vision feature of the corresponding indices will be concatenated to form the
    vision features.
    z
    The feature selection strategy used to select the vision feature from the vision backbone.
    Can be one of `"default"` or `"full"`.
    zU
    The sizes of the images in the batch, being (height, width) for each image.
    zof shape `(batch_size, 2)`a  
    Mask to avoid performing attention on padding pixel values. Mask values selected in `[0, 1]`:

    - 1 for pixels that are real (i.e. **not masked**),
    - 0 for pixels that are padding (i.e. **masked**).

    [What are attention masks?](../glossary#attention-mask)
    of shape `(batch_size, height, width)`


class ClassDocstring:
    # Intro docstrings used by `auto_class_docstring`, keyed by model class-name suffix.
    # The texts below appear in this order:
    # PreTrainedModel, Model, ForPreTraining, Decoder, TextModel, ForSequenceClassification,
    # ForQuestionAnswering, ForMultipleChoice, ForMaskedLM, ForTokenClassification,
    # ForConditionalGeneration, ForCausalLM, ImageProcessorFast, Backbone, ForImageClassification,
    # ForSemanticSegmentation, ForAudioClassification, ForAudioFrameClassification, ForPrediction,
    # WithProjection.
    This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
    library implements for all its models (such as downloading or saving, resizing the input embeddings, pruning heads
    etc.)

    This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
    Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage
    and behavior.
    zd
    The bare {model_name} Model outputting raw hidden-states without any specific head on top.
    zJ
    The {model_name} Model with a specified pretraining head on top.
    zf
    The bare {model_name} Decoder outputting raw hidden-states without any specific head on top.
    zh
    The bare {model_name} Text Model outputting raw hidden-states without any specific head on top.
    zk
    The {model_name} Model with a sequence classification/regression head on top e.g. for GLUE tasks.
    z
    The {model_name} transformer with a span classification head on top for extractive question-answering tasks like
    SQuAD (a linear layer on top of the hidden-states output to compute `span start logits` and `span end logits`).
    z
    The {model_name} Model with a multiple choice classification head on top (a linear layer on top of the pooled output and a
    softmax) e.g. for RocStories/SWAG tasks.
    zI
    The {model_name} Model with a `language modeling` head on top.
    z
    The {model_name} transformer with a token classification head on top (a linear layer on top of the hidden-states
    output) e.g. for Named-Entity-Recognition (NER) tasks.
    z{
    The {model_name} Model for token generation conditioned on other modalities (e.g. image-text-to-text generation).
    z>
    The {model_name} Model for causal language modeling.
    z9
    Constructs a fast {model_name} image processor.
    z$
    The {model_name} backbone.
    z\
    The {model_name} Model with an image classification head on top e.g. for ImageNet.
    zf
    The {model_name} Model with a semantic segmentation head on top e.g. for ADE20K, CityScapes.
    z{
    The {model_name} Model with an audio classification head on top (a linear layer on top of the pooled
    output).
    zl
    The {model_name} Model with a frame classification head on top for tasks like Speaker Diarization.
    z]
    The {model_name} Model with a distribution head on top for time-series forecasting.
    zm
    The {model_name} Model with a projection layer on top (a linear layer on top of the pooled output).


class ClassAttrs:
    # Docstrings for common `PreTrainedModel` class attributes. The texts below appear in this order:
    # base_model_prefix, supports_gradient_checkpointing, _no_split_modules,
    # _skip_keys_device_placement, _supports_flash_attn_2, _supports_sdpa, _supports_flex_attn,
    # _supports_cache_class, _supports_quantized_cache, _supports_static_cache,
    # _supports_attention_backend, _tied_weights_keys.
    A string indicating the attribute associated to the base model in derived classes of the same architecture adding modules on top of the base model.
    a  
    Whether the model supports gradient checkpointing or not. Gradient checkpointing is a memory-saving technique that trades compute for memory, by storing only a subset of activations (checkpoints) and recomputing the activations that are not stored during the backward pass.
    ab  
    Layers of modules that should not be split across devices should be added to `_no_split_modules`. This can be useful for modules that contain skip connections or other operations that are not compatible with splitting the module across devices. Setting this attribute will enable the use of `device_map="auto"` in the `from_pretrained` method.
    zu
    A list of keys to ignore when moving inputs or outputs between devices when using the `accelerate` library.
    zS
    Whether the model's attention implementation supports FlashAttention 2.0.
    zd
    Whether the model's attention implementation supports SDPA (Scaled Dot Product Attention).
    zN
    Whether the model's attention implementation supports FlexAttention.
    zM
    Whether the model supports a `Cache` instance as `past_key_values`.
    z\
    Whether the model supports a `QuantoQuantizedCache` instance as `past_key_values`.
    zS
    Whether the model supports a `StaticCache` instance as `past_key_values`.
    z
    Whether the model supports attention interface functions. This flag signals that the model can be used as an efficient backend in TGI and vLLM.
    za
    A list of `state_dict` keys that are potentially tied to another key in the state_dict.


ARGS_TO_IGNORE = {"self", "args", "kwargs", "deprecated_arguments"}


def equalize_indent(docstring, indent_level):
    Adjust the indentation of a docstring to match the specified indent level.
    
c                 S      g | ]}|  qS r   )lstrip).0liner   r   r   
<listcomp>      z#equalize_indent.<locals>.<listcomp> )join
splitlinestextwrapindent	docstringindent_levelr   r   r   equalize_indent  s   r=   c                 C   s   t t | d| S )r-   r5   )r8   r9   dedentr:   r   r   r   set_min_indent  s   r?   c                 C   *   t d}|| }|rd|d S d S )Nz(of shape\s*(?:`.*?`|\(.*?\)))r5   r   recompilesearchgroup)r;   Zshape_patternmatchr   r   r   parse_shape  
   

rG   c                 C   r@   )Nz(defaults to \s*[^)]*)r5   r   rA   )r;   Zdefault_patternrF   r   r   r   parse_default  rH   rI   c                 C   s  t d| }|r| | d }| d|  } nd}t dt j}|| }|r1|ddn| }|dd  dkrJd	|ddd }|dd	  d
ks`|dd	  dkrld	|ddd }t
|d	}i }|rt d| d| dt jt jB }||D ]<}|d}	|d}
|d}d|v }t|}t|}|d }t dd|d}d| }|
|||||d||	< q|r|rd| }t
|d	}||fS )ai  
    Parse the docstring to extract the Args section and return it as a dictionary.
    The docstring is expected to be in the format:
    Args:
        arg1 (type): Description of arg1.
        arg2 (type): Description of arg2.

    # This function will also return the remaining part of the docstring after the Args section.
    Returns:/Example:
    ...
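    # Worked example of the format above (illustrative; the argument names and the exact
    # keys of the returned entries are assumptions, not taken verbatim from this module):
    #
    #     Args:
    #         input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
    #             Indices of input sequence tokens in the vocabulary.
    #         labels (`torch.LongTensor`, *optional*):
    #             Labels for computing the loss.
    #
    #     Example:
    #         ...
    #
    # Parsing such a docstring yields one entry per argument ("input_ids", "labels"), each
    # holding the parsed type, shape and description, and separately returns the remainder
    # of the docstring after the Args section (here, the "Example:" block).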
    z(?m)^([ \t]*)(?=Example|Return)N z(?:Args:)(\n.*)?(\n)?$r   r.   z"""r   zr"""z^\s{0,z;}(\w+)\s*\(\s*([^, \)]*)(\s*.*?)\s*\)\s*:\s*((?:(?!\n^\s{0,z}\w+\s*\().)*)      optionalr(   ^z    )typer   rN   r   defaultadditional_info)rB   rD   startrC   DOTALLrE   r0   r*   stripr6   r?   	MULTILINEfinditerrG   rI   sub)r;   Zmax_indent_levelrF   Zremainder_docstringZargs_patternZ
args_matchZargs_sectionparamsZparam_pattern
param_name
param_typerR   rN   r   rQ   Zparam_descriptionr   r   r   parse_docstring  sP   
,





	
r\   returnc              
      s   t | }|dkr,zt|  | fW S  ty+ } ztt|  | fW  Y d}~S d}~ww  fdd|D }t|}|rB||d } || fS )z
    Check if a "nested" type hint contains a specific target type,
    return the first-level type containing the target_type if found.
    r   Nc                    s   g | ]	}t | d  qS r   )contains_typer1   argtarget_typer   r   r3   *  s    z!contains_type.<locals>.<listcomp>T)r   
issubclass	ExceptionrP   anyindex)Z	type_hintrc   r$   _Zfound_type_tupleZ
found_typer   rb   r   r_     s   r_   c                 C   s   t | }|tjjd dkrdS |tjjd }tD ]/}|dd }d|v r1|dd nd}||rL||rL|t	|t	|  }|  S qt
d|  d	S )
z>
    Get the model name from the file path of the object.
    modelsNrK   *r   rJ   uE   🚨 Something went wrong trying to find the model name in the path: model)inspectgetsourcefiler*   ospathsepAUTODOC_FILES
startswithendswithr)   print)objrp   	file_name	file_typerS   endmodel_name_lowercaser   r   r   get_model_name1  s   
r{   placeholders
model_namec                 C   sh   ddl m} i }| D ]'}|tv r1tt|t| d t| d | }t|ttfr-|d }|||< q
|S )zF
    Get the dictionary of placeholders for the given model name.
    r   autor   )transformers.modelsr   PLACEHOLDER_TO_AUTO_MODULEgetattr
isinstancelisttuple)r|   r}   auto_moduleplaceholders_dictplaceholderZplace_holder_valuer   r   r   get_placeholders_dictD  s   
r   c              	      s   t tdd fdd D }|s S t|| D ].} | d }td|}fdd|D }|rE|jdi fdd	|D }| | d< q S )z
    Replaces placeholders such as {image_processor_class} in the docstring with the actual values,
    deduced from the model name and the auto modules.
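    # Illustrative sketch (the model name is an assumption): for a docstring containing the
    # placeholder `{image_processor_class}` and a model such as "llava", the value is resolved
    # through PLACEHOLDER_TO_AUTO_MODULE, i.e. read from
    # `transformers.models.auto.image_processing_auto.IMAGE_PROCESSOR_MAPPING_NAMES["llava"]`,
    # and substituted into the docstring before it is rendered.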
    z{(.*?)}rJ   c                 3   s    | ]	} | d  V  qdS )r   Nr   r`   )r$   r   r   	<genexpr>`  s    z(format_args_docstring.<locals>.<genexpr>r   c                    s   g | ]}| v r|qS r   r   r1   r   r   r   r   r3   k  s    z)format_args_docstring.<locals>.<listcomp>c                    s   i | ]}| | qS r   r   r   r   r   r   
<dictcomp>m  s    z)format_args_docstring.<locals>.<dictcomp>Nr   )setrB   findallr6   r   format)r$   r}   r|   ra   Znew_argr   )r$   r   r   format_args_docstringZ  s   $
r   args_classesc                 C   s2   t | ttfri }| D ]}||j q|S | jS N)r   r   r   update__dict__)r   Zargs_classes_dictZ
args_classr   r   r   source_args_docs  s   r   c                 C   sX   d }| j }t|}|D ]\}}|dr|d d }d| }||kr)|} |S q|S )N/rK   zhttps://huggingface.co/)__doc___re_checkpointr   rt   )r   
checkpointZconfig_sourceZcheckpointsZ	ckpt_nameZ	ckpt_linkZckpt_link_from_namer   r   r    get_checkpoint_from_config_class|  s   


r   c                 C   s,   d}| j dkrd| d}t||d }|S )NrJ   forwardzThe [`a  `] forward method, overrides the `__call__` special method.

        <Tip>

        Although the recipe for forward pass needs to be defined within this function, one should call the [`Module`]
        instance afterwards instead of this since the former takes care of running the pre and post processing steps while
        the latter silently ignores them.

        </Tip>

        r(   )r   r=   )r+   
class_nameparent_classr<   Zintro_docstringr   r   r   add_intro_docstring  s
   
r   c                 C   s   ddl m} |durt|}nt| }|r-|tt|td d td d vr-|dd}| jdd }|du r<d}n1ztt|td d td d | }W n tyl   |t	v r`t	| }n
d	}t
d
| d Y nw |||fS )z
    Extract model information from a function or its parent class.

    Args:
        func (`function`): The function to extract information from
        parent_class (`class`): Optional parent class of the function
    r   r~   Nr   r   rh   -r'   ZModelConfigu   🚨 Config not found for zM. You can manually add it to HARDCODED_CONFIG_FOR_MODELS in utils/args_doc.py)r   r   r{   r   r   replacer   r*   KeyErrorHARDCODED_CONFIG_FOR_MODELSru   )r+   r   r   rz   r   r   r   r   r   _get_model_info  s:   	





	r   c                 C   s   d}| j tjjkrb| j }dt|v r!dt|ddd}n%t|drE|j	dddd d	| j j
 }|d
 d	krD|dd }n	 d|v rQtdd|}d|v r^tdd|}d}||fS d}||fS )a  
    Process and format a parameter's type annotation.

    Args:
        param (`inspect.Parameter`): The parameter from the function signature
        param_name (`str`): The name of the parameter
        func (`function`): The function the parameter belongs to
    FtypingrJ   typing.transformers.~r   builtinsr'   r   r   N   🚨  for z of 	 in file z has an invalid type
ForwardRefForwardRef\('([\w.]+)'\)\1r   Optional\[(.*?)\]T)
annotationrm   	Parameteremptystrr6   r*   r   hasattrr   r   ru   r   __code__co_filenamerB   rX   )paramrZ   r+   rN   r[   r   r   r   _process_parameter_type  s&   	
$r   c                 C   s   d}d}d}d}d}	| |v rF|dkr"||   dddur"||  d }||  d }||  d }|r2|nd}||  d p;d}	||  d  d	}n| |v ra||  d }|rVd
| nd}||  d }d}	nd}|rgdnd}
||
||	||fS )a  
    Get parameter documentation details from the appropriate source.
    Tensor shape, optional status and description are taken from the custom docstring in priority if available.
    Type is taken from the function signature first, then from the custom docstring if missing from the signature

    Args:
        param_name (`str`): Name of the parameter
        documented_params (`dict`): Dictionary of documented parameters (manually specified in the docstring)
        source_args_dict (`dict`): Default source args dictionary to use if not in documented_params
        param_type (`str`): Current parameter type (may be updated)
        optional (`bool`): Whether the parameter is optional (may be updated)
    NrJ   TrP   rN   r   rR   r   r.   r5   Fz, *optional*)get)rZ   documented_paramssource_args_dictr[   rN   r   r   shape_stringis_documentedrR   optional_stringr   r   r   _get_parameter_info  s*   r   c              
   C   s  d}t ttg}i }| j D ]\}	}
|	tv s%|
jtjj	ks%|
jtjj
kr&qt|
|	|\}}d}|
jtjkrD|
jdurDdt|
j d}t|	||||\}}}}}}|r|	dkro|dkrcd| d}nd|dd	  d}n|dkrt	 d|v rz|nd| d}|r|	 d| | d| }n|	 d| | | | d| }|t||d 7 }q|r|nd|||r|nd|d||	< |d|	 d|j d|jj d q||fS )a  
    Process all regular parameters (not kwargs parameters) from the function signature.

    Args:
        sig (`inspect.Signature`): Function signature
        func (`function`): Function the parameters belong to
        class_name (`str`): Name of the class
        documented_params (`dict`): Dictionary of parameters that are already documented
        indent_level (`int`): Indentation level
        undocumented_parameters (`list`): List to append undocumented parameters to
    rJ   N, defaults to ``r    z[`z`]r'   rK   Fr   r   r    has no type ():   z<fill_type>z
    <fill_description>)rP   rN   r   r   rQ      🚨 `` is part of zZ's signature, but not documented. Make sure to add it to the docstring of the function in )r   r   r   
parametersitemsARGS_TO_IGNOREkindrm   r   VAR_POSITIONALVAR_KEYWORDr   rQ   _emptyr   r   r*   ru   r   r   r   r?   append)sigr+   r   r   r<   undocumented_parametersr;   r   missing_argsrZ   r   r[   rN   param_defaultr   r   rR   r   r   param_docstringr   r   r   _process_regular_parameters"  sP   




r   c                 C   s`   d}|}d}|s.| | D ]}|dkr|d7 }q|dkr'|d8 }|dkr'd} nq|d7 }|r|S )Nr   F(r   )Tr   )linesline_endZparenthesis_countZsig_line_endZ	found_sigcharr   r   r   find_sig_linej  s    

r   c                    sh  d}t t}|jtv }	|	s durt fddtD }	|	r2dd | j D }
|
D ]}|jt	j
jkr6q+|jjd j}|durPt|\}}|durPt||}|jjd j D ]\}}t|}d}d	|v rtd|d
dd}n|dddd d| }d|v rtdd|}d|v rtdd|}d}d} durtt |d}|dkrd| dnd}t|||||\}}}}}}|r|dkrtd| d|jjd j d|jj d d|v r|nd| d}|r|t| d| | d| |d 7 }qY|t| d| | | | d| |d 7 }qY|d| d|jjd j d |jj d qYq+|S )!a  
    Process **kwargs parameters if needed.

    Args:
        sig (`inspect.Signature`): Function signature
        func (`function`): Function the parameters belong to
        parent_class (`class`): Parent class of the function
        model_name_lowercase (`str`): Lowercase model name
        documented_kwargs (`dict`): Dictionary of kwargs that are already documented
        indent_level (`int`): Indentation level
        undocumented_parameters (`list`): List to append undocumented parameters to
    rJ   Nc                 3   s    | ]}| j v V  qd S r   r   )r1   Zunroll_kwargs_classr   r   r   r     s    

z-_process_kwargs_parameters.<locals>.<genexpr>c                 S   s"   g | ]\}}|j tjjkr|qS r   )r   rm   r   r   )r1   rh   kwargs_paramr   r   r   r3     s
    z._process_kwargs_parameters.<locals>.<listcomp>r   Fr   r   r   r   r   r'   r   r   r   r   r   Tr   r   r   r   r   r   r   r   r   r   r   zN, but not documented. Make sure to add it to the docstring of the function in )r   r   r   UNROLL_KWARGS_METHODSrf   UNROLL_KWARGS_CLASSESr   r   r   rm   r   r   __args__r   r\   r   __annotations__r   r6   r*   r   rB   rX   r   r   ru   r   r   r   r?   r   )r   r+   r   rz   documented_kwargsr<   r   r;   r   Zunroll_kwargsZkwargs_parametersZkwarg_paramZkwargs_documentationrh   rZ   Zparam_type_annotationr[   rN   r   r   r   rR   r   r   r   r   r   _process_kwargs_parameters{  sp   


$$0r   c                 C   s   t d|d }g }i }	i }
| dur t| \}	} |dur t|	|}	t||||	||\}}||7 }t|||||
||}||7 }t|dkrJtd| |S )a  
    Process the parameters section of the docstring.

    Args:
        func_documentation (`str`): Existing function documentation (manually specified in the docstring)
        sig (`inspect.Signature`): Function signature
        func (`function`): Function the parameters belong to
        class_name (`str`): Name of the class the function belongs to
        model_name_lowercase (`str`): Lowercase model name
        parent_class (`class`): Parent class of the function (if any)
        indent_level (`int`): Indentation level
    zArgs:
r(   Nr   r.   )r?   r\   r   r   r   r)   ru   r6   )func_documentationr   r+   r   rz   r   r<   r;   r   r   r   r   r   Zkwargs_docstringr   r   r   _process_parameters_section  s&   
r   c           	      C   s   d}| dur@t d|  }dur@t d| }|r+| | |  }| | d } n
| | d }d} t||d }|| fS |jdurg|jtjkrgt|jt\}}t	|||d}|
dd}t||d }|| fS )aK  
    Process the returns section of the docstring.

    Args:
        func_documentation (`str`): Existing function documentation (manually specified in the docstring)
        sig (`inspect.Signature`): Function signature
        config_class (`str`): Config class for the model
        indent_level (`int`): Indentation level
    rJ   Nz(?m)^([ \t]*)(?=Return)(?m)^([ \t]*)(?=Example)r(   )	add_intror   )rB   rD   rS   r?   return_annotationrm   r   r_   r   r   r   )	r   r   r   r<   return_docstringZmatch_startZ	match_endr   r   r   r   r   _process_returns_section
  s"   
r   c              	      s  ddl m} d}	| dur'td|  }
r'| |
 d }	dt|	|d  }	|	S |du r|durdd	t  d
}t||}|j	j
}| }du rzt|| }W n2 ty   |tv r|j	j}t|   | v r fdd| D d }||v rt|| }Y nw |dur|durd}| }t| j||dddddd}t||d }	|	S td| d|j d| d |	S tD ])}t|j|}|| v rt| }t| j||ddddd}t||d }	 |	S q|	S )a!  
    Process the example section of the docstring.

    Args:
        func_documentation (`str`): Existing function documentation (manually specified in the docstring)
        func (`function`): Function being processed
        parent_class (`class`): Parent class of the function
        class_name (`str`): Name of the class
        model_name_lowercase (`str`): Lowercase model name
        config_class (`str`): Config class for the model
        checkpoint: Checkpoint to use in examples
        indent_level (`int`): Indentation level
    r   r~   rJ   Nr   r.   r(   r   |r   c                    s   g | ]
\}}| kr|qS r   r   )r1   kvZconfig_class_namer   r   r3   W  s    z,_process_example_section.<locals>.<listcomp>z...      z<mask>)model_classr   expected_outputexpected_lossqa_target_start_indexqa_target_end_indexmasku   🚨 No checkpoint found for r'   zB. Please add a `checkpoint` arg to `auto_docstring` or add one in z's docstring)r   r   r   r   r   r   )r   r   rB   rD   rS   r?   r6   r   keysr   CONFIG_MAPPINGr   r   r   r   valuesr   rE   r   ru   r   r	   r   Zmodeling_autor
   )r   r+   r   r   rz   r   r   r<   r   example_docstringrF   taskZ
model_taskr   Zcheckpoint_exampler   Zmodel_name_for_auto_configZexample_annotationZname_model_list_for_taskZmodel_list_for_taskZpipeline_namer   r   r   _process_example_section-  s   =
	r   c              	   C   s   t | }t| }t| |\}}}	| j}
|dur(|
dur(t||d d |
 }
n|dur.|}
|dur:t||d }nt| |||d}|t|
|| ||||7 }t|
||	|\}}
||7 }t	|
| ||||	||}||7 }|| _| S )z9
    Wrapper that automatically generates docstring.
    Nr(   r.   )r   r   r<   )
rm   	signaturer,   r   r   r?   r   r   r   r   )r+   r   custom_introcustom_argsr   r   r<   rz   r   r   r   r;   r   r   r   r   r   auto_method_docstring  sB   

r  c                 C   sZ  ddl m} t| j| |djdd}t| }t| }|r*ddd |	d	D nd
}|rF|t
t
|td d td d vrF|d	d}tddtj  d| j}	|	g krr| jd
u rr|d
u rrtd| j dtj  |	g ks{|d
ur|	r|	d nd
}	|d
urt||}
|
ds|
d7 }
n|d
u rd}
n	t
t|	j|d}
t|
rt|
 |nd}|	dkrddd | jD v r|ttj |7 }|td| |7 }d}| j D ]C\}}t|s|ds|jjdkrd}nt|j}|	rd|	v rtdt
t|d
}|d
ur|t| d| d| d7 }qn	t d| j d  || _| S )!zm
    Wrapper that automatically generates a docstring for classes based on their attributes and methods.
    r   r~   )r   r  zArgs:zParameters:r5   c                 S   r/   r   )title)r1   r   r   r   r   r3     r4   z(auto_class_docstring.<locals>.<listcomp>rh   Nr   r   r   r   r   z)$r   z?` is not part of the auto doc. Here are the available classes: r.   rJ   )r}   r"   c                 s   s    | ]}|j V  qd S r   r   )r1   xr   r   r   r     s    z'auto_class_docstring.<locals>.<genexpr>__propertyZConfigz%Config should have explicit docstringz (`z`): z/You used `@auto_class_docstring` decorator on `zF` but this class is not part of the AutoMappings. Remove the decorator)!r   r   r  __init__r   r   r,   r{   r6   r*   r   r   rB   r   r!   r   r   r   
ValueErrorr=   rt   r   r)   r?   __mro__r"   r   callablers   	__class__rP   r#   ru   )clsr   r  r   r   Zdocstring_initr<   rz   Zmodel_name_titlenameZ	pre_blockr;   Z	attr_docsZ	attr_nameZ
attr_valueZ	attr_typeZindented_docr   r   r   auto_class_docstring  sb   "
"



r  )r   r  r   c                   s     fdd}| r|| S |S )a  
    Automatically generates docstrings for classes and methods in the Transformers library.

    This decorator can be used in the following forms:
    @auto_docstring
    def my_function(...):
        ...
    or
    @auto_docstring()
    def my_function(...):
        ...
    or
    @auto_docstring(custom_intro="Custom intro", ...)
    def my_function(...):
        ...

    Args:
        custom_intro (str, optional): Custom introduction text to add to the docstring. This will replace the default
            introduction text generated by the decorator before the Args section.
        checkpoint (str, optional): Checkpoint name to use in the docstring. This should be automatically inferred from the
            model configuration class, but can be overridden if needed.

    def auto_docstring_decorator(obj):
        # Methods (qualified name like `Model.forward`) get a method docstring; bare
        # classes get a class docstring.
        if len(obj.__qualname__.split(".")) > 1:
            return auto_method_docstring(obj, custom_intro=custom_intro, checkpoint=checkpoint)
        return auto_class_docstring(obj, custom_intro=custom_intro, checkpoint=checkpoint)

    if obj:
        return auto_docstring_decorator(obj)
    return auto_docstring_decorator
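# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not meant to be executed as-is): how the
# `@auto_docstring` decorator defined above is typically applied in a modeling
# file. `MyConfig` and `MyModel` are hypothetical names, and a class decorated
# this way is expected to be registered in the Auto mappings, so a toy class
# like this one would be rejected at decoration time.
#
#     from transformers import PreTrainedModel
#     from transformers.utils import auto_docstring
#
#     @auto_docstring(custom_intro="The bare MyModel outputting raw hidden-states.")
#     class MyModel(PreTrainedModel):
#         config_class = MyConfig  # hypothetical PretrainedConfig subclass
#
#         @auto_docstring
#         def forward(self, input_ids=None, attention_mask=None, labels=None, **kwargs):
#             # Descriptions for `input_ids`, `attention_mask` and `labels` are filled in
#             # from `ModelArgs`; arguments not covered there can be documented manually
#             # in this docstring and are merged into the generated Args section.
#             ...
# ---------------------------------------------------------------------------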