
"""I-JEPA model configuration"""

from ...configuration_utils import PretrainedConfig


class IJepaConfig(PretrainedConfig):
    r"""
This is the configuration class to store the configuration of an [`IJepaModel`]. It is used to instantiate an IJEPA
model according to the specified arguments, defining the model architecture. Instantiating a configuration with the
defaults will yield a similar configuration to that of the I-JEPA
[facebook/ijepa_vith14_1k](https://huggingface.co/facebook/ijepa_vith14_1k) architecture.
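
As a quick check (a sketch; fetching the configuration requires access to the Hugging Face Hub), the
configuration of the published checkpoint can be loaded with the generic `from_pretrained` helper that
this class inherits from [`PretrainedConfig`]:

```python
>>> from transformers import IJepaConfig

>>> # Fetch the configuration used by the facebook/ijepa_vith14_1k checkpoint
>>> configuration = IJepaConfig.from_pretrained("facebook/ijepa_vith14_1k")
```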

Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
documentation from [`PretrainedConfig`] for more information.
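
For instance, the generic serialization utilities from [`PretrainedConfig`] work unchanged on this class
(a minimal sketch; the target directory `./my_ijepa_config` is hypothetical):

```python
>>> from transformers import IJepaConfig

>>> configuration = IJepaConfig()
>>> configuration.save_pretrained("./my_ijepa_config")  # writes config.json to the directory
>>> reloaded = IJepaConfig.from_pretrained("./my_ijepa_config")
>>> reloaded.hidden_size
768
```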


Args:
    hidden_size (`int`, *optional*, defaults to 768):
        Dimensionality of the encoder layers and the pooler layer.
    num_hidden_layers (`int`, *optional*, defaults to 12):
        Number of hidden layers in the Transformer encoder.
    num_attention_heads (`int`, *optional*, defaults to 12):
        Number of attention heads for each attention layer in the Transformer encoder.
    intermediate_size (`int`, *optional*, defaults to 3072):
        Dimensionality of the "intermediate" (i.e., feed-forward) layer in the Transformer encoder.
    hidden_act (`str` or `function`, *optional*, defaults to `"gelu"`):
        The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`,
        `"relu"`, `"selu"` and `"gelu_new"` are supported.
    hidden_dropout_prob (`float`, *optional*, defaults to 0.0):
        The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
    attention_probs_dropout_prob (`float`, *optional*, defaults to 0.0):
        The dropout ratio for the attention probabilities.
    initializer_range (`float`, *optional*, defaults to 0.02):
        The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
    layer_norm_eps (`float`, *optional*, defaults to 1e-12):
        The epsilon used by the layer normalization layers.
    image_size (`int`, *optional*, defaults to 224):
        The size (resolution) of each image.
    patch_size (`int`, *optional*, defaults to 16):
        The size (resolution) of each patch.
    num_channels (`int`, *optional*, defaults to 3):
        The number of input channels.
    qkv_bias (`bool`, *optional*, defaults to `True`):
        Whether to add a bias to the queries, keys and values.
    pooler_output_size (`int`, *optional*):
        Dimensionality of the pooler layer. If None, defaults to `hidden_size`.
    pooler_act (`str`, *optional*, defaults to `"tanh"`):
        The activation function to be used by the pooler. Keys of ACT2FN are supported for Flax and
        PyTorch, and elements of https://www.tensorflow.org/api_docs/python/tf/keras/activations are
        supported for TensorFlow.
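
Any of the arguments above can be overridden at construction time, and unspecified arguments keep the
defaults listed above (a sketch with hypothetical values, not a published checkpoint):

```python
>>> from transformers import IJepaConfig

>>> # A wider, deeper variant; the remaining arguments fall back to their defaults
>>> configuration = IJepaConfig(hidden_size=1024, num_hidden_layers=24, num_attention_heads=16)
>>> configuration.patch_size
16
```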

Example:

```python
>>> from transformers import IJepaConfig, IJepaModel

>>> # Initializing an IJEPA ijepa-base-patch16-224 style configuration
>>> configuration = IJepaConfig()

>>> # Initializing a model (with random weights) from the ijepa-base-patch16-224 style configuration
>>> model = IJepaModel(configuration)

>>> # Accessing the model configuration
>>> configuration = model.config
```"""

    model_type = "ijepa"

    def __init__(
        self,
        hidden_size=768,
        num_hidden_layers=12,
        num_attention_heads=12,
        intermediate_size=3072,
        hidden_act="gelu",
        hidden_dropout_prob=0.0,
        attention_probs_dropout_prob=0.0,
        initializer_range=0.02,
        layer_norm_eps=1e-12,
        image_size=224,
        patch_size=16,
        num_channels=3,
        qkv_bias=True,
        pooler_output_size=None,
        pooler_act="tanh",
        **kwargs,
    ):
        super().__init__(**kwargs)

        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.initializer_range = initializer_range
        self.layer_norm_eps = layer_norm_eps
        self.image_size = image_size
        self.patch_size = patch_size
        self.num_channels = num_channels
        self.qkv_bias = qkv_bias
        # Fall back to hidden_size when no explicit pooler output size is given
        self.pooler_output_size = pooler_output_size if pooler_output_size else hidden_size
        self.pooler_act = pooler_act


__all__ = ["IJepaConfig"]