
    hK                     ^   % S SK Jr  S SKJrJrJrJr  S SKrS SKJ	r	  SSK
Jr  SSKJr  SSKJrJrJr  SS	KJr  SS
KJrJr  / SQr " S S\	R0                  5      rS<S\\\\4      S\S\	R<                  4S jjr/ SQ/ SQ/ SQ/ SQS.r \!\\\\\4      4   \"S'   S\S\S\\   S\S\S\4S jr#S\SSS.r$ " S  S!\5      r% " S" S#\5      r& " S$ S%\5      r' " S& S'\5      r( " S( S)\5      r) " S* S+\5      r* " S, S-\5      r+ " S. S/\5      r,\" 5       \" S0\%RZ                  4S19SS2S3.S\\%   S\S\S\4S4 jj5       5       r.\" 5       \" S0\&RZ                  4S19SS2S3.S\\&   S\S\S\4S5 jj5       5       r/\" 5       \" S0\'RZ                  4S19SS2S3.S\\'   S\S\S\4S6 jj5       5       r0\" 5       \" S0\(RZ                  4S19SS2S3.S\\(   S\S\S\4S7 jj5       5       r1\" 5       \" S0\)RZ                  4S19SS2S3.S\\)   S\S\S\4S8 jj5       5       r2\" 5       \" S0\*RZ                  4S19SS2S3.S\\*   S\S\S\4S9 jj5       5       r3\" 5       \" S0\+RZ                  4S19SS2S3.S\\+   S\S\S\4S: jj5       5       r4\" 5       \" S0\,RZ                  4S19SS2S3.S\\,   S\S\S\4S; jj5       5       r5g)=    )partial)AnycastOptionalUnionN   )ImageClassification)_log_api_usage_once   )register_modelWeightsWeightsEnum)_IMAGENET_CATEGORIES)_ovewrite_named_paramhandle_legacy_interface)VGGVGG11_WeightsVGG11_BN_WeightsVGG13_WeightsVGG13_BN_WeightsVGG16_WeightsVGG16_BN_WeightsVGG19_WeightsVGG19_BN_Weightsvgg11vgg11_bnvgg13vgg13_bnvgg16vgg16_bnvgg19vgg19_bnc                      ^  \ rS rSr SS\R
                  S\S\S\SS4
U 4S jjjr	S	\
R                  S\
R                  4S
 jrSrU =r$ )r   #   featuresnum_classesinit_weightsdropoutreturnNc                   > [         TU ]  5         [        U 5        Xl        [        R
                  " S5      U l        [        R                  " [        R                  " SS5      [        R                  " S5      [        R                  " US9[        R                  " SS5      [        R                  " S5      [        R                  " US9[        R                  " SU5      5      U l        U(       Ga  U R                  5        GHs  n[        U[        R                  5      (       ad  [        R                  R!                  UR"                  SSS9  UR$                  b,  [        R                  R'                  UR$                  S	5        M  M  [        U[        R(                  5      (       aV  [        R                  R'                  UR"                  S
5        [        R                  R'                  UR$                  S	5        M  [        U[        R                  5      (       d  GM  [        R                  R+                  UR"                  S	S5        [        R                  R'                  UR$                  S	5        GMv     g g )N)   r+   i b  i   T)pfan_outrelu)modenonlinearityr   r   g{Gz?)super__init__r
   r%   nnAdaptiveAvgPool2davgpool
SequentialLinearReLUDropout
classifiermodules
isinstanceConv2dinitkaiming_normal_weightbias	constant_BatchNorm2dnormal_)selfr%   r&   r'   r(   m	__class__s         N/var/www/auris/envauris/lib/python3.13/site-packages/torchvision/models/vgg.pyr2   VGG.__init__$   s    	D! ++F3--IIk4(GGDMJJ!IIdD!GGDMJJ!IIdK(
 \\^a++GG++AHH9SY+Zvv)))!&&!4 *2>>22GG%%ahh2GG%%affa0299--GGOOAHHa6GG%%affa0 $     xc                     U R                  U5      nU R                  U5      n[        R                  " US5      nU R	                  U5      nU$ )Nr   )r%   r5   torchflattenr:   )rE   rK   s     rH   forwardVGG.forwardA   s@    MM!LLOMM!QOOArJ   )r5   r:   r%   )i  Tg      ?)__name__
__module____qualname____firstlineno__r3   Moduleintboolfloatr2   rM   TensorrO   __static_attributes____classcell__)rG   s   @rH   r   r   #   s\    hk1		1031JN1`e1	1 1: %,,  rJ   r   cfg
batch_normr)   c                 n   / nSnU  H  nUS:X  a  U[         R                  " SSS9/-  nM$  [        [        U5      n[         R                  " X4SSS9nU(       a.  X%[         R
                  " U5      [         R                  " SS9/-  nOX%[         R                  " SS9/-  nUnM     [         R                  " U6 $ )	N   Mr   )kernel_sizestrider   )ra   paddingT)inplace)r3   	MaxPool2dr   rV   r=   rC   r8   r6   )r\   r]   layersin_channelsvconv2ds         rH   make_layersrj   I   s     FK8r||!<==FS!AYY{1aHF2>>!#4bggd6KLL2774#899K  ==&!!rJ   )@   r`      r`      rm   r`      rn   r`   rn   rn   r`   )rk   rk   r`   rl   rl   r`   rm   rm   r`   rn   rn   r`   rn   rn   r`   )rk   rk   r`   rl   rl   r`   rm   rm   rm   r`   rn   rn   rn   r`   rn   rn   rn   r`   )rk   rk   r`   rl   rl   r`   rm   rm   rm   rm   r`   rn   rn   rn   rn   r`   rn   rn   rn   rn   r`   )ABDEcfgsweightsprogresskwargsc                     Ub8  SUS'   UR                   S   b#  [        US[        UR                   S   5      5        [        [	        [
        U    US940 UD6nUb  UR                  UR                  USS95        U$ )NFr'   
categoriesr&   )r]   T)ru   
check_hash)metar   lenr   rj   rs   load_state_dictget_state_dict)r\   r]   rt   ru   rv   models         rH   _vggr   b   s    !&~<<%1!&-W\\,=W9XYDI*=HHEg44hSW4XYLrJ   )    r   zUhttps://github.com/pytorch/vision/tree/main/references/classification#alexnet-and-vggzNThese weights were trained from scratch by using a simplified training recipe.)min_sizerx   recipe_docsc            
       N    \ rS rSr\" S\" \SS90 \ESSSSS	.0S
SS.ES9r\r	Sr
g)r   u   z6https://download.pytorch.org/models/vgg11-8a719046.pth   	crop_sizeihUImageNet-1KgzGAQ@gx&1(V@zacc@1zacc@5V-o@g=
ףp@
num_params_metrics_ops
_file_sizeurl
transformsrz    NrQ   rR   rS   rT   r   r   r	   _COMMON_METAIMAGENET1K_V1DEFAULTrZ   r   rJ   rH   r   r   u   sQ    D.#>

###   
M  GrJ   r   c            
       N    \ rS rSr\" S\" \SS90 \ESSSSS	.0S
SS.ES9r\r	Sr
g)r      z9https://download.pytorch.org/models/vgg11_bn-6002323d.pthr   r   ijr   gHzQ@gp=
sV@r   r   gjt@r   r   r   Nr   r   rJ   rH   r   r      Q    G.#>

###  !
M  GrJ   r   c            
       N    \ rS rSr\" S\" \SS90 \ESSSSS	.0S
SS.ES9r\r	Sr
g)r      z6https://download.pytorch.org/models/vgg13-19584684.pthr   r   i(&r   gZd{Q@g9vOV@r   V-&@gQ@r   r   r   Nr   r   rJ   rH   r   r      Q    D.#>

###  !
M  GrJ   r   c            
       N    \ rS rSr\" S\" \SS90 \ESSSSS	.0S
SS.ES9r\r	Sr
g)r      z9https://download.pytorch.org/models/vgg13_bn-abd245e5.pthr   r   i(=r   g/$Q@g-V@r   r   g=
ףp@r   r   r   Nr   r   rJ   rH   r   r      sQ    G.#>

###   
M  GrJ   r   c                       \ rS rSr\" S\" \SS90 \ESSSSS	.0S
SS.ES9r\" S\" \SSSS90 \ESSSS\	" S5      \	" S5      S	.0S
SSS.ES9r
\rSrg)r      z6https://download.pytorch.org/models/vgg16-397923af.pthr   r   i(+?r   gSQ@g rV@r   q=
ף.@g|?5^~@r   r   zIhttps://download.pytorch.org/models/vgg16_features-amdegroot-88682ab5.pth)g;pΈ?gN]?g|
?)p?r   r   )r   meanstdNz5https://github.com/amdegroot/ssd.pytorch#training-ssdnang#~j~@a`  
                These weights can't be used for classification because they are missing values in the `classifier`
                module. Only the `features` module has valid values and can be used for feature extraction. The weights
                were trained using the original input standardization method as described in the paper.
            )r   rx   r   r   r   r   r   r   )rQ   rR   rS   rT   r   r   r	   r   r   rX   IMAGENET1K_FEATURESr   rZ   r   rJ   rH   r   r      s    D.#>

###  !
M  "W,7	


#M"5\"5\  !
: GrJ   r   c            
       N    \ rS rSr\" S\" \SS90 \ESSSSS	.0S
SS.ES9r\r	Sr
g)r      z9https://download.pytorch.org/models/vgg16_bn-6c64b313.pthr   r   i(L?r   gףp=
WR@g/$V@r   r   grh~@r   r   r   Nr   r   rJ   rH   r   r      r   rJ   r   c            
       N    \ rS rSr\" S\" \SS90 \ESSSSS	.0S
SS.ES9r\r	Sr
g)r   i
  z6https://download.pytorch.org/models/vgg19-dcbb9e9d.pthr   r   i(0r   gMbR@gMbV@r   oʡ3@g rh @r   r   r   Nr   r   rJ   rH   r   r   
  r   rJ   r   c            
       N    \ rS rSr\" S\" \SS90 \ESSSSS	.0S
SS.ES9r\r	Sr
g)r   i  z9https://download.pytorch.org/models/vgg19_bn-c79401a0.pthr   r   i([r   gˡER@gSV@r   r   g/$!@r   r   r   Nr   r   rJ   rH   r   r     sQ    G.#>

###  !
M  GrJ   r   


@register_model()
@handle_legacy_interface(weights=("pretrained", VGG11_Weights.IMAGENET1K_V1))
def vgg11(*, weights: Optional[VGG11_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
    """VGG-11 from `Very Deep Convolutional Networks for Large-Scale Image Recognition <https://arxiv.org/abs/1409.1556>`__.

Args:
    weights (:class:`~torchvision.models.VGG11_Weights`, optional): The
        pretrained weights to use. See
        :class:`~torchvision.models.VGG11_Weights` below for
        more details, and possible values. By default, no pre-trained
        weights are used.
    progress (bool, optional): If True, displays a progress bar of the
        download to stderr. Default is True.
    **kwargs: parameters passed to the ``torchvision.models.vgg.VGG``
        base class. Please refer to the `source code
        <https://github.com/pytorch/vision/blob/main/torchvision/models/vgg.py>`_
        for more details about this class.

.. autoclass:: torchvision.models.VGG11_Weights
    :members:
    """
    weights = VGG11_Weights.verify(weights)

    return _vgg("A", False, weights, progress, **kwargs)

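# Note: the ``handle_legacy_interface`` decorator on ``vgg11`` keeps the deprecated
# ``pretrained=True`` keyword working by translating it into
# ``weights=VGG11_Weights.IMAGENET1K_V1`` (with a deprecation warning), so an older-style
# call such as the sketch below remains equivalent to the new-style call:
#
#     model = vgg11(pretrained=True)  # same as vgg11(weights=VGG11_Weights.IMAGENET1K_V1)
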


@register_model()
@handle_legacy_interface(weights=("pretrained", VGG11_BN_Weights.IMAGENET1K_V1))
def vgg11_bn(*, weights: Optional[VGG11_BN_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
    """VGG-11-BN from `Very Deep Convolutional Networks for Large-Scale Image Recognition <https://arxiv.org/abs/1409.1556>`__.

Args:
    weights (:class:`~torchvision.models.VGG11_BN_Weights`, optional): The
        pretrained weights to use. See
        :class:`~torchvision.models.VGG11_BN_Weights` below for
        more details, and possible values. By default, no pre-trained
        weights are used.
    progress (bool, optional): If True, displays a progress bar of the
        download to stderr. Default is True.
    **kwargs: parameters passed to the ``torchvision.models.vgg.VGG``
        base class. Please refer to the `source code
        <https://github.com/pytorch/vision/blob/main/torchvision/models/vgg.py>`_
        for more details about this class.

.. autoclass:: torchvision.models.VGG11_BN_Weights
    :members:
    """
    weights = VGG11_BN_Weights.verify(weights)

    return _vgg("A", True, weights, progress, **kwargs)


@register_model()
@handle_legacy_interface(weights=("pretrained", VGG13_Weights.IMAGENET1K_V1))
def vgg13(*, weights: Optional[VGG13_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
    """VGG-13 from `Very Deep Convolutional Networks for Large-Scale Image Recognition <https://arxiv.org/abs/1409.1556>`__.

Args:
    weights (:class:`~torchvision.models.VGG13_Weights`, optional): The
        pretrained weights to use. See
        :class:`~torchvision.models.VGG13_Weights` below for
        more details, and possible values. By default, no pre-trained
        weights are used.
    progress (bool, optional): If True, displays a progress bar of the
        download to stderr. Default is True.
    **kwargs: parameters passed to the ``torchvision.models.vgg.VGG``
        base class. Please refer to the `source code
        <https://github.com/pytorch/vision/blob/main/torchvision/models/vgg.py>`_
        for more details about this class.

.. autoclass:: torchvision.models.VGG13_Weights
    :members:
    """
    weights = VGG13_Weights.verify(weights)

    return _vgg("B", False, weights, progress, **kwargs)


@register_model()
@handle_legacy_interface(weights=("pretrained", VGG13_BN_Weights.IMAGENET1K_V1))
def vgg13_bn(*, weights: Optional[VGG13_BN_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
    """VGG-13-BN from `Very Deep Convolutional Networks for Large-Scale Image Recognition <https://arxiv.org/abs/1409.1556>`__.

Args:
    weights (:class:`~torchvision.models.VGG13_BN_Weights`, optional): The
        pretrained weights to use. See
        :class:`~torchvision.models.VGG13_BN_Weights` below for
        more details, and possible values. By default, no pre-trained
        weights are used.
    progress (bool, optional): If True, displays a progress bar of the
        download to stderr. Default is True.
    **kwargs: parameters passed to the ``torchvision.models.vgg.VGG``
        base class. Please refer to the `source code
        <https://github.com/pytorch/vision/blob/main/torchvision/models/vgg.py>`_
        for more details about this class.

.. autoclass:: torchvision.models.VGG13_BN_Weights
    :members:
    """
    weights = VGG13_BN_Weights.verify(weights)

    return _vgg("B", True, weights, progress, **kwargs)


@register_model()
@handle_legacy_interface(weights=("pretrained", VGG16_Weights.IMAGENET1K_V1))
def vgg16(*, weights: Optional[VGG16_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
    """VGG-16 from `Very Deep Convolutional Networks for Large-Scale Image Recognition <https://arxiv.org/abs/1409.1556>`__.

Args:
    weights (:class:`~torchvision.models.VGG16_Weights`, optional): The
        pretrained weights to use. See
        :class:`~torchvision.models.VGG16_Weights` below for
        more details, and possible values. By default, no pre-trained
        weights are used.
    progress (bool, optional): If True, displays a progress bar of the
        download to stderr. Default is True.
    **kwargs: parameters passed to the ``torchvision.models.vgg.VGG``
        base class. Please refer to the `source code
        <https://github.com/pytorch/vision/blob/main/torchvision/models/vgg.py>`_
        for more details about this class.

.. autoclass:: torchvision.models.VGG16_Weights
    :members:
    """
    weights = VGG16_Weights.verify(weights)

    return _vgg("D", False, weights, progress, **kwargs)

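# Feature-extraction sketch for the ``IMAGENET1K_FEATURES`` weights documented in
# ``VGG16_Weights`` (illustrative only; ``img`` is assumed to be a PIL image). Only the
# convolutional ``features`` module carries meaningful values for those weights, so the
# classifier head is dropped:
#
#     weights = VGG16_Weights.IMAGENET1K_FEATURES
#     backbone = vgg16(weights=weights).features.eval()
#     with torch.no_grad():
#         fmap = backbone(weights.transforms()(img).unsqueeze(0))  # (1, 512, 7, 7) for a 224x224 crop
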


@register_model()
@handle_legacy_interface(weights=("pretrained", VGG16_BN_Weights.IMAGENET1K_V1))
def vgg16_bn(*, weights: Optional[VGG16_BN_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
    """VGG-16-BN from `Very Deep Convolutional Networks for Large-Scale Image Recognition <https://arxiv.org/abs/1409.1556>`__.

Args:
    weights (:class:`~torchvision.models.VGG16_BN_Weights`, optional): The
        pretrained weights to use. See
        :class:`~torchvision.models.VGG16_BN_Weights` below for
        more details, and possible values. By default, no pre-trained
        weights are used.
    progress (bool, optional): If True, displays a progress bar of the
        download to stderr. Default is True.
    **kwargs: parameters passed to the ``torchvision.models.vgg.VGG``
        base class. Please refer to the `source code
        <https://github.com/pytorch/vision/blob/main/torchvision/models/vgg.py>`_
        for more details about this class.

.. autoclass:: torchvision.models.VGG16_BN_Weights
    :members:
    """
    weights = VGG16_BN_Weights.verify(weights)

    return _vgg("D", True, weights, progress, **kwargs)


@register_model()
@handle_legacy_interface(weights=("pretrained", VGG19_Weights.IMAGENET1K_V1))
def vgg19(*, weights: Optional[VGG19_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
    """VGG-19 from `Very Deep Convolutional Networks for Large-Scale Image Recognition <https://arxiv.org/abs/1409.1556>`__.

Args:
    weights (:class:`~torchvision.models.VGG19_Weights`, optional): The
        pretrained weights to use. See
        :class:`~torchvision.models.VGG19_Weights` below for
        more details, and possible values. By default, no pre-trained
        weights are used.
    progress (bool, optional): If True, displays a progress bar of the
        download to stderr. Default is True.
    **kwargs: parameters passed to the ``torchvision.models.vgg.VGG``
        base class. Please refer to the `source code
        <https://github.com/pytorch/vision/blob/main/torchvision/models/vgg.py>`_
        for more details about this class.

.. autoclass:: torchvision.models.VGG19_Weights
    :members:
    """
    weights = VGG19_Weights.verify(weights)

    return _vgg("E", False, weights, progress, **kwargs)


@register_model()
@handle_legacy_interface(weights=("pretrained", VGG19_BN_Weights.IMAGENET1K_V1))
def vgg19_bn(*, weights: Optional[VGG19_BN_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
    """VGG-19_BN from `Very Deep Convolutional Networks for Large-Scale Image Recognition <https://arxiv.org/abs/1409.1556>`__.

Args:
    weights (:class:`~torchvision.models.VGG19_BN_Weights`, optional): The
        pretrained weights to use. See
        :class:`~torchvision.models.VGG19_BN_Weights` below for
        more details, and possible values. By default, no pre-trained
        weights are used.
    progress (bool, optional): If True, displays a progress bar of the
        download to stderr. Default is True.
    **kwargs: parameters passed to the ``torchvision.models.vgg.VGG``
        base class. Please refer to the `source code
        <https://github.com/pytorch/vision/blob/main/torchvision/models/vgg.py>`_
        for more details about this class.

.. autoclass:: torchvision.models.VGG19_BN_Weights
    :members:
    """
    weights = VGG19_BN_Weights.verify(weights)

    return _vgg("E", True, weights, progress, **kwargs)