
import math
from collections.abc import Iterator
from typing import Optional, TypeVar

import torch
import torch.distributed as dist
from torch.utils.data.dataset import Dataset
from torch.utils.data.sampler import Sampler

__all__ = ["DistributedSampler"]

_T_co = TypeVar("_T_co", covariant=True)


class DistributedSampler(Sampler[_T_co]):
    r"""Sampler that restricts data loading to a subset of the dataset.

It is especially useful in conjunction with
:class:`torch.nn.parallel.DistributedDataParallel`. In such a case, each
process can pass a :class:`~torch.utils.data.DistributedSampler` instance as a
:class:`~torch.utils.data.DataLoader` sampler, and load a subset of the
original dataset that is exclusive to it.

.. note::
    The dataset is assumed to be of constant size, and any instance of it is
    assumed to always return the same elements in the same order.

Args:
    dataset: Dataset used for sampling.
    num_replicas (int, optional): Number of processes participating in
        distributed training. By default, :attr:`world_size` is retrieved from the
        current distributed group.
    rank (int, optional): Rank of the current process within :attr:`num_replicas`.
        By default, :attr:`rank` is retrieved from the current distributed
        group.
    shuffle (bool, optional): If ``True`` (default), the sampler will shuffle
        the indices.
    seed (int, optional): random seed used to shuffle the sampler if
        :attr:`shuffle=True`. This number should be identical across all
        processes in the distributed group. Default: ``0``.
    drop_last (bool, optional): If ``True``, the sampler will drop the
        tail of the data to make it evenly divisible across the number of
        replicas. If ``False``, the sampler will add extra indices to make
        the data evenly divisible across the replicas. Default: ``False``.
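
.. note::
    As an illustrative example of the two modes (sizes chosen arbitrarily):
    with ``len(dataset) == 10`` and ``num_replicas == 4``, ``drop_last=False``
    pads the index list to 12 entries, so each rank draws ``ceil(10 / 4) == 3``
    samples, while ``drop_last=True`` truncates it to 8 entries, so each rank
    draws ``floor(10 / 4) == 2`` samples.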

.. warning::
    In distributed mode, calling the :meth:`set_epoch` method at
    the beginning of each epoch **before** creating the :class:`DataLoader` iterator
    is necessary to make shuffling work properly across multiple epochs. Otherwise,
the same ordering will always be used.

Example::

    >>> # xdoctest: +SKIP
    >>> sampler = DistributedSampler(dataset) if is_distributed else None
    >>> loader = DataLoader(dataset, shuffle=(sampler is None),
    ...                     sampler=sampler)
    >>> for epoch in range(start_epoch, n_epochs):
    ...     if is_distributed:
    ...         sampler.set_epoch(epoch)
    ...     train(loader)
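
A minimal sketch of the resulting round-robin split (``range(6)`` stands in
for a real dataset; the outputs follow from the stride-``num_replicas``
subsampling in :meth:`__iter__`)::

    >>> # xdoctest: +SKIP
    >>> list(DistributedSampler(range(6), num_replicas=2, rank=0, shuffle=False))
    [0, 2, 4]
    >>> list(DistributedSampler(range(6), num_replicas=2, rank=1, shuffle=False))
    [1, 3, 5]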
Ndatasetnum_replicasrankshuffleseed	drop_lastreturnc                 @   Uc:  [         R                  " 5       (       d  [        S5      e[         R                  " 5       nUc:  [         R                  " 5       (       d  [        S5      e[         R                  " 5       nX2:  d  US:  a  [        SU SUS-
   S35      eXl        X l        X0l        SU l	        X`l
        U R                  (       ao  [        U R                  5      U R                  -  S:w  aI  [        R                  " [        U R                  5      U R                  -
  U R                  -  5      U l        O;[        R                  " [        U R                  5      U R                  -  5      U l        U R                  U R                  -  U l        X@l        XPl        g )Nz,Requires distributed package to be availabler   zInvalid rank z%, rank should be in the interval [0,    ])distis_availableRuntimeErrorget_world_sizeget_rank
ValueErrorr   r   r   epochr   lenmathceilnum_samples
total_sizer   r   )selfr   r   r   r   r   r   s          T/var/www/auris/envauris/lib/python3.13/site-packages/torch/utils/data/distributed.py__init__DistributedSampler.__init__B   sR    $$&&"#QRR..0L<$$&&"#QRR==?D4!8v%J<Z[K[J\\]^  (	
" >>c$,,/$2C2CCqH  $yyT\\"T%6%66$:K:KK D  $yyT\\):T=N=N)NOD**T->->>	    c                    U R                   (       at  [        R                  " 5       nUR                  U R                  U R
                  -   5        [        R                  " [        U R                  5      US9R                  5       nO'[        [        [        U R                  5      5      5      nU R                  (       dZ  U R                  [        U5      -
  nU[        U5      ::  a  X"S U -  nO:X"[        R                  " U[        U5      -  5      -  S U -  nOUS U R                   n[        U5      U R                  :X  d   eX R                   U R                  U R"                  2   n[        U5      U R$                  :X  d   e['        U5      $ )N)	generator)r   torch	Generatormanual_seedr   r   randpermr   r   tolistlistranger   r"   r   r    r   r   r!   iter)r#   gindicespadding_sizes       r$   __iter__DistributedSampler.__iter__k   s1   <<!AMM$))djj01nnS%6!DKKMG5T\\!234G~~??S\9Ls7|+=L11diis7|0K&LL!\ 
 /0G7|t... ))doo8I8IIJ7|t/////G}r'   c                     U R                   $ )N)r!   )r#   s    r$   __len__DistributedSampler.__len__   s    r'   r   c                     Xl         g)z
Set the epoch for this sampler.

When :attr:`shuffle=True`, this ensures all replicas
use a different random ordering for each epoch. Otherwise, the next iteration of this
sampler will yield the same ordering.

Args:
    epoch (int): Epoch number.
N)r   )r#   r   s     r$   	set_epochDistributedSampler.set_epoch   s	     
r'   )	r   r   r   r   r!   r   r   r   r"   )NNTr   F)__name__
__module____qualname____firstlineno____doc__r   r   intboolr%   r   r	   r5   r8   r;   __static_attributes__ r'   r$   r   r      s    .f '+"'' sm' sm	'
 ' ' ' 
'R(5/ :   s t r'   )r   collections.abcr   typingr   r   r*   torch.distributeddistributedr   torch.utils.data.datasetr   torch.utils.data.samplerr   __all__r	   r   rE   r'   r$   <module>rM      sD     $ $    , ,  
  	4(E Er'   