
from concurrent.futures import Future
from typing import Any, Optional

import torch.distributed as dist
import torch.distributed.checkpoint.state_dict_loader as loader
import torch.distributed.checkpoint.state_dict_saver as saver
from torch.distributed.checkpoint.metadata import Metadata, STATE_DICT_TYPE
from torch.distributed.checkpoint.storage import (
    LoadPlanner,
    SavePlanner,
    StorageReader,
    StorageWriter,
)


__all__: list[str] = []


class _Checkpointer:
    """This base class specifies a high level API for saving and loading
    distributed ``state_dict`` objects. It provides an abstraction over the
    low-level APIs provided by :py:mod:`torch.distributed.checkpoint.storage`,
    essentially calling
    :py:meth:`torch.distributed.checkpoint.state_dict_saver.save` and
    :py:meth:`torch.distributed.checkpoint.state_dict_loader.load` with the
    provided storage readers and writers.

    .. warning::
        This feature is experimental and subject to removal/change.
    """

    def __init__(
        self,
        storage_writer: StorageWriter,
        storage_reader: StorageReader,
        *,
        process_group: Optional[dist.ProcessGroup] = None,
        coordinator_rank: int = 0,
        no_dist: bool = False,
        load_planner: Optional[LoadPlanner] = None,
        save_planner: Optional[SavePlanner] = None,
    ):
        """Initializes the Checkpointer instance.

        Args:
            storage_writer: Instance of StorageWriter used to perform writes.
            storage_reader: StorageReader used to load data from.
            process_group: ProcessGroup to be used for cross-rank synchronization.
            coordinator_rank: Rank to use to coordinate the checkpoint. rank0 is used by default.
            no_dist: If ``True``, distributed checkpoint will not load in SPMD style. (Default: ``False``)
            load_planner: Instance of LoadPlanner to use when loading.
            save_planner: Instance of SavePlanner to use when saving.
        """
        self.storage_writer = storage_writer
        self.storage_reader = storage_reader
        self.process_group = process_group
        self.coordinator_rank = coordinator_rank
        self.no_dist = no_dist
        self.load_planner = load_planner
        self.save_planner = save_planner

    def save(
        self,
        state_dict: STATE_DICT_TYPE,
    ) -> Metadata:
        """Calls :py:meth:`torch.distributed.checkpoint.state_dict_saver.save`, utilizing values passed during initialization."""
        return saver.save(
            state_dict,
            self.storage_writer,
            process_group=self.process_group,
            coordinator_rank=self.coordinator_rank,
            no_dist=self.no_dist,
            planner=self.save_planner,
        )

    def async_save(
        self,
        state_dict: STATE_DICT_TYPE,
    ) -> Future:
        """
        Calls :py:meth:`torch.distributed.checkpoint.state_dict_saver.async_save`, utilizing values passed during initialization.

        Returns:
            Future: A future holding the resultant Metadata object from `save`.
        """
        return saver.async_save(
            state_dict,
            storage_writer=self.storage_writer,
            process_group=self.process_group,
            planner=self.save_planner,
        )

    def load(self, state_dict: dict[str, Any]) -> None:
        """Calls :py:meth:`torch.distributed.checkpoint.state_dict_loader.load`, utilizing values passed during initialization."""
        loader.load(
            state_dict,
            storage_reader=self.storage_reader,
            process_group=self.process_group,
            planner=self.load_planner,
        )