
import contextlib
import enum
import platform
import uuid
import warnings
import weakref
from collections import defaultdict
from typing import *  # noqa: F403
from weakref import ReferenceType

import torch
import torch.fx.traceback as fx_traceback
from torch._functorch._aot_autograd.functional_utils import is_fun
from torch.testing._internal.logging_tensor import capture_logs, LoggingTensorMode
from torch.utils._python_dispatch import TorchDispatchMode
from torch.utils._pytree import tree_map

__all__ = [
    "checkpoint",
    "checkpoint_sequential",
    "CheckpointError",
    "CheckpointFunction",
    "check_backward_validity",
    "detach_variable",
    "get_device_states",
    "set_device_states",
    "noop_context_fn",
    "set_checkpoint_early_stop",
    "DefaultDeviceType",
    "set_checkpoint_debug_enabled",
    "CheckpointPolicy",
    "SelectiveCheckpointContext",
    "create_selective_checkpoint_contexts",
    "SAC_IGNORED_OPS",
]

_DEFAULT_DETERMINISM_MODE = "default"

_checkpoint_debug_enabled: Optional[bool] = None


@contextlib.contextmanager
def set_checkpoint_debug_enabled(enabled: Optional[bool]):
    """
Context manager that sets whether checkpoint should print additional debug
information when running. See the ``debug`` flag for
:func:`~torch.utils.checkpoint.checkpoint` for more information. Note that
when set, this context manager overrides the value of ``debug`` passed to
checkpoint. To defer to the local setting, pass ``None`` to this context.

Args:
    enabled (bool): Whether checkpoint should print debug information.
        Default is ``None``.
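
Example:
    A minimal usage sketch; ``fn`` and ``inputs`` are placeholders.

    >>> # xdoctest: +SKIP("stub")
    >>> with set_checkpoint_debug_enabled(True):
    ...     out = checkpoint(fn, inputs, use_reentrant=False)
    ...     out.backward()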
N)r   )r   prevs     N/var/www/auris/envauris/lib/python3.13/site-packages/torch/utils/checkpoint.pyr   r   +   s$     )($+!$(!D!    inputs.returnc                 ^   [        U [        5      (       az  / nU  Hg  n[        U[        R                  5      (       d  UR	                  U5        M5  UR                  5       nUR                  Ul        UR	                  U5        Mi     [        U5      $ [        S[        U 5      R                  5      e)Nz@Only tuple of tensors is supported. Got Unsupported input type: )

isinstancetupletorchTensorappenddetachrequires_gradRuntimeErrortype__name__)r"   outinpxs       r    r   r   A   s    &%  Cc5<<00

3

A!//AOJJqM  SzNL!!
 	
    c                 `    [        S U  5       5      (       d  [        R                  " S5        g g )Nc              3   |   #    U  H2  n[        U[        R                  5      (       d  M$  UR                  v   M4     g 7fN)r%   r'   r(   r+   .0r0   s     r    	<genexpr>*check_backward_validity.<locals>.<genexpr>U   s&     TFSjell6S s  Fs   #<<zBNone of the inputs have requires_grad=True. Gradients will be None)anywarningswarn)r"   s    r    r   r   T   s(    TFTTTP	
 Ur2   c                 ^    U S:X  a  [         R                  " S5      $ [        [         U 5      nU$ )Nmeta)r'   devicegetattr)r?   device_modules     r    _get_device_modulerB   [   s+    ||F##E6*Mr2   c                   P    \ rS rSrSrSr\S
S\4S jj5       r\S\4S j5       r	Sr
g	)r   b   a)  
A class that manages the default device type for checkpointing.

If no non-CPU tensors are present, the default device type will
be used. The default value is 'cuda'. The device type is used in
the checkpointing process when determining which device states
to save and restore for recomputation.
    """

    _default_device_type = "cuda"

    @staticmethod
    def set_device_type(device: str = "cuda"):
        """
Set the default device type for checkpointing.

Args:
    device (str): The device type to be set as default. Default is 'cuda'.
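
Example:
    An illustrative sketch; whether a given backend name is available
    depends on your build.

    >>> # xdoctest: +SKIP("stub")
    >>> DefaultDeviceType.set_device_type("xpu")
    >>> DefaultDeviceType.get_device_type()
    'xpu'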
Nr   _default_device_type)r?   s    r    set_device_type!DefaultDeviceType.set_device_typen   s     28.r2   r#   c                  "    [         R                  $ )zl
Get the current default device type for checkpointing.

Returns:
    str: The current default device type.
        """
        return DefaultDeviceType._default_device_type


def _infer_device_type(*args):
    device_types = []

    def add_device_types(arg):
        nonlocal device_types
        if isinstance(arg, torch.Tensor) and arg.device.type != "cpu":
            device_types.append(arg.device.type)

    tree_map(add_device_types, args)

    device_types_set = set(device_types)
    if len(device_types_set) > 1:
        warnings.warn(
            "Tensor arguments, excluding CPU tensors, are detected on at least two "
            "types of devices. Device state will only be saved for devices of a "
            "single device type, and the remaining devices will be ignored. "
            "Consequently, if any checkpointed functions involve randomness, this "
            "may result in incorrect gradients. (Note that if CUDA devices are "
            "among the devices detected, it will be prioritized; otherwise, the "
            "first device encountered will be selected.)"
            f"\nDevice types: {sorted(device_types_set)} first device type: {device_types[0]}"
        )
    if len(device_types) == 0:
        return DefaultDeviceType.get_device_type()
    elif "cuda" in device_types_set:
        return "cuda"
    else:
        return device_types[0]


def get_device_states(*args) -> Tuple[List[int], List[torch.Tensor]]:
    # This will not error out if "arg" is a CPU tensor or a non-tensor type
    # because the conditional short-circuits.
    fwd_device_ids = []

    def add_device_ids(arg):
        nonlocal fwd_device_ids
        if isinstance(arg, torch.Tensor) and arg.device.type not in {"cpu", "meta"}:
            fwd_device_ids.append(arg.get_device())

    tree_map(add_device_ids, args)

    fwd_device_states = []
    device_module = _get_device_module(_infer_device_type(*args))

    for device_id in fwd_device_ids:
        with device_module.device(device_id):
            fwd_device_states.append(device_module.get_rng_state())

    return fwd_device_ids, fwd_device_states


def set_device_states(devices, states, *, device_type=None) -> None:
    """Sets random number generator states for the specified devices.

Args:
    devices: Device ids to set states for.
    states: States to set.
    device_type: ``device_type`` of the devices to set states for. Default
        is the device returned by a call to ``DefaultDeviceType.get_device_type()``,
        which is ``cuda`` if not changed by calling ``DefaultDeviceType::set_device_type()``.
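
Example:
    A sketch of the save/restore pairing that checkpoint itself uses;
    ``args`` stands for the tensors whose devices should be captured.

    >>> # xdoctest: +SKIP("stub")
    >>> devices, states = get_device_states(*args)
    >>> set_device_states(devices, states, device_type="cuda")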
Nr>   )r   rM   rB   zipr?   set_rng_state)devicesstatesro   rA   r?   states         r    r   r      si     '779f&{3MW-!!&)''. *) .))s   A**
A9	c                 Z   [         R                  R                  U 5      (       aB  [         R                  " U 5      [         R                  " U 5      [         R
                  " 5       S.nOS n[         R                  " S5      [         R                  " S5      [         R
                  " 5       S.nX4$ )N)r   dtypecache_enabledrY   )r'   ampis_autocast_availableis_autocast_enabledget_autocast_dtypeis_autocast_cache_enabled)ro   device_autocast_kwargscpu_autocast_kwargss      r    _get_autocast_kwargsr      s    yy&&{3300=--k:"<<>"
 "& ,,U3))%088: "66r2   c                   4    \ rS rSr\S 5       r\S 5       rSrg)r      c                 6   [        U5        Xl        X l        [        U6 U l        [        U R                  5      u  U l        U l        U(       ad  [        R                  " 5       U l
        SU l        [        U R                  5      n[        USS5      (       a  SU l        [        U6 u  U l        U l        / U l        / U l        / n['        U5       H  u  pg[        R(                  " U5      (       aI  UR+                  U5        U R$                  R+                  U5        U R"                  R+                  S 5        Mi  U R"                  R+                  U5        M     U R,                  " U6   [        R.                  " 5          U" U6 nS S S 5        U$ ! , (       d  f       W$ = f)NF_initializedT)r   run_functionpreserve_rng_staterd   ro   r   r~   r   r'   rk   fwd_cpu_statehad_device_in_fwdrB   r@   r   fwd_devicesrl   r"   tensor_indices	enumerate	is_tensorr)   save_for_backwardno_grad)	ctxr   r   rb   rA   tensor_inputsirZ   outputss	            r    forwardCheckpointFunction.forward   sL   %'!3,d3>ROO?
;"C$;  % 3 3 5C
 %*C!.s?M}ne<<(,%9JD9Q6!6 
oFAs##$$S)""))!,

!!$'

!!#& & 	}-]]_"D)G  _s   9F		
Fc           	         [         R                  R                  5       (       d  [        S5      e[	        U R
                  5      nU R                  nU R                  n[        U5       H  u  pVXE   X&'   M     / nU R                  (       a  U R                  (       a  U R                  n[         R                  R                  XpR                  U R                  S9   U R                  (       aZ  [         R                  " U R                   5        U R                  (       a)  [#        U R                  U R$                  U R                  S9  ['        [)        U5      5      n[         R*                  R-                  U R                  5      (       a6  [         R*                  R.                  " S	SU R                  0U R0                  D6O[2        R4                  " 5       n	[         R6                  " 5          U	   [         R*                  R.                  " S
0 U R8                  D6   U R:                  " U6 n
S S S 5        S S S 5        S S S 5        S S S 5        [=        W
[         R>                  5      (       a  U
4n
/ n/ n[A        [C        U
5      5       H]  n[         RD                  " X   5      (       d  M"  X   RF                  (       d  M7  URI                  X   5        URI                  X   5        M_     [C        U5      S:X  a  [        S5      e[         R                  RK                  X5        [)        S W 5       5      nSU-   $ ! , (       d  f       GN= f! , (       d  f       GN= f! , (       d  f       GN&= f! , (       d  f       GN0= f)NzWhen use_reentrant=True, torch.utils.checkpoint is incompatible with .grad() or passing an `inputs` parameter to .backward(). To resolve this error, you can either set use_reentrant=False, or call .backward() without passing the `inputs` argument.rs   r   ro   rn   ro   r   zInone of output has requires_grad=True, this checkpoint() is not necessaryc              3   |   #    U  H2  n[        U[        R                  5      (       a  UR                  OS v   M4     g 7fr5   )r%   r'   r(   gradr6   s     r    r8   .CheckpointFunction.backward.<locals>.<genexpr>A  s0      
& #355CHH4?&s   :<)NNrL   rY   )&r'   autograd_is_checkpoint_validr,   listr"   r   saved_tensorsr   r   r   r   randomfork_rngro   rr   r   r   rl   r   r&   ry   rz   autocastr~   
contextlibnullcontextenable_gradr   r   r%   r(   ranger`   r   r+   r)   backward)r   rb   r"   r   tensorsr   idxrng_devicesdetached_inputsdevice_autocast_ctxr   outputs_with_gradargs_with_gradgradss                 r    r   CheckpointFunction.backward
  s   ~~2244N  cjj!++##  /FA!*FK 0 !!c&;&;//K\\"")?)?S__ # 
 %%##C$5$56((%coos7L7LZ]ZiZij-eFm<O 00AA #())"4"4 #OO#/2/I/I#GQG]G]G_   ""$&9599;M;M;oWZWnWn;o**O< <p&9$
 gu||,,jG s7|$Awz**wz/G/G/G!((4%%dg. %  !Q&6  	 1B 
&
 

 e##1 <p;o&9&9$$
 
sU   DM%&M)+ML/	$M,M4M%/
L>9M
MM
M"	M%%
M4rL   N)r.   rP   rQ   rR   rT   r   r   rV   rL   r2   r    r   r      s)    & &P ;$ ;$r2   r   c                  V    [         R                  " 5       [         R                  " 5       4$ r5   )r   r   rL   r2   r    r   r   I  s    !!#Z%;%;%===r2   F)use_reentrant
context_fndeterminism_checkdebugr   r   r   r   c                   Uc  [         R                  " SSS9  SnUR                  SS5      nU(       a+  U(       a$  [        SSR	                  S	 U 5       5      -   5      eU(       a0  U[
        Ld  US
La  [        S5      e[        R                  " X/UQ76 $ [        XX#U/UQ70 UD6n[        U5        U " U0 UD6n	 [        U5        g! [         a    U	s $ f = f)a  Checkpoint a model or part of the model.

Activation checkpointing is a technique that trades compute for memory.
Instead of keeping tensors needed for backward alive until they are used in
gradient computation during backward, forward computation in checkpointed
regions omits saving tensors for backward and recomputes them during the
backward pass. Activation checkpointing can be applied to any part of a
model.

There are currently two checkpointing implementations available, determined
by the :attr:`use_reentrant` parameter. It is recommended that you use
``use_reentrant=False``. Please refer to the note below for a discussion of
their differences.

.. warning::

    If the :attr:`function` invocation during the backward pass differs
    from the forward pass, e.g., due to a global variable, the checkpointed
    version may not be equivalent, potentially causing an
    error being raised or leading to silently incorrect gradients.

.. warning::

    The ``use_reentrant`` parameter should be passed explicitly. In version
    2.5 we will raise an exception if ``use_reentrant`` is not passed.
    If you are using the ``use_reentrant=True`` variant, please refer to the
    note below for important considerations and potential limitations.

.. note::

    The reentrant variant of checkpoint (``use_reentrant=True``) and
    the non-reentrant variant of checkpoint (``use_reentrant=False``)
    differ in the following ways:

    * Non-reentrant checkpoint stops recomputation as soon as all needed
      intermediate activations have been recomputed. This feature is enabled
      by default, but can be disabled with :func:`set_checkpoint_early_stop`.
      Reentrant checkpoint always recomputes :attr:`function` in its
      entirety during the backward pass.

    * The reentrant variant does not record the autograd graph during the
      forward pass, as it runs with the forward pass under
      :func:`torch.no_grad`. The non-reentrant version does record the
      autograd graph, allowing one to perform backward on the graph within
      checkpointed regions.

    * The reentrant checkpoint only supports the
      :func:`torch.autograd.backward` API for the backward pass without its
      `inputs` argument, while the non-reentrant version supports all ways
      of performing the backward pass.

    * At least one input and output must have ``requires_grad=True`` for the
      reentrant variant. If this condition is unmet, the checkpointed part
      of the model will not have gradients. The non-reentrant version does
      not have this requirement.

    * The reentrant version does not consider tensors in nested structures
      (e.g., custom objects, lists, dicts, etc) as participating in
      autograd, while the non-reentrant version does.

    * The reentrant checkpoint does not support checkpointed regions with
      detached tensors from the computational graph, whereas the
      non-reentrant version does. For the reentrant variant, if the
      checkpointed segment contains tensors detached using ``detach()`` or
      with :func:`torch.no_grad`, the backward pass will raise an error.
      This is because ``checkpoint`` makes all the outputs require gradients
      and this causes issues when a tensor is defined to have no gradient in
      the model. To avoid this, detach the tensors outside of the
      ``checkpoint`` function.

Args:
    function: describes what to run in the forward pass of the model or
        part of the model. It should also know how to handle the inputs
        passed as the tuple. For example, in LSTM, if user passes
        ``(activation, hidden)``, :attr:`function` should correctly use the
        first input as ``activation`` and the second input as ``hidden``
    preserve_rng_state(bool, optional):  Omit stashing and restoring
        the RNG state during each checkpoint. Note that under torch.compile,
        this flag doesn't take effect and we always preserve RNG state.
        Default: ``True``
    use_reentrant(bool):
        specify whether to use the activation checkpoint variant that
        requires reentrant autograd. This parameter should be passed
        explicitly. In version 2.5 we will raise an exception if
        ``use_reentrant`` is not passed. If ``use_reentrant=False``,
        ``checkpoint`` will use an implementation that does not require
        reentrant autograd. This allows ``checkpoint`` to support additional
        functionality, such as working as expected with
        ``torch.autograd.grad`` and support for keyword arguments input into
        the checkpointed function.
    context_fn(Callable, optional): A callable returning a tuple of two
        context managers. The function and its recomputation will be run
        under the first and second context managers respectively.
        This argument is only supported if ``use_reentrant=False``.
    determinism_check(str, optional): A string specifying the determinism
        check to perform. By default it is set to ``"default"`` which
        compares the shapes, dtypes, and devices of the recomputed tensors
        against those of the saved tensors. To turn off this check, specify
        ``"none"``. Currently these are the only two supported values.
        Please open an issue if you would like to see more determinism
        checks. This argument is only supported if ``use_reentrant=False``,
        if ``use_reentrant=True``, the determinism check is always disabled.
    debug(bool, optional): If ``True``, error messages will also include
        a trace of the operators run during the original forward computation
        as well as the recomputation. This argument is only supported if
        ``use_reentrant=False``.
    args: tuple containing inputs to the :attr:`function`

Returns:
    Output of running :attr:`function` on :attr:`*args`
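
Example:
    A minimal sketch; the two-layer module and shapes below are
    illustrative placeholders only.

    >>> # xdoctest: +SKIP("stub")
    >>> import torch.nn as nn
    >>> seg = nn.Sequential(nn.Linear(10, 10), nn.ReLU(), nn.Linear(10, 10))
    >>> x = torch.randn(4, 10, requires_grad=True)
    >>> out = checkpoint(seg, x, use_reentrant=False)
    >>> out.sum().backward()  # ``seg`` is re-run here to rebuild activations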
    """
    if use_reentrant is None:
        warnings.warn(
            "torch.utils.checkpoint: the use_reentrant parameter should be "
            "passed explicitly. In version 2.5 we will raise an exception "
            "if use_reentrant is not passed. use_reentrant=False is "
            "recommended, but if you need to preserve the current default "
            "behavior, you can pass use_reentrant=True. Refer to docs for more "
            "details on the differences between the two variants.",
            stacklevel=2,
        )
        use_reentrant = True

    # pop preserve_rng_state from kwargs so it can coexist with *args
    preserve = kwargs.pop("preserve_rng_state", True)
    if kwargs and use_reentrant:
        raise ValueError(
            "Unexpected keyword arguments: " + ",".join(arg for arg in kwargs)
        )

    if use_reentrant:
        if context_fn is not noop_context_fn or debug is not False:
            raise ValueError(
                "Passing `context_fn` or `debug` is only supported when "
                "use_reentrant=False."
            )
        return CheckpointFunction.apply(function, preserve, *args)
    else:
        gen = _checkpoint_without_reentrant_generator(
            function, preserve, context_fn, determinism_check, debug, *args, **kwargs
        )
        # Runs pre-forward logic
        next(gen)
        ret = function(*args, **kwargs)
        # Runs post-forward logic
        try:
            next(gen)
        except StopIteration:
            return ret


def checkpoint_sequential(functions, segments, input, use_reentrant=None, **kwargs):
    r"""Checkpoint a sequential model to save memory.

Sequential models execute a list of modules/functions in order
(sequentially). Therefore, we can divide such a model in various segments
and checkpoint each segment. All segments except the last will not store
the intermediate activations. The inputs of each checkpointed segment will
be saved for re-running the segment in the backward pass.

.. warning::
    The ``use_reentrant`` parameter should be passed explicitly. In version
    2.5 we will raise an exception if ``use_reentrant`` is not passed.
    If you are using the ``use_reentrant=True`` variant, please see
    :func:`~torch.utils.checkpoint.checkpoint` for
    the important considerations and limitations of this variant. It is
    recommended that you use ``use_reentrant=False``.

.. warning::
    Since PyTorch 1.4, it allows only one Tensor as the input and
    intermediate outputs, just like :class:`torch.nn.Sequential`.

Args:
    functions: A :class:`torch.nn.Sequential` or the list of modules or
        functions (comprising the model) to run sequentially.
    segments: Number of chunks to create in the model
    input: A Tensor that is input to :attr:`functions`
    preserve_rng_state(bool, optional):  Omit stashing and restoring
        the RNG state during each checkpoint.
        Default: ``True``
    use_reentrant(bool):
        specify whether to use the activation checkpoint variant that
        requires reentrant autograd. This parameter should be passed
        explicitly. In version 2.5 we will raise an exception if
        ``use_reentrant`` is not passed. If ``use_reentrant=False``,
        ``checkpoint`` will use an implementation that does not require
        reentrant autograd. This allows ``checkpoint`` to support additional
        functionality, such as working as expected with
        ``torch.autograd.grad`` and support for keyword arguments input into
        the checkpointed function.

Returns:
    Output of running :attr:`functions` sequentially on :attr:`*inputs`

Example:
    >>> # xdoctest: +SKIP("stub")
    >>> model = nn.Sequential(...)
    >>> input_var = checkpoint_sequential(model, chunks, input_var)
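    >>>
    >>> # A more concrete sketch (layer sizes are illustrative):
    >>> model = nn.Sequential(nn.Linear(8, 8), nn.ReLU(), nn.Linear(8, 8), nn.ReLU())
    >>> input_var = torch.randn(2, 8, requires_grad=True)
    >>> out = checkpoint_sequential(model, 2, input_var, use_reentrant=False)
    >>> out.sum().backward()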
    """
    if use_reentrant is None:
        warnings.warn(
            "torch.utils.checkpoint.checkpoint_sequential: the use_reentrant "
            "parameter should be passed explicitly. In version 2.5 we will "
            "raise an exception if use_reentrant is not passed. "
            "use_reentrant=False is recommended, but if you need to preserve "
            "the current default behavior, you can pass use_reentrant=True. "
            "Refer to docs for more details on the differences between the "
            "two variants."
        )
        use_reentrant = True

    # pop preserve_rng_state from kwargs; no other keywords are accepted
    preserve = kwargs.pop("preserve_rng_state", True)
    if kwargs:
        raise ValueError(
            "Unexpected keyword arguments: " + ",".join(arg for arg in kwargs)
        )

    def run_function(start, end, functions):
        def forward(input):
            for j in range(start, end + 1):
                input = functions[j](input)
            return input

        return forward

    if isinstance(functions, torch.nn.Sequential):
        functions = list(functions.children())

    segment_size = len(functions) // segments
    # the last chunk has to be non-volatile
    end = -1
    for start in range(0, segment_size * (segments - 1), segment_size):
        end = start + segment_size - 1
        input = checkpoint(
            run_function(start, end, functions),
            input,
            use_reentrant=use_reentrant,
            preserve_rng_state=preserve,
        )
    return run_function(end + 1, len(functions) - 1, functions)(input)


def _internal_assert(cond):
    if not cond:
        raise AssertionError(
            "Something went unexpectedly wrong in activation checkpoint. "
            "Please report this bug by filing an issue to PyTorch."
        )


_enable_checkpoint_early_stop = True


@contextlib.contextmanager
def set_checkpoint_early_stop(enable: bool):
    """Context manager that sets whether checkpoint should stop recomputation early.

By default, non-reentrant checkpoint stops recomputation as soon as it
has computed all needed Tensors. This context manager can be used to disable
that feature if it is problematic for your specific application.

This context manager only needs to be active when forward is run. It does
not need to be active during backward.

Example::

>>> # xdoctest: +SKIP(failing)
>>> message = "saved tensors default hooks are disabled"
>>> with set_checkpoint_early_stop(False):
...     # Any checkpoint under this context manager will respect this
...     # context manager, even if its backward is performed outside.
...     out = checkpoint(fn, inputs)
...
>>> out.backward()
    """
    global _enable_checkpoint_early_stop
    try:
        prev = _enable_checkpoint_early_stop
        _enable_checkpoint_early_stop = enable
        yield
    finally:
        _enable_checkpoint_early_stop = prev


class _Handle:
    pass


class _Holder:
    def __init__(self):
        self.handles: Dict[int, Optional[_Handle]] = {}


class _NoopSaveInputs(torch.autograd.Function):
    @staticmethod
    def forward(*args):
        return torch.empty((0,))

    @staticmethod
    def setup_context(ctx: Any, inputs: Tuple[Any, ...], output: Any) -> None:
        # Only tensors can be saved with ctx.save_for_backward; everything
        # else is captured by get_args, which is saved directly on ctx.
        tensor_indices, tensors = zip(
            *[(i, o) for i, o in enumerate(inputs) if isinstance(o, torch.Tensor)]
        )
        idx2saved_idx = {b: a for a, b in enumerate(tensor_indices)}
        # args, but with tensors replaced with None as placeholders
        args = [None if isinstance(o, torch.Tensor) else o for o in inputs]

        def get_args(saved_tensors):
            # Restore the placeholders with the original tensors grabbed from
            # ctx.saved_tensors (which may be saved on a parent checkpoint if
            # this checkpoint is nested, and that would trigger a recursive
            # unpack!)
            ret = [
                saved_tensors[idx2saved_idx[i]] if i in tensor_indices else o
                for i, o in enumerate(args)
            ]
            # Grab the tail since we also saved the dummy to avoid having to
            # explicitly handle the case where there are no tensor inputs.
            return ret[1:]

        ctx.get_args = get_args
        ctx.save_for_backward(*tensors)

    @staticmethod
    def backward(ctx, *grad_outputs):
        raise AssertionError("Did not expect to backward on this graph")


class _CheckpointFrame:
    def __init__(self, recompute_fn, early_stop, unpack_error_cb, metadata_fn):
        self.recompute_fn = recompute_fn
        self.input_saver = None
        self.weak_holders: List[ReferenceType] = []
        # We store this as a WeakKeyDictionary so that in the case of a
        # partial backward, the entries in the dict are cleared alongside the
        # Holder when the SavedVariable is cleared.
        self.recomputed: DefaultDict[int, weakref.WeakKeyDictionary] = defaultdict(
            weakref.WeakKeyDictionary
        )
        # We need both recomp_counter and recomputed since they can diverge.
        self.recomp_counter: DefaultDict[int, int] = defaultdict(int)
        self.is_recomputed: DefaultDict[int, bool] = defaultdict(bool)

        self.early_stop = early_stop

        # Debugging
        self.metadata_fn = metadata_fn
        self.unpack_error_cb = unpack_error_cb
        self.x_metadatas = []
        self.forward_completed = False
        self.ignore_saved_mismatch = False

    def check_recomputed_tensors_match(self, gid):
        if self.ignore_saved_mismatch:
            # This checkpoint ran grad within forward without early stop, so
            # the counts are expected to diverge.
            return
        if not len(self.weak_holders) == self.recomp_counter[gid]:
            # Something went wrong while recomputing: not all the tensors
            # that were needed were recomputed.
            raise CheckpointError(
                "torch.utils.checkpoint: A different number of tensors was saved "
                "during the original forward and recomputation.\n"
                f"Number of tensors saved during forward: {len(self.weak_holders)}\n"
                f"Number of tensors saved during recomputation: {self.recomp_counter[gid]}"
            )

        # Loop through (original fwd metadata, recomputed metadata) pairs and
        # collect any mismatches.
        nb_meta_different = []
        for idx, weak_holder in enumerate(self.weak_holders):
            holder = weak_holder()
            if holder is None:
                continue
            # We've seen all holders since we iterate over leaf nodes.
            _internal_assert(gid in holder.handles)
            # We know this is the first unpack, so it couldn't have been set
            # to None yet.
            _internal_assert(holder.handles[gid] is not None)
            # We always set these together in the recomputation hook.
            _internal_assert(holder.handles[gid] in self.recomputed[gid])
            # x_metadatas is 1:1 with weak_holders; see the pack hook.
            x_meta = self.x_metadatas[idx]
            recomputed_x = self.recomputed[gid][holder.handles[gid]]
            if x_meta != self.metadata_fn(recomputed_x):
                nb_meta_different.append((idx, x_meta, self.metadata_fn(recomputed_x)))

        if len(nb_meta_different) > 0:
            mismatched_tensors = ""
            for idx, x_meta, recomputed_meta in nb_meta_different:
                mismatched_tensors += (
                    f"tensor at position {idx}:\n"
                    f"saved metadata: {x_meta}\n"
                    f"recomputed metadata: {recomputed_meta}\n"
                )
            raise CheckpointError(
                "torch.utils.checkpoint: Recomputed values for the following "
                "tensors have different metadata than during the forward pass.\n"
                f"{mismatched_tensors}"
            )


_checkpoint_error_template = """ \
   An error happened while unpacking tensors; dumping logs of latest computation
because you passed `debug=True` to `torch.utils.checkpoint.checkpoint()`.
Scroll all the way down for guidance on how to navigate these logs.

+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+
|        1. Stack traces of the operators that ran in the original forward     |
+------------------------------------------------------------------------------+

{forward_traces}
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+
|        2. Stack traces of the operators that ran during recomputation        |
+------------------------------------------------------------------------------+

{recompute_traces}
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+
|       3. Log of operators in the original forward and recomputation          |
+------------------------------------------------------------------------------+
(Scroll up to correlate stack traces with each operation listed below. This
 helps identify their source in the code.)

IMPORTANT: Differences in "detach" calls between the original forward and the
           recomputation are expected. They are introduced by the checkpointing
           mechanism and can be ignored.

Operations executed during the original forward:

{forward_ops}

Operations executed during recomputation:

{recompute_ops}

+------------------------------------------------------------------------------+
 ERROR: Detected non-determinism while running activation checkpointing

 You are seeing this error because you passed `debug=True` to checkpoint and
 the tensors saved during the original forward differ from those saved
 during recomputation. This can happen if different operators were run in
 the original forward and in the recomputation.

 To identify where the mismatch may be coming from, you can do the following:

 1) Compare the operators run during the original forward and recomputation to
    see where they differ. These operators are printed above in the order they
    were executed.

 2) Review the stack trace for each operator to locate its invocation source.
    Each operator's stack trace is printed in their execution order.

 Note that the logs can be quite long. Here's how they are structured:
 (Tip: you can Ctrl-f for these headers)

 1. Stack traces of the operators that ran in the original forward
 2. Stack traces of the operators that ran during recomputation
 3. Log of operators in the original forward and recomputation
 4. Error message                                             <--- You are here
--------------------------------------------------------------------------------
"""


class CheckpointError(RuntimeError):
    pass


def _get_debug_context_and_cb() -> (
    Tuple[Callable[[], Any], Callable[[CheckpointError], None]]
):
    # record_context_cpp is not supported on non-linux, non-x86_64 platforms
    cpp_tb = platform.machine() == "x86_64" and platform.system() == "Linux"

    class CaptureLogs:
        def __init__(self):
            self.logs = None
            self.tbs = None

        def get_context_manager(self):
            @contextlib.contextmanager
            def logging_mode():
                with LoggingTensorMode(), capture_logs(
                    True, python_tb=True, script_tb=True, cpp_tb=cpp_tb
                ) as logs_and_tb:
                    self.logs, self.tbs = logs_and_tb
                    yield logs_and_tb

            return logging_mode()

    capture_logs_fwd = CaptureLogs()
    capture_logs_recompute = CaptureLogs()

    def unpack_error_cb(e: CheckpointError):
        def get_str_tb(label, capture_logs):
            out = ""
            total_len = len(capture_logs.logs)
            for i, (log, tb) in enumerate(zip(capture_logs.logs, capture_logs.tbs)):
                out += f"{log}   ({i + 1} of {total_len} in {label})\n\n"
                found_torch_dispatch = False
                for line in tb:
                    # Start printing the stack trace only after
                    # __torch_dispatch__ is found.
                    is_torch_dispatch = line["name"] == "__torch_dispatch__"
                    if not found_torch_dispatch and not is_torch_dispatch:
                        continue
                    elif is_torch_dispatch:
                        found_torch_dispatch = True
                        continue
                    out += f"{line['filename']}:{line['line']}:{line['name']}\n"
                out += "\n\n"
            return out

        assert capture_logs_fwd.logs is not None
        assert capture_logs_recompute.logs is not None
        raise CheckpointError(
            _checkpoint_error_template.format(
                forward_traces=get_str_tb("original", capture_logs_fwd),
                recompute_traces=get_str_tb("recompute", capture_logs_recompute),
                forward_ops="\n".join(capture_logs_fwd.logs),
                recompute_ops="\n".join(capture_logs_recompute.logs),
            )
        ) from e

    def context_fn():
        return (
            capture_logs_fwd.get_context_manager(),
            capture_logs_recompute.get_context_manager(),
        )

    return context_fn, unpack_error_cb


def _default_meta_extractor(x: torch.Tensor) -> Dict[str, Any]:
    # These properties are fast to check and easy to understand.
    return {
        "shape": x.shape,
        "dtype": x.dtype,
        "device": x.device,
    }


_allowed_determinism_checks_to_fns: Dict[str, Callable[[torch.Tensor], Any]] = {
    _DEFAULT_DETERMINISM_MODE: _default_meta_extractor,
    "none": lambda _: None,
}


class _StopRecomputationError(Exception):
    pass


class _recomputation_hook(torch.autograd.graph.saved_tensors_hooks):
    def __init__(self, target_frame_ref: ReferenceType, gid: int):
        def pack_hook(x):
            x = x.detach() if x.requires_grad else x
            target_frame = target_frame_ref()
            assert target_frame is not None
            recomp_idx = target_frame.recomp_counter[gid]
            target_frame.recomp_counter[gid] += 1

            if recomp_idx >= len(target_frame.weak_holders):
                assert not target_frame.early_stop
                if not target_frame.forward_completed:
                    # We run into this case when early stop is not enabled and
                    # grad is called within checkpoint. Set this flag so we
                    # don't error out later when we check whether the number
                    # of tensors saved during forward and recomputation match.
                    target_frame.ignore_saved_mismatch = True
                    return x
                raise CheckpointError(
                    "torch.utils.checkpoint: trying to save more tensors during "
                    "recomputation than during the original forward pass."
                )

            holder = target_frame.weak_holders[recomp_idx]()

            # This holder may have been cleared because someone may have
            # called backward within forward (or the graph is no longer
            # alive).
            if holder is not None:
                _internal_assert(holder.handles.get(gid, None) is None)
                holder.handles[gid] = _Handle()
                target_frame.recomputed[gid][holder.handles[gid]] = x

            if target_frame.early_stop and target_frame.recomp_counter[gid] == len(
                target_frame.weak_holders
            ):
                raise _StopRecomputationError
            return x

        def unpack_hook(x):
            # The graph created during recomputation may itself be backwarded
            # (e.g. with retain_graph=True); unpack is then a no-op.
            return x

        super().__init__(pack_hook, unpack_hook)


class _checkpoint_hook(torch.autograd.graph.saved_tensors_hooks):
    def __init__(self, frame):
        def pack_hook(x):
            # Instead of saving the tensor, save a Holder that the
            # recomputation fills in later.
            holder = _Holder()
            frame.weak_holders.append(weakref.ref(holder))
            # Save metadata to detect non-determinism.
            if frame.metadata_fn is not None:
                with torch.no_grad():
                    frame.x_metadatas.append(frame.metadata_fn(x))
            return holder

        def unpack_hook(holder):
            gid = torch._C._current_graph_task_id()
            if gid == -1:
                # Generate a temporary id if we trigger unpack outside of a
                # backward call.
                gid = int(uuid.uuid4())

            if not frame.is_recomputed[gid]:
                ctx = frame.input_saver.grad_fn
                args = ctx.get_args(ctx.saved_tensors)

                try:
                    with _recomputation_hook(
                        weakref.ref(frame), gid
                    ), torch.autograd.enable_grad():
                        frame.recompute_fn(*args)
                except _StopRecomputationError:
                    pass
                frame.is_recomputed[gid] = True
                frame.check_recomputed_tensors_match(gid)

            _internal_assert(gid in holder.handles)

            if holder.handles[gid] is None:
                raise CheckpointError(
                    "torch.utils.checkpoint: Unpack is being triggered for a "
                    "tensor that was already unpacked once. If you are calling "
                    "ctx.saved_tensors in backward, make sure to do so only "
                    "once. Otherwise please open an issue with details on your "
                    "use case."
                )
            _internal_assert(holder.handles[gid] in frame.recomputed[gid])
            ret = frame.recomputed[gid][holder.handles[gid]]
            holder.handles[gid] = None
            return ret

        if frame.unpack_error_cb is not None:

            def unpack_hook_with_error_cb(holder):
                try:
                    return unpack_hook(holder)
                except CheckpointError as e:
                    frame.unpack_error_cb(e)

            super().__init__(pack_hook, unpack_hook_with_error_cb)
        else:
            super().__init__(pack_hook, unpack_hook)


def _is_compiling(func, args, kwargs):
    # Check if we are under AOTAutograd tracing.
    for arg in args:
        if isinstance(arg, torch.Tensor) and is_fun(arg):
            return True
    return False


class _VersionWrapper:
    # Check that cached tensors are not mutated.
    def __init__(self, val):
        self.val: Union[torch.Tensor, Any] = val
        self.version: Optional[int] = (
            val._version if isinstance(val, torch.Tensor) else None
        )

    def get_val(self, allow_cache_entry_mutation):
        if self.version is not None and not allow_cache_entry_mutation:
            if self.val._version != self.version:
                raise RuntimeError(
                    "Tensor cached during selective activation checkpoint has been mutated"
                )
        return self.val


def _maybe_detach(x, any_ret_has_alias_info):
    # Detach so the cached tensor does not keep the original forward's
    # autograd graph alive. The detach runs with ADInplaceOrView excluded so
    # that view metadata and version counters still propagate correctly for
    # ops that return aliases.
    if isinstance(x, torch.Tensor) and (
        x.is_floating_point() or x.is_complex() or any_ret_has_alias_info
    ):
        with torch._C._SetExcludeDispatchKeyGuard(
            torch._C.DispatchKey.ADInplaceOrView, False
        ):
            x = x.detach()
    return x


class SelectiveCheckpointContext:
    """
Context passed to policy function during selective checkpointing.

This class is used to pass relevant metadata to the policy function during
selective checkpointing. The metadata includes whether the current invocation
of the policy function is during recomputation or not.

Example:
    >>> # xdoctest: +SKIP(stub)
    >>>
    >>> def policy_fn(ctx, op, *args, **kwargs):
    >>>    print(ctx.is_recompute)
    >>>
    >>> context_fn = functools.partial(create_selective_checkpoint_contexts, policy_fn)
    >>>
    >>> out = torch.utils.checkpoint.checkpoint(
    >>>     fn, x, y,
    >>>     use_reentrant=False,
    >>>     context_fn=context_fn,
    >>> )
    """

    def __init__(self, *, is_recompute):
        self.is_recompute = is_recompute


class CheckpointPolicy(enum.Enum):
    """
Enum for specifying the policy for checkpointing during backpropagation.

The following policies are supported:

- ``{MUST,PREFER}_SAVE``: The operation's output will be saved during the forward
  pass and will not be recomputed during the backward pass
- ``{MUST,PREFER}_RECOMPUTE``: The operation's output will not be saved during the
  forward pass and will be recomputed during the backward pass

Use ``MUST_*`` over ``PREFER_*`` to indicate that the policy should not be overridden
by other subsystems like `torch.compile`.

.. note::
    A policy function that always returns ``PREFER_RECOMPUTE`` is
    equivalent to vanilla checkpointing.

    A policy function that returns ``PREFER_SAVE`` for every op is
    NOT equivalent to not using checkpointing. Using such a policy would
    save additional tensors not limited to ones that are actually needed for
    gradient computation.
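
Example:
    A sketch of a policy: always save matmul outputs and let everything
    else follow the default recompute behavior.

    >>> # xdoctest: +SKIP("stub")
    >>> def policy_fn(ctx, op, *args, **kwargs):
    ...     if op == torch.ops.aten.mm.default:
    ...         return CheckpointPolicy.MUST_SAVE
    ...     return CheckpointPolicy.PREFER_RECOMPUTE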
    """

    MUST_SAVE = 0
    PREFER_SAVE = 1
    MUST_RECOMPUTE = 2
    PREFER_RECOMPUTE = 3


def _policy_from_bool(b):
    # For backward compatibility with boolean-returning policy functions.
    return CheckpointPolicy.MUST_SAVE if b else CheckpointPolicy.PREFER_RECOMPUTE


SAC_IGNORED_OPS = {
    # AC inserts different numbers of detach calls during the original
    # forward and the recompute.
    torch.ops.aten.detach.default,
    # AC's determinism check invokes additional metadata ops during forward.
    # With subclasses involved, these metadata ops become dispatchable; this
    # can result in incorrectness if these ops are selected to be cached.
    torch.ops.prim.device.default,
} | set(torch._subclasses.functional_tensor.FunctionalTensor.metadata_fns)


class _CachingTorchDispatchMode(TorchDispatchMode):
    # Used together with _CachedTorchDispatchMode to implement selective
    # activation checkpointing (SAC): this mode runs during the original
    # forward and stores the outputs the policy says to save.
    def __init__(self, policy_fn, storage):
        self.policy_fn = policy_fn
        self.storage = storage

    def __torch_dispatch__(self, func, types, args=(), kwargs=None):
        if func in SAC_IGNORED_OPS:
            return func(*args, **kwargs)

        kwargs = {} if kwargs is None else kwargs
        policy = self.policy_fn(
            SelectiveCheckpointContext(is_recompute=False), func, *args, **kwargs
        )
        if isinstance(policy, bool):
            policy = _policy_from_bool(policy)

        is_compiling = _is_compiling(func, args, kwargs)

        if is_compiling:
            # Overwrite each node's "recompute" tag to add in the user annotation.
            fx_traceback.current_meta["recompute"] = policy

        out = func(*args, **kwargs)

        any_ret_has_alias_info = any(
            ret.alias_info is not None for ret in func._schema.returns
        )

        if (
            policy in (CheckpointPolicy.MUST_SAVE, CheckpointPolicy.PREFER_SAVE)
            or is_compiling
        ):
            self.storage[func].append(
                tree_map(
                    lambda x: _VersionWrapper(_maybe_detach(x, any_ret_has_alias_info)),
                    out,
                )
            )
        return out


class _CachedTorchDispatchMode(TorchDispatchMode):
    # This mode runs during recomputation and replays the saved outputs
    # instead of re-executing the ops the policy chose to save.
    def __init__(self, policy_fn, storage, allow_cache_entry_mutation):
        self.policy_fn = policy_fn
        self.storage = storage
        self.allow_cache_entry_mutation = allow_cache_entry_mutation

    def __torch_dispatch__(self, func, types, args=(), kwargs=None):
        if func in SAC_IGNORED_OPS:
            return func(*args, **kwargs)

        kwargs = {} if kwargs is None else kwargs
        policy = self.policy_fn(
            SelectiveCheckpointContext(is_recompute=True), func, *args, **kwargs
        )
        if isinstance(policy, bool):
            policy = _policy_from_bool(policy)

        is_compiling = _is_compiling(func, args, kwargs)

        if (
            policy in (CheckpointPolicy.MUST_SAVE, CheckpointPolicy.PREFER_SAVE)
            or is_compiling
        ):
            storage = self.storage.get(func)
            if storage is None:
                raise RuntimeError(
                    f"{func} encountered during backward, but not found in storage"
                )
            if len(storage) == 0:
                raise RuntimeError(
                    "Trying to backward an extra time. You are only allowed to "
                    "backward once on any region computed under selective "
                    "activation checkpoint."
                )
            out = tree_map(
                lambda x: x.get_val(self.allow_cache_entry_mutation), storage.pop(0)
            )
        else:
            out = func(*args, **kwargs)
        return out


def create_selective_checkpoint_contexts(
    policy_fn_or_list, allow_cache_entry_mutation=False
):
    """
Helper to avoid recomputing certain ops during activation checkpointing.

Use this with `torch.utils.checkpoint.checkpoint` to control which
operations are recomputed during the backward pass.

Args:
    policy_fn_or_list (Callable or List):
      - If a policy function is provided, it should accept a
        :class:`SelectiveCheckpointContext`, the :class:`OpOverload`, args and
        kwargs to the op, and return a :class:`CheckpointPolicy` enum value
        indicating whether the execution of the op should be recomputed or not.
      - If a list of operations is provided, it is equivalent to a policy
        returning `CheckpointPolicy.MUST_SAVE` for the specified
        operations and `CheckpointPolicy.PREFER_RECOMPUTE` for all other
        operations.
    allow_cache_entry_mutation (bool, optional): By default, an error is
        raised if any tensors cached by selective activation checkpoint are
        mutated in order to ensure correctness. If set to `True`, this check
        is disabled.
Returns:
    A tuple of two context managers.

Example:
    >>> # xdoctest: +REQUIRES(LINUX)
    >>> import functools
    >>>
    >>> x = torch.rand(10, 10, requires_grad=True)
    >>> y = torch.rand(10, 10, requires_grad=True)
    >>>
    >>> ops_to_save = [
    >>>    torch.ops.aten.mm.default,
    >>> ]
    >>>
    >>> def policy_fn(ctx, op, *args, **kwargs):
    >>>    if op in ops_to_save:
    >>>        return CheckpointPolicy.MUST_SAVE
    >>>    else:
    >>>        return CheckpointPolicy.PREFER_RECOMPUTE
    >>>
    >>> context_fn = functools.partial(create_selective_checkpoint_contexts, policy_fn)
    >>>
    >>> # or equivalently
    >>> context_fn = functools.partial(create_selective_checkpoint_contexts, ops_to_save)
    >>>
    >>> def fn(x, y):
    >>>     return torch.sigmoid(torch.matmul(torch.matmul(x, y), y)) * y
    >>>
    >>> out = torch.utils.checkpoint.checkpoint(
    >>>     fn, x, y,
    >>>     use_reentrant=False,
    >>>     context_fn=context_fn,
    >>> )
zPlease update the OpOverloadPacket to a specific OpOverload.For example, if you have `torch.ops.aten.mm`, change it to `torch.ops.aten.mm.default`.r  z6Expected op in `op_list` to be an OpOverload but got: z	 of type z. c                 P   > UT;   a  [         R                  $ [         R                  $ r5   r  )r   oprb   r   policy_fn_or_lists       r    r  7create_selective_checkpoint_contexts.<locals>.policy_fnz  s$    &&'111'888r2   z=policy_fn_or_list must be either a function or a list of ops.)r%   r   r'   _ops
OpOverloadOpOverloadPacketr   r-   callable	TypeErrorr   r  r  )r  r  r  
_extra_msgr  r  s   `     r    r   r   5  s    r #T**#Bb%**"7"788  EJJ$?$?@@nFH  !LRD Q#Bxj:,8  $	9
 
#	$	$%	WXX$/$5G!)5 5OP r2   c           
   /   H  ^ ^^^^^^^^^#    Sn[         b  [         (       a)  OU(       a!  U[        :w  a  [        S5      e[        5       u  p'U[        ;   a
  [        U   nO,[        S[        [        R                  5       5       SU 35      e[        U6 m[        T5      n	U" 5       u  n
m[        T XV5      (       a;  U[        :w  a1  [        U
[        5      (       a  [        T[        5      (       d   S5       e[        TS9u  mmT(       a6  [        R                  " 5       mSm[        U	SS5      (       a  S	m[!        U6 u  mmUUUU UUUUUU4
S
 jn[#        U[$        UU5      n[        R&                  " SS	S9n[(        R*                  " X/UQ76 Ul        UR,                  R.                  c  Sv   g[1        U5         U
   Sv   SSS5        SSS5        S	Ul        [        U	SS5      (       a  T(       a  T(       d  [5        S5      eg! , (       d  f       NI= f! , (       d  f       NR= f7f)a  Checkpointing without reentrant autograd.

Args:
    fn: describes what to run in the forward pass of the model or
        part of the model. It should also know how to handle the inputs
        passed as the tuple. For example, in LSTM, if user passes
        ``(activation, hidden)``, :attr:`function` should correctly use the
        first input as ``activation`` and the second input as ``hidden``
    preserve_rng_state(bool, optional):  Omit stashing and restoring
        the RNG state during each checkpoint.
        Default: ``True``
    context_fn(Callable, optional): A callable returning a tuple of two
        context managers. The function and its recomputation will be run
        under the first and second context managers respectively.
    determinism_check(str, optional): A string specifying the determinism
        check to perform. By default it is set to ``"default"`` which
        compares the shapes, dtypes, and devices of the recomputed tensors
        against those of the saved tensors. To turn off this check, specify
        ``"none"``. Currently these are the only two supported values.
        Please open an issue if you would like to see more determinism
        checks.
    debug(bool, optional): If ``True``, error messages will also include
        a trace of the operators run during the original forward computation
        as well as the recomputation.
    *args: Arguments to pass in to the given ``function``.
    **kwargs: Keyword arguments to pass into the given ``function``.
Nz6debug=True is incompatible with non-default context_fnz#determinism_check should be one of z
, but got z}In torch.compile mode, `context_fn` arg passed to `torch.utils.checkpoint` must generate a tuple of two `TorchDispatchMode`s.rn   Fr   Tc            	        >
 U tp/ nT(       a	  T(       a  Tn[         R                  R                  UTTS9   T(       a(  [         R                  " T	5        T(       a  [	        TT
TS9  [         R
                  R                  T5      (       a"  [         R
                  R                  " SST0TD6O[        R                  " 5       nU   [         R
                  R                  " S0 TD6   T   T" U0 UD6  S S S 5        S S S 5        S S S 5        S S S 5        g ! , (       d  f       N'= f! , (       d  f       N0= f! , (       d  f       N9= f! , (       d  f       g = f)Nr   rn   ro   rL   r   )
r'   r   r   rr   r   ry   rz   r   r   r   )r"   r   rb   r   r   r   r~   ro   fnr   rl   r   r   r   recompute_contexts        r    r  =_checkpoint_without_reentrant_generator.<locals>.recompute_fn  s    "3%K\\""); # 
 "##M2$%k3DR]^ 00== #())"4"4 #'#+A#CMCYCYC[   %eii&8&8&VBU&VXiD#F# Yj&V$
 
 YjXi&V&V$$
 
sT   BE!D3$D"'	D	0D"8D3 E
DD""
D0,D33
E	=E
Er   )r+   zPyTorch's device state was initialized in the forward pass of a Checkpoint, which is not allowed. Please open an issue if you need this feature.)r   r   r   rb  rl  r   keysrd   rB   r  r%   r
   r   r'   rk   r@   r   r
  r   r   r   r   r  r  r  r  r,   )r  r   r   r   r   rb   r   r  r  rA   forward_contextr  	new_framedummyr   r~   ro   r   rl   r   r   r  s   ``            @@@@@@@@r    r   r     s	    H O$=$I  u(H  '@&A#
>>89JK1$7Y7^7^7`2a1b c()+
 	

 %d+K&{3M)3&O&R&&:+H(9::(*;<<	AA		A= 3GS^2_//++- "=.%88 $->-E*K*$ $* !%	I KKD1E+11%G$GI $$,	)	$o '6	$"&I}ne44"3 (
 	
  '6o	$	$s6   F)H"5H8H =H;H" 
H	
H
HH"rO   )r#   Nr5   )F)cr   r_  r  r;   r  collectionsr   typingenumr   r'   torch.fx.tracebackfx	tracebackr  /torch._functorch._aot_autograd.functional_utilsr   torch.utils._pytreer   &torch.testing._internal.logging_tensorr   r	   torch.utils._python_dispatchr
   __all___DEFAULT_DETERMINISM_MODEr   Optionalr  __annotations__r>  r   r  r  r(   r   Iterabler   rB   r   rd   Listr  r   r   r   r   Functionr   r   _disable_dynamoCallableContextManagerrU   r   r   r   r   r   r   r   r   r
  rX  r,   r   rb  Dictrg  rl  	Exceptionrn  graphsaved_tensors_hooksrp  r  r  r  r  r   Enumr   r  opsatenr*   r   primr?   r_   _subclassesfunctional_tensorFunctionalTensormetadata_fnsr   r  r  r   r   rL   r2   r    <module>r     s        #   !  ) ) B ( R :& & ,0 8D> 0 )(4. ) )*
E#s(O 
ellC6G0H 
&
HSM 
d 
6 6BD-d3iell1C&C D -( 7; /(7&f$00 f$R>  %)FU6] D>] U>>#ABBC	]
 ] ] ]@YGx
V !%  -d - -<	 	8 8
"Ienn-- "IJ\ \~: x	l 	9'5"c'):HoEVX\E\<])]#^ 9'vu|| S#X  6
NP "Dh~s7J.K)K$L 	i 	-1%....BB -1`55u~~++?? 55p  &) )4tyy :R 
IINN!! 
IINN!! ++<<IIJK 1 <0 DSt FU6 U>>#ABBC 	
 r2   