r"""
This package enables an interface for accessing MTIA backend in python
"""

import threading
import warnings
from typing import Any, Callable, Optional, Union

import torch
from torch import device as _device, Tensor
from torch._utils import _dummy_type, _LazySeedTracker, classproperty
from torch.types import Device

from ._utils import _get_device_index


_device_t = Union[_device, str, int]

# torch.mtia re-exports the generic accelerator Event/Stream types.
Event = torch.Event
Stream = torch.Stream

_initialized = False
# Queued calls are not invoked until MTIA initialization actually occurs.
_queued_calls: list[tuple[Callable[[], None], list[str]]] = []
_tls = threading.local()
_initialization_lock = threading.Lock()
_lazy_seed_tracker = _LazySeedTracker()


def init():
    _lazy_init()


def is_initialized():
    r"""Return whether PyTorch's MTIA state has been initialized."""
    return _initialized and not _is_in_bad_fork()


def _is_in_bad_fork() -> bool:
    return torch._C._mtia_isInBadFork()
d S tq t  r	 W d    d S t r tdt s'tdtj	
  dt_tdd t D  z1tD ]'\} }z|   W q> tye } zdt| dd	| }t||d }~ww W ttd nttd w daW d    d S 1 sw   Y  d S )
Nis_initializingzwCannot re-initialize MTIA in forked subprocess. To use MTIA with multiprocessing, you must use the 'spawn' start methodzTorch not compiled with MTIA enabled. Ensure you have `import mtia.host_runtime.torch_mtia.dynamic_library` in your python src file and include `//mtia/host_runtime/torch_mtia:torch_mtia` as your target dependency!Tc                 s   s    | ]}|r|V  qd S r   r   ).0Zcallsr   r   r   	<genexpr>P   s    z_lazy_init.<locals>.<genexpr>z6MTIA call failed lazily at initialization with error: z(

MTIA call was originally invoked at:

 )r   hasattr_tls_initialization_lockr   RuntimeError_is_compiledAssertionErrorr   r   Z
_mtia_initr   r   extend_lazy_seed_trackerZ	get_calls	ExceptionstrjoinDeferredMtiaCallErrordelattrr   )Zqueued_callZorig_tracebackemsgr   r   r   r   .   sB   



"r   c                   @   s   e Zd ZdS )r+   N)__name__
__module____qualname__r   r   r   r   r+   a   s    r+   c                   C   r   )z*Return true if compiled with MTIA support.)r   r   Z_mtia_isBuiltr   r   r   r   r$   e      
r$   c                   C   s   t  sdS t dkS )z'Return true if MTIA device is availableFr   )r$   device_countr   r   r   r   is_availablej   s   
r4   r   c                 C   s:   t j|  t j W  d   S 1 sw   Y  dS )z?Waits for all jobs in all streams on a MTIA device to complete.N)r   mtiar   r   Z_mtia_deviceSynchronizer   r   r   r   synchronizer   s   $r7   c                   C   r   )z,Return the number of MTIA devices available.)r   r   Z_mtia_getDeviceCountr   r   r   r   r3   x      
r3   c                   C   r   )z0Return the index of a currently selected device.)r   r   Z%_accelerator_hooks_get_current_devicer   r   r   r   current_device~   r2   r9   c                 C      t jt| ddS )aS  Return the currently selected :class:`Stream` for a given device.

    Args:
        device (torch.device or int, optional): selected device. Returns
            the currently selected :class:`Stream` for the current device, given
            by :func:`~torch.mtia.current_device`, if :attr:`device` is ``None``
            (default).
    """
    return torch._C._mtia_getCurrentStream(_get_device_index(device, optional=True))


def default_stream(device: Optional[_device_t] = None) -> Stream:
    r"""Return the default :class:`Stream` for a given device.

    Args:
        device (torch.device or int, optional): selected device. Returns
            the default :class:`Stream` for the current device, given by
            :func:`~torch.mtia.current_device`, if :attr:`device` is ``None``
            (default).
    """
    return torch._C._mtia_getDefaultStream(_get_device_index(device, optional=True))


def record_memory_history(
    enabled: Optional[str] = "all", stacks: str = "python", max_entries: int = 0
) -> None:
    r"""Enable/Disable the memory profiler on MTIA allocator

    Args:
        enabled ("all" or "state", optional). Select how much of the allocator's
            history to record.

        stacks ("python" or "cpp", optional). Select the stack trace to record.

        max_entries (int, optional). Maximum number of entries to record.
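
    Example (illustrative sketch; assumes MTIA support is compiled in and a
    device is present; the recorded history can be read back with
    :func:`torch.mtia.snapshot`)::

        >>> torch.mtia.init()
        >>> torch.mtia.record_memory_history(enabled="all", stacks="python")
        >>> # ... run an MTIA workload ...
        >>> history = torch.mtia.snapshot()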
    """
    if not is_initialized():
        return
    torch._C._mtia_recordMemoryHistory(enabled, stacks, max_entries)


def snapshot() -> dict[str, Any]:
    r"""Return a dictionary of MTIA memory allocator history."""
    return torch._C._mtia_memorySnapshot()


def get_device_capability(device: Optional[_device_t] = None) -> tuple[int, int]:
    r"""Return capability of a given device as a tuple of (major version, minor version).

    Args:
        device (torch.device or int, optional): selected device. Returns the
            capability of the current device, given by current_device(),
            if device is None (default).
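
    Example (illustrative sketch; assumes at least one MTIA device is visible)::

        >>> major, minor = torch.mtia.get_device_capability(0)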
    """
    return torch._C._mtia_getDeviceCapability(_get_device_index(device, optional=True))


def empty_cache() -> None:
    r"""Empty the MTIA device cache."""
    return torch._C._mtia_emptyCache()


def set_stream(stream: Stream):
    r"""Set the current stream. This is a wrapper API to set the stream.
        Usage of this function is discouraged in favor of the ``stream``
        context manager.

    Args:
        stream (Stream): selected stream. This function is a no-op
            if this argument is ``None``.
    """
    if stream is None:
        return
    torch._C._mtia_setCurrentStream(stream)


def set_device(device: _device_t) -> None:
    r"""Set the current device.

    Args:
        device (torch.device or int): selected device. This function is a no-op
            if this argument is negative.
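
    Example (illustrative sketch; assumes at least two MTIA devices are visible)::

        >>> torch.mtia.set_device(1)
        >>> assert torch.mtia.current_device() == 1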
    """
    device = _get_device_index(device)
    if device >= 0:
        torch._C._accelerator_hooks_set_current_device(device)


class device:
    r"""Context-manager that changes the selected device.

    Args:
        device (torch.device or int): device index to select. It's a no-op if
            this argument is a negative integer or ``None``.
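
    Example (illustrative sketch; assumes at least two MTIA devices; tensors
    created inside the block are expected to land on the selected device,
    mirroring other accelerator backends)::

        >>> with torch.mtia.device(1):
        ...     t = torch.empty(8, device="mtia")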
    """

    def __init__(self, device: Any):
        self.idx = _get_device_index(device, optional=True)
        self.prev_idx = -1

    def __enter__(self):
        self.prev_idx = torch._C._accelerator_hooks_maybe_exchange_device(self.idx)

    def __exit__(self, type: Any, value: Any, traceback: Any):
        self.idx = torch._C._accelerator_hooks_maybe_exchange_device(self.prev_idx)
        return False


class StreamContext:
    r"""Context-manager that selects a given stream.

    All MTIA kernels queued within its context will be enqueued on a selected
    stream.

    Args:
        stream (Stream): selected stream. This manager is a no-op if it's
            ``None``.
    .. note:: Streams are per-device.
    """

    cur_stream: Optional["torch.mtia.Stream"]

    def __init__(self, stream: Optional["torch.mtia.Stream"]):
        self.cur_stream = None
        self.stream = stream
        self.idx = _get_device_index(None, True)
        if not torch.jit.is_scripting():
            if self.idx is None:
                self.idx = -1

        self.src_prev_stream = (
            None if not torch.jit.is_scripting() else torch.mtia.default_stream(None)
        )
        self.dst_prev_stream = (
            None if not torch.jit.is_scripting() else torch.mtia.default_stream(None)
        )

    def __enter__(self):
        cur_stream = self.stream
        # No-op if no stream was given or no MTIA device is available.
        if cur_stream is None or self.idx == -1:
            return
        self.src_prev_stream = torch.mtia.current_stream(None)

        # If the target stream lives on a different device, also remember that
        # device's current stream before switching to the target stream.
        if self.src_prev_stream.device != cur_stream.device:
            with device(cur_stream.device):
                self.dst_prev_stream = torch.mtia.current_stream(cur_stream.device)
        torch.mtia.set_stream(cur_stream)

    def __exit__(self, type: Any, value: Any, traceback: Any):
        cur_stream = self.stream
        if cur_stream is None or self.idx == -1:
            return

        # Restore the previously current streams on the destination and
        # source devices.
        if self.src_prev_stream.device != cur_stream.device:
            torch.mtia.set_stream(self.dst_prev_stream)
        torch.mtia.set_stream(self.src_prev_stream)


def stream(stream: Optional["torch.mtia.Stream"]) -> StreamContext:
    r"""Wrap around the Context-manager StreamContext that selects a given stream.

    Arguments:
        stream (Stream): selected stream. This manager is a no-op if it's
            ``None``.
    .. note:: In eager mode stream is of type Stream class while in JIT it doesn't support torch.mtia.stream
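
    Example (illustrative sketch; assumes MTIA is available; reuses the
    current stream purely to show the context-manager pattern)::

        >>> s = torch.mtia.current_stream()
        >>> with torch.mtia.stream(s):
        ...     # MTIA kernels issued here are enqueued on ``s``.
        ...     pass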
    """
    return StreamContext(stream)


def get_rng_state(device: Union[int, str, torch.device] = "mtia") -> Tensor:
    r"""Returns the random number generator state as a ByteTensor.

    Args:
        device (torch.device or int, optional): The device to return the RNG state of.
            Default: ``'mtia'`` (i.e., ``torch.device('mtia')``, the current mtia device).
    """
    warnings.warn(
        "get_rng_state is not implemented in torch.mtia",
        UserWarning,
        stacklevel=2,
    )
    return torch.zeros([1], dtype=torch.uint8, device=device)


def set_rng_state(
    new_state: Tensor, device: Union[int, str, torch.device] = "mtia"
) -> None:
    r"""Sets the random number generator state.

    Args:
        new_state (torch.ByteTensor): The desired state
        device (torch.device or int, optional): The device to set the RNG state.
            Default: ``'mtia'`` (i.e., ``torch.device('mtia')``, the current mtia device).
    """
    warnings.warn(
        "set_rng_state is not implemented in torch.mtia",
        UserWarning,
        stacklevel=2,
    )


from .memory import *  # noqa: F403


__all__ = [
    "init",
    "is_available",
    "is_initialized",
    "synchronize",
    "device_count",
    "current_device",
    "current_stream",
    "default_stream",
    "memory_stats",
    "max_memory_allocated",
    "reset_peak_memory_stats",
    "get_device_capability",
    "record_memory_history",
    "snapshot",
    "empty_cache",
    "set_device",
    "set_stream",
    "stream",
    "device",
    "set_rng_state",
    "get_rng_state",
]