
import functools
import json
import os
import re
from contextlib import contextmanager, redirect_stdout
from io import StringIO
from typing import Optional

from transformers.utils.import_utils import requires

from .utils import is_torch_available, logging


if is_torch_available():
    import torch
    import torch.distributed.tensor


logger = logging.get_logger(__name__)

_torch_distributed_available = torch.distributed.is_available()


def _is_rank_zero():
    """Return True if rank=0 or we aren't running distributed."""
    if not (_torch_distributed_available and torch.distributed.is_initialized()):
        return True
    return torch.distributed.get_rank() == 0


MEMORY_ADDRESS_REGEX = re.compile(r"object at 0x[0-9A-Fa-f]+")


def _sanitize_repr_for_diff(x_str: str) -> str:
    """
    Replace memory addresses in an object's repr with a stable placeholder
    so that beautiful JSON diffs won't be ruined by ephemeral addresses.
    """
    return MEMORY_ADDRESS_REGEX.sub("object at 0xXXXXXXXX", x_str)


def _dtensor_repr(x):
    """Return a stable string representation for a DTensor-like object."""
    if _is_rank_zero():
        return f"DTensor (rank0) -> {repr(x._local_tensor)}"
    return "DTensor(non-rank0)"


def _serialize_io(value):
    """
    Recursively build a JSON-serializable Python structure from `value`.
    Tensors and DTensors become sanitized repr strings.
    Lists/tuples/dicts are recursed into.
    All memory addresses are replaced with a stable placeholder.

    Args:
        value: Any Python object, often including torch Tensors, lists, dicts, etc.

    Returns:
        A nested Python structure (list, dict, or sanitized string) that is safe to json.dump.
    """
    if isinstance(value, (list, tuple)):
        return [_serialize_io(v) for v in value]

    if isinstance(value, dict):
        return {k: _serialize_io(v) for k, v in value.items()}

    if hasattr(value, "_local_tensor"):
        # DTensor-like object: describe the local shard rather than the sharded global tensor.
        torch.set_printoptions(sci_mode=True)
        val_repr = _repr_to_list(value)
        out = {
            "shape": repr(value._local_tensor.shape),
            "dtype": repr(value._local_tensor.dtype),
            "value": val_repr,
        }
        if value._local_tensor.dtype in {torch.float16, torch.float32, torch.bfloat16}:
            value = value._local_tensor.clone()
            out.update(
                {
                    "mean": _sanitize_repr_for_diff(repr(value.mean())),
                    "std": _sanitize_repr_for_diff(repr(value.std())),
                    "min": _sanitize_repr_for_diff(repr(value.min())),
                    "max": _sanitize_repr_for_diff(repr(value.max())),
                }
            )
        return out

    if isinstance(value, torch.Tensor):
        torch.set_printoptions(sci_mode=True)
        val_repr = _repr_to_list(value)
        out = {
            "shape": repr(value.shape),
            "dtype": repr(value.dtype),
            "value": val_repr,
        }
        if value.dtype in {torch.float16, torch.float32, torch.bfloat16}:
            out.update(
                {
                    "mean": _sanitize_repr_for_diff(repr(value.mean())),
                    "std": _sanitize_repr_for_diff(repr(value.std())),
                    "min": _sanitize_repr_for_diff(repr(value.min())),
                    "max": _sanitize_repr_for_diff(repr(value.max())),
                }
            )
        return out

    return _sanitize_repr_for_diff(repr(value))


def _repr_to_list(value: torch.Tensor):
    """
    Converts a tensor into a sanitized multi-line string representation.

    Args:
        value (`torch.Tensor`): The tensor to represent.

    Returns:
        `List[str]`: List of string lines representing the tensor.
    """
    torch.set_printoptions(sci_mode=True, linewidth=120)
    with StringIO() as buf, redirect_stdout(buf):
        print(value)
        raw = buf.getvalue()
    return _sanitize_repr_for_diff(raw).splitlines()


def prune_outputs_if_children(node):
    # A node with children repeats what its children already record, so drop the parent's
    # outputs and keep only the children's to make the dump readable.
    if node.get("children"):
        node.pop("outputs", None)
        for child in node["children"]:
            prune_outputs_if_children(child)


LAYER_SUFFIX_RE = re.compile(r"(.*)\.(\d+)$")


def is_layer_block(node):
    """
    Checks whether a node represents a layer block with submodules.

    Args:
        node (`dict`): A node from the call tree.

    Returns:
        `bool`: Whether the node is a layer block.
    """
    match = LAYER_SUFFIX_RE.match(node.get("module_path", ""))
    if not match or not node.get("children"):
        return False
    number = match.group(2)
    return any(f".{number}." in child.get("module_path", "") for child in node["children"])
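

# Illustrative example (hypothetical module paths): a node for "DecoderModel.layers.3" whose
# children live under ".3." (e.g. "DecoderModel.layers.3.self_attn") counts as a layer block,
# which is what lets `prune_intermediate_layers` below collapse repeated layers.
#
#     is_layer_block(
#         {
#             "module_path": "DecoderModel.layers.3",
#             "children": [{"module_path": "DecoderModel.layers.3.self_attn"}],
#         }
#     )
#     # -> True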


def prune_intermediate_layers(node):
    """
    Recursively removes intermediate layers from the tree to improve readability.
    Keeps at least the first and last layers if many consecutive layers are present.

    Args:
        node (`dict`): The root or subnode to prune recursively.
    """
    if not node.get("children"):
        return

    layer_blocks = [(i, child) for i, child in enumerate(node["children"]) if is_layer_block(child)]

    if len(layer_blocks) > 2:
        # Keep the first and last layer blocks, drop the ones in between.
        to_remove = [i for i, _ in layer_blocks[1:-1]]
        node["children"] = [child for i, child in enumerate(node["children"]) if i not in to_remove]

    for child in node["children"]:
        prune_intermediate_layers(child)


def log_model_debug_trace(debug_path, model):
    if debug_path:
        try:
            os.makedirs(debug_path, exist_ok=True)
            base = os.path.join(debug_path, model._debugger_module_dump_name + "_debug_tree")
        except Exception as e:
            raise ValueError(f"Unexpected or existing debug_path={debug_path}. {e}")
    else:
        base = model._debugger_module_dump_name + "_debug_tree"

    logger.info(f"Writing model trace at {base}.json")
    full_path = base + "_FULL_TENSORS.json"
    summary_path = base + "_SUMMARY.json"

    prune_outputs_if_children(model._call_tree)
    with open(full_path, "w") as f:
        json.dump(model._call_tree, f, indent=4)

    def strip_values(node):
        # Remove the heavy "value" fields everywhere so the summary stays small and diffable.
        def clean(val):
            if isinstance(val, dict):
                val.pop("value", None)
                for v in val.values():
                    clean(v)
            elif isinstance(val, list):
                for item in val:
                    clean(item)

        clean(node.get("inputs", {}))
        clean(node.get("outputs", {}))
        for child in node.get("children", []):
            strip_values(child)

    tree_copy = json.loads(json.dumps(model._call_tree))
    strip_values(tree_copy)
    with open(summary_path, "w") as f:
        json.dump(tree_copy, f, indent=4)
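

# Illustrative sketch (model name is hypothetical, contents abridged): for a model class named
# `MyModel`, `log_model_debug_trace` writes two files under `debug_path`,
#
#     MyModel_debug_tree_FULL_TENSORS.json   # full call tree, including tensor value slices
#     MyModel_debug_tree_SUMMARY.json        # same tree with every "value" field stripped
#
# where each node of the tree has the shape
#     {"module_path": "MyModel.layers.0.self_attn", "inputs": {...}, "outputs": {...}, "children": [...]}
# as built by `_attach_debugger_logic` below.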


def _attach_debugger_logic(model, debug_path: Optional[str] = None, do_prune_layers: Optional[bool] = True):
    """
    Attaches a debugging wrapper to every module in the model.

    This records structured inputs and outputs during the forward pass into a call tree.

    Args:
        model (`PreTrainedModel`, `nn.Module`): Model to wrap.
        debug_path (`str`): Optional directory to dump debug JSON files.
        do_prune_layers (`bool`, *optional*, defaults to `True`): Whether to prune intermediate layers.
    """
    class_name = model.__class__.__name__

    # Book-keeping structures stored on the model itself.
    model._call_tree = {"module_path": class_name, "inputs": None, "outputs": None, "children": []}
    model._debugger_model_call_stack = []
    model._debugger_module_dump_name = class_name  # used for the final JSON file names

    def wrap_forward(module, full_path):
        orig_forward = module.forward

        @functools.wraps(orig_forward)
        def wrapped_forward(*inps, **kws):
            if _is_rank_zero():
                dict_inputs = {"args": inps, "kwargs": kws}
                dict_inputs = {k: dict_inputs[k] for k in dict_inputs if len(dict_inputs[k]) > 0}
                node = {
                    "module_path": full_path,
                    "inputs": _serialize_io(dict_inputs),
                    "outputs": None,
                    "children": [],
                }
                model._debugger_model_call_stack.append(node)
            with torch.no_grad():
                out = orig_forward(*inps, **kws)

            if _is_rank_zero():
                # Only leaf modules keep their outputs; parents would just repeat them.
                if sum(1 for _ in module.named_children()) > 0:
                    node["outputs"] = None
                else:
                    node["outputs"] = _serialize_io(out)

                finished = model._debugger_model_call_stack.pop()
                if not finished["children"]:
                    finished.pop("children")

                if model._debugger_model_call_stack:
                    model._debugger_model_call_stack[-1]["children"].append(finished)
            return out

        module.forward = wrapped_forward

    # Wrap every submodule; the empty name is the model itself, handled separately below.
    for name, submodule in model.named_modules():
        if name == "":
            continue
        wrap_forward(submodule, f"{class_name}.{name}")

    real_top_forward = model.forward

    @functools.wraps(real_top_forward)
    def top_wrapped_forward(*inps, **kws):
        if _is_rank_zero():
            top_node = {
                "module_path": f"{class_name} (top-level)",
                "inputs": _serialize_io({"args": inps, "kwargs": kws}),
                "outputs": None,
                "children": [],
            }
            model._debugger_model_call_stack.append(top_node)

        out = real_top_forward(*inps, **kws)

        if _is_rank_zero() and model._debugger_model_call_stack:
            top_node["outputs"] = _serialize_io(out)
            finished = model._debugger_model_call_stack.pop()
            model._call_tree["inputs"] = finished["inputs"]
            model._call_tree["outputs"] = finished["outputs"]
            model._call_tree["children"] = finished["children"]
            # Drop empty entries so the dumped tree stays compact.
            [model._call_tree.pop(k, None) for k in list(model._call_tree.keys()) if not model._call_tree[k]]

        if do_prune_layers:
            prune_intermediate_layers(model._call_tree)
        log_model_debug_trace(debug_path=debug_path, model=model)
        return out

    model.forward = top_wrapped_forward


@requires(backends=("torch",))
@contextmanager
def model_addition_debugger_context(model, debug_path: Optional[str] = None, do_prune_layers: Optional[bool] = True):
    """
    # Model addition debugger - context manager for model adders
    This context manager is a power user tool intended for model adders.
    It tracks all forward calls within a model forward pass and logs a slice of each input and output in a nested JSON file.
    Note that this context manager enforces `torch.no_grad()`.

    ## Usage

    Add the context manager to a model to debug it:

    ```python
    import torch
    from PIL import Image
    import requests
    from transformers import LlavaProcessor, LlavaForConditionalGeneration
    from transformers.model_debugging_utils import model_addition_debugger_context

    torch.random.manual_seed(673)

    # load pretrained model and processor
    model_id = "llava-hf/llava-1.5-7b-hf"
    processor = LlavaProcessor.from_pretrained(model_id)
    model = LlavaForConditionalGeneration.from_pretrained(model_id, low_cpu_mem_usage=True)

    # create random image input
    random_image = Image.fromarray(torch.randint(0, 256, (224, 224, 3), dtype=torch.uint8).numpy())

    # prompt
    prompt = "<image>Describe this image."

    # process inputs
    inputs = processor(text=prompt, images=random_image, return_tensors="pt")

    # call forward method (not .generate!)
    with model_addition_debugger_context(model, debug_path="Your_debug_path", do_prune_layers=False):
        output = model.forward(**inputs)
    ```
    """
    orig_forwards = {m: m.forward for _, m in model.named_modules()}
    orig_forwards[model] = model.forward
    _attach_debugger_logic(model, debug_path, do_prune_layers)
    try:
        yield model
    finally:
        # Restore every original forward, whether or not the traced forward raised.
        for module_instance, forward_method in orig_forwards.items():
            module_instance.forward = forward_method