
    eTh
                    
   S SK rS SKrS SKrS SKJr  S SKJr  S SKJr  S SK	J
r
JrJrJrJrJrJrJr  S SKrSSKJrJr  SSKJr  SS	KJr  \" S
\
5      r \" S\\\   /\\\
4   4   5      r " S S5      rS rS.S\\   S\\\
4   4S jjr \ " S S\5      5       r!S\\   S\\\
4   4S jr"S\\   S\\\
4   4S jr#S\\   S\\\
4   4S jr$\ " S S5      5       r%\ " S S\5      5       r&S/S\\'   4S jjr(S/S\\'   4S jjr)S/S\\'   4S jjr*\ " S S \5      5       r+\ " S! S"5      5       r,\ " S# S$\5      5       r-\ " S% S&\-5      5       r.S' r/\ " S( S)\-5      5       r0\ " S* S+\5      5       r1\ " S, S-\!5      5       r2g)0    N)Mapping)	dataclass)randint)AnyCallableDictListNewTypeOptionalTupleUnion   )BertTokenizerBertTokenizerFast)PreTrainedTokenizerBase)PaddingStrategyInputDataClassDataCollatorc                       \ rS rSrSS jrSrg)DataCollatorMixin'   Nc                     Uc  U R                   nUS:X  a  U R                  U5      $ US:X  a  U R                  U5      $ US:X  a  U R                  U5      $ [	        SU S35      e)NtfptnpzFramework 'z' not recognized!)return_tensorstf_call
torch_call
numpy_call
ValueErrorselffeaturesr   s      W/var/www/auris/envauris/lib/python3.13/site-packages/transformers/data/data_collator.py__call__DataCollatorMixin.__call__(   sn    !!00NT!<<))t#??8,,t#??8,,{>*::KLMM     N)__name__
__module____qualname____firstlineno__r%   __static_attributes__r(   r'   r$   r   r   '   s    
Nr'   r   c                 
   [        U S5      (       d  U R                  " U0 UD6$ U R                  R                  SS5      nSU R                  S'    U R                  " U0 UD6nX0R                  S'   U$ ! X0R                  S'   f = f)zr
Pads without triggering the warning about how using the pad function is sub-optimal when using a fast tokenizer.
deprecation_warningszAsking-to-pad-a-fast-tokenizerFT)hasattrpadr0   get)	tokenizerpad_args
pad_kwargswarning_statepaddeds        r$   "pad_without_fast_tokenizer_warningr9   5   s     9455}}h5*55 22667WY^_MGKI""#CDY7J7 LY&&'GHM LY&&'GHs   A2 2Br#   returnc                 j    US:X  a  [        U 5      $ US:X  a  [        U 5      $ US:X  a  [        U 5      $ g)a  
Very simple data collator that simply collates batches of dict-like objects and performs special handling for
potential keys named:

    - `label`: handles a single value (int or float) per object
    - `label_ids`: handles a list of values per object

Does not do any additional preprocessing: property names of the input object will be used as corresponding inputs
to the model. See glue and ner for example of how it's useful.
r   r   r   N)torch_default_data_collatortf_default_data_collatornumpy_default_data_collator)r#   r   s     r$   default_data_collatorr?   K   sB    " *844	4	'11	4	*844 
 r'   c                   X    \ rS rSr% SrSr\\S'   S
S\\	\\
4      S\	\\
4   4S jjrS	rg)DefaultDataCollatord   a  
Very simple data collator that simply collates batches of dict-like objects and performs special handling for
potential keys named:

    - `label`: handles a single value (int or float) per object
    - `label_ids`: handles a list of values per object

Does not do any additional preprocessing: property names of the input object will be used as corresponding inputs
to the model. See glue and ner for example of how it's useful.

This is an object (like other data collators) rather than a pure function like default_data_collator. This can be
helpful if you need to set a return_tensors value at initialization.

Args:
    return_tensors (`str`, *optional*, defaults to `"pt"`):
        The type of Tensor to return. Allowable values are "np", "pt" and "tf".
r   r   Nr#   r:   c                 6    Uc  U R                   n[        X5      $ r)   )r   r?   r!   s      r$   r%   DefaultDataCollator.__call__z   s    !!00N$X>>r'   r(   r)   )r*   r+   r,   r-   __doc__r   str__annotations__r	   r   r   r%   r.   r(   r'   r$   rA   rA   d   sC    $ NC?d38n!5 ?tTWY\T\~ ? ?r'   rA   c           
      J   SS K n[        U S   [        5      (       d  U  Vs/ s H  n[        U5      PM     n nU S   n0 nSU;   a  US   b  [        US   UR                  5      (       a  US   R                  5       OUS   n[        U[        5      (       a  UR                  OUR                  nUR                  U  Vs/ s H  o"S   PM	     snUS9US'   OSU;   a  US   b  [        US   UR                  5      (       a(  UR                  U  Vs/ s H  o"S   PM	     sn5      US'   OY[        US   S   [        5      (       a  UR                  OUR                  nUR                  U  Vs/ s H  o"S   PM	     snUS9US'   UR                  5        H  u  pxUS;  d  M  Uc  M  [        U[        5      (       a  M)  [        XR                  5      (       a(  UR                  U  Vs/ s H  o"U   PM	     sn5      XG'   Mk  [        U[        R                  5      (       a<  UR                  [        R                  " U  Vs/ s H  o"U   PM	     sn5      5      XG'   M  UR                  U  Vs/ s H  o"U   PM	     sn5      XG'   M     U$ s  snf s  snf s  snf s  snf s  snf s  snf s  snf Nr   labeldtypelabels	label_ids)rJ   rN   )torch
isinstancer   varsTensoritemintlongfloattensorstackitemsrF   r   ndarray
from_numpy)	r#   rO   ffirstbatchrJ   rL   kvs	            r$   r<   r<      s?   hqk7++%-.XDGX.QKEE
 %E'N6)3E'NELL)Q)Qg##%W\]dWe(44

%++,,H'EHq'
H'EU,Sh		%"4"@eK(%,,77#kk8*L8a[>8*LME(O",U;-?-BC"H"HEJJekkE#llH+MHqkNH+MUZl[E(O **q}ZPQSVEWEW!\\** ;;h'?h!h'?@Arzz** ++BHHH5MHqdH5M,NO <<x(@x!1x(@A  L; / (F +M ,N (@5M(@s)   J=JJ)J"J
9J%J 
c                    SS K n[        U S   [        5      (       d  U  Vs/ s H  n[        U5      PM     n nU S   n0 nSU;   a	  US   b  SnO SU;   a	  US   b  SnOSU;   a	  US   b  SnOS nUGb  [        X5   UR                  5      (       a7  X5   R
                  R                  (       a  UR                  OUR                  nGO[        X5   [        R                  5      (       d!  [        X5   [        R                  5      (       aO  [        R                  " X5   R
                  [        R                  5      (       a  UR                  OUR                  nO[        X5   [        [        45      (       a3  [        X5   S   [         5      (       a  UR                  OUR                  nO/[        X5   [         5      (       a  UR                  OUR                  nUR#                  U  Vs/ s H  o"U   PM	     snUS9US'   UR%                  5        H  u  pxUS;  d  M  Uc  M  [        U[&        5      (       a  M)  [        XR                  [        R                  45      (       a(  UR)                  U  Vs/ s H  o"U   PM	     sn5      XG'   M{  UR#                  U  Vs/ s H  o"U   PM	     sn5      XG'   M     U$ s  snf s  snf s  snf s  snf )Nr   rJ   rN   rM   rK   )rJ   rN   rM   )
tensorflowrP   r   rQ   rR   rL   
is_integerint64float32r   rZ   generic
issubdtypeintegertuplelistrT   convert_to_tensorrY   rF   rX   )	r#   r   r\   r]   r^   label_col_namerL   r_   r`   s	            r$   r=   r=      s@   hqk7++%-.XDGX.QKEE
 %E'N6 		%"4"@$	U	uX:!!e+RYY77 % 5 ; ; F FBHHBJJE-rzz::jI^`b`j`j>k>k "e.C.I.I2:: V VBHH\^\f\fE-t}== *5+@+CS I IBHHrzzE *5+@# F FBHHBJJE..8/T8a.0A8/T\a.bh 44zZ[]`OaOa!ii455888$<8aqT8$<=//x0Hx!1x0HI  LE /0 0U %=0Hs   J3%J8.J=
K
c                    [        U S   [        5      (       d  U  Vs/ s H  n[        U5      PM     n nU S   n0 nSU;   a  US   b  [        US   [        R                  5      (       a  US   R                  5       OUS   n[        U[        5      (       a  [        R                  O[        R                  n[        R                  " U  Vs/ s H  oS   PM	     snUS9US'   OSU;   a  US   b  [        US   [        R                  5      (       a-  [        R                  " U  Vs/ s H  oS   PM	     sn5      US'   Of[        US   S   [        5      (       a  [        R                  O[        R                  n[        R                  " U  Vs/ s H  oS   PM	     snUS9US'   UR                  5        H  u  pgUS;  d  M  Uc  M  [        U[        5      (       a  M)  [        U[        R                  5      (       a-  [        R                  " U  Vs/ s H  oU   PM	     sn5      X6'   Mu  [        R                  " U  Vs/ s H  oU   PM	     sn5      X6'   M     U$ s  snf s  snf s  snf s  snf s  snf s  snf rI   )rP   r   rQ   r   rZ   rS   rT   rd   re   arrayrX   rY   rF   )r#   r\   r]   r^   rJ   rL   r_   r`   s           r$   r>   r>      s   hqk7++%-.XDGX.QKEE
 %E'N6)3E'NBJJ)O)Og##%UZ[bUc&uc22

((#A1gJ#AOh		%"4"@eK("**55 hh'I1+'IJE(O *5+=a+@# F FBHHBJJE hh'I1+'IQVWE(O **q}ZPQSVEWEW!RZZ((888$<8aqT8$<=888$<8aqT8$<=  L7 / $B (J (J %=$<s#   I
I$I#I(I-
<I2
c                       \ rS rSr% Sr\\S'   Sr\\	\
\4   \S'   Sr\\   \S'   Sr\\   \S'   S	r\
\S
'   S\\\
\4      S\\
\4   4S jrSrg)DataCollatorWithPadding   ae  
Data collator that will dynamically pad the inputs received.

Args:
    tokenizer ([`PreTrainedTokenizer`] or [`PreTrainedTokenizerFast`]):
        The tokenizer used for encoding the data.
    padding (`bool`, `str` or [`~utils.PaddingStrategy`], *optional*, defaults to `True`):
        Select a strategy to pad the returned sequences (according to the model's padding side and padding index)
        among:

        - `True` or `'longest'` (default): Pad to the longest sequence in the batch (or no padding if only a single
          sequence is provided).
        - `'max_length'`: Pad to a maximum length specified with the argument `max_length` or to the maximum
          acceptable input length for the model if that argument is not provided.
        - `False` or `'do_not_pad'`: No padding (i.e., can output a batch with sequences of different lengths).
    max_length (`int`, *optional*):
        Maximum length of the returned list and optionally padding length (see above).
    pad_to_multiple_of (`int`, *optional*):
        If set will pad the sequence to a multiple of the provided value.

        This is especially useful to enable the use of Tensor Cores on NVIDIA hardware with compute capability >=
        7.0 (Volta).
    return_tensors (`str`, *optional*, defaults to `"pt"`):
        The type of Tensor to return. Allowable values are "np", "pt" and "tf".
r4   TpaddingN
max_lengthpad_to_multiple_ofr   r   r#   r:   c           	          [        U R                  UU R                  U R                  U R                  U R
                  S9nSU;   a  US   US'   US	 SU;   a  US   US'   US	 U$ )Nrr   rs   rt   r   rJ   rM   rN   )r9   r4   rr   rs   rt   r   )r"   r#   r^   s      r$   r%    DataCollatorWithPadding.__call__  sw    2NNLL#66..
 e#GnE(Og%#K0E(Ok"r'   r(   )r*   r+   r,   r-   rE   r   rG   rr   r   boolrF   r   rs   r   rT   rt   r   r	   r   r   r%   r.   r(   r'   r$   rp   rp      st    4 '&15GU4o-.5 $J$(,,NCd38n!5 $sCx. r'   rp   c                       \ rS rSr% Sr\\S'   Sr\\	\
\4   \S'   Sr\\   \S'   Sr\\   \S'   S	r\\S
'   Sr\
\S'   S rS rS rSrg)"DataCollatorForTokenClassificationi!  a'  
Data collator that will dynamically pad the inputs received, as well as the labels.

Args:
    tokenizer ([`PreTrainedTokenizer`] or [`PreTrainedTokenizerFast`]):
        The tokenizer used for encoding the data.
    padding (`bool`, `str` or [`~utils.PaddingStrategy`], *optional*, defaults to `True`):
        Select a strategy to pad the returned sequences (according to the model's padding side and padding index)
        among:

        - `True` or `'longest'` (default): Pad to the longest sequence in the batch (or no padding if only a single
          sequence is provided).
        - `'max_length'`: Pad to a maximum length specified with the argument `max_length` or to the maximum
          acceptable input length for the model if that argument is not provided.
        - `False` or `'do_not_pad'`: No padding (i.e., can output a batch with sequences of different lengths).
    max_length (`int`, *optional*):
        Maximum length of the returned list and optionally padding length (see above).
    pad_to_multiple_of (`int`, *optional*):
        If set will pad the sequence to a multiple of the provided value.

        This is especially useful to enable the use of Tensor Cores on NVIDIA hardware with compute capability >=
        7.0 (Volta).
    label_pad_token_id (`int`, *optional*, defaults to -100):
        The id to use when padding the labels (-100 will be automatically ignore by PyTorch loss functions).
    return_tensors (`str`, *optional*, defaults to `"pt"`):
        The type of Tensor to return. Allowable values are "np", "pt" and "tf".
r4   Trr   Nrs   rt   label_pad_token_idr   r   c                 N  ^ SS K mSUS   R                  5       ;   a  SOSnX!S   R                  5       ;   a  U Vs/ s H  o3U   PM	     snOS nU VVVs/ s H.  o3R                  5        VVs0 s H  u  pVXR:w  d  M  XV_M     snnPM0     nnnn[        U R                  UU R
                  U R                  U R                  SS9nUc  U$ US   R                  S   n	U R                  R                  n
U4S jnU
S	:X  a8  U Vs/ s H'  o" U5      U R                  /U	[        U5      -
  -  -   PM)     snX'   O7U Vs/ s H'  oR                  /U	[        U5      -
  -  U" U5      -   PM)     snX'   TR                  X   TR                  S
9X'   U$ s  snf s  snnf s  snnnf s  snf s  snf )Nr   rJ   rM   r   rv   	input_ids   c                 p   > [        U TR                  5      (       a  U R                  5       $ [        U 5      $ r)   )rP   rR   tolistrj   )tensor_or_iterablerO   s    r$   to_list>DataCollatorForTokenClassification.torch_call.<locals>.to_list]  s0    ,ell;;)0022*++r'   rightrK   )rO   keysrY   r9   r4   rr   rs   rt   shapepadding_sider|   lenrW   rd   )r"   r#   
label_namefeaturerM   r_   r`   no_labels_featuresr^   sequence_lengthr   r   rJ   rO   s                @r$   r   -DataCollatorForTokenClassification.torch_callF  s    '8A;+;+;+= =W8
BLYZP[P`P`PbBbX>X'*%X>hlcklckX_S1?tqtSckl2NNLL#66
 >L,2215~~22	,
 7"io!io`e$"9"9!:oPSTYPZ>Z![[io!E
 jp!io`e(()_s5z-IJWUZ^[io!E "LL):%++LNE ?Sl.!!s/   FF,F;FF .F8.F"Fc           
      (   SS K nSUS   R                  5       ;   a  SOSnX1S   R                  5       ;   a  U Vs/ s H  oDU   PM	     snOS n[        U R                  UU R                  U R
                  U R                  Uc  SOS S9nUc  U$ UR                  US   5      R                  S   nU R                  R                  nUS:X  a=  U V	s/ s H+  n	[        U	5      U R                  /U[        U	5      -
  -  -   PM-     sn	US'   O;U V	s/ s H*  oR                  /U[        U	5      -
  -  [        U	5      -   PM,     sn	US'   UR                  5        V
Vs0 s H  u  pXR                  XR                  S	9_M      nn
nU$ s  snf s  sn	f s  sn	f s  snn
f )
Nr   rJ   rM   r   rv   r~   r   r   rK   )rb   r   r9   r4   rr   rs   rt   rk   r   r   rj   r|   r   rY   rd   )r"   r#   r   r   r   rM   r^   r   r   rJ   r_   r`   s               r$   r   *DataCollatorForTokenClassification.tf_calln  s    '8A;+;+;+= =W8
BLYZP[P`P`PbBbX>X'*%X>hl2NNLL#66#)>4t
 >L..u[/ABHHK~~227"flfl]bUt667?SQVZ;WXXflE(O
 gmfl]b(()_s5z-IJTRW[XflE(O INV(((((;;V5 ?" Ws   E?2F1F	%Fc           
      @   SUS   R                  5       ;   a  SOSnX!S   R                  5       ;   a  U Vs/ s H  o3U   PM	     snOS n[        U R                  UU R                  U R                  U R
                  Uc  SOS S9nUc  U$ [        R                  " US   5      R                  S   nU R                  R                  nUS:X  a=  U Vs/ s H+  n[        U5      U R                  /U[        U5      -
  -  -   PM-     snUS'   O;U Vs/ s H*  oR                  /U[        U5      -
  -  [        U5      -   PM,     snUS'   UR                  5        V	V
s0 s H)  u  pU	[        R                  " U
[        R                  S	9_M+     nn	n
U$ s  snf s  snf s  snf s  sn
n	f )
NrJ   r   rM   r   rv   r~   r   r   rK   )r   r9   r4   rr   rs   rt   r   rn   r   r   rj   r|   r   rY   rd   )r"   r#   r   r   rM   r^   r   r   rJ   r_   r`   s              r$   r   -DataCollatorForTokenClassification.numpy_call  s    '8A;+;+;+= =W8
BLYZP[P`P`PbBbX>X'*%X>hl2NNLL#66#)>4t
 >L((5#56<<Q?~~227"flfl]bUt667?SQVZ;WXXflE(O
 gmfl]b(()_s5z-IJTRW[XflE(O =BKKMJMDABHHQbhh//MJ5 ?" Ks   F2F1F0Fr(   )r*   r+   r,   r-   rE   r   rG   rr   r   rx   rF   r   rs   r   rT   rt   r|   r   r   r   r   r.   r(   r'   r$   rz   rz   !  sh    8 '&15GU4o-.5 $J$(,,""NC&P@r'   rz   rt   c                 2  ^
 SSK n[        U S   [        [        [        R
                  45      (       a(  U  Vs/ s H  oCR                  XCR                  S9PM     n nU S   R                  S5      m
[        U
4S jU  5       5      nU(       a6  Ub	  T
U-  S:X  a*  [        XR                  5      (       d  UR                  U SS9$ UR                  c#  [        SUR                  R                   S35      e[!        S U  5       5      nUb  Xb-  S:w  a
  Xb-  S	-   U-  nU S   R#                  [%        U 5      U/UR&                  5      n[)        U 5       HA  u  pUR*                  S
:X  a  XUSU	R,                  S   24'   M,  XXR,                  S   * S24'   MC     U$ s  snf )_Collate `examples` into a batch, using the information in `tokenizer` for padding if necessary.r   NrK   c              3   J   >#    U  H  oR                  S 5      T:H  v   M     g7fr   Nsize.0xlength_of_firsts     r$   	<genexpr>'_torch_collate_batch.<locals>.<genexpr>  s     !Q1&&)">s    #)dimCYou are attempting to pad samples but the tokenizer you are using () does not have a pad token.c              3   B   #    U  H  oR                  S 5      v   M     g7fr   r   r   r   s     r$   r   r     s     11VVAYYs   r   r   )rO   rP   rj   ri   r   rZ   rW   rU   r   allrR   rX   	pad_tokenr    	__class__r*   maxnew_fullr   pad_token_id	enumerater   r   )examplesr4   rt   rO   eare_tensors_same_lengthrs   resultiexampler   s             @r$   _torch_collate_batchr     s    (1+eRZZ899?GHx!LL**L5xHqk&&q)O "!Q!QQ$6$>/TfBfjkBk(LL11;;xQ;// "$$--..JL
 	
 111J%:+Ja+O!71<@RR
a[!!3x=*"=y?U?UVF)
!!W,,31(a((()-41}}Q''))*	 *
 M9 Is   "Fc           	      >  ^ SS K n [        U S   [        [        45      (       a(  U  Vs/ s H  oCR	                  XCR
                  S9PM     n n[        U S   5      m[        U4S jU  5       5      nU(       a  Ub	  TU-  S:X  a  UR                  U SS9$ UR                  c#  [        SUR                  R                   S35      e[        S U  5       5      nUb  Xb-  S:w  a
  Xb-  S-   U-  n/ nUR                  U S   5      n[        R                   " US	4[        R"                  S9n	U  H_  n
UR$                  S
:X  a  U[        U
5      -
  U	S'   OU[        U
5      -
  U	S'   UR'                  UR)                  XUR*                  S95        Ma     UR                  USS9$ s  snf )Nr   rK   c              3   @   >#    U  H  n[        U5      T:H  v   M     g 7fr)   r   r   s     r$   r   $_tf_collate_batch.<locals>.<genexpr>       !NX#a&O";X   axisr   r   c              3   8   #    U  H  n[        U5      v   M     g 7fr)   r   r   s     r$   r   r          .XSVVX   r   r   r   )r   r   )r   r   )constant_values)rb   rP   rj   ri   rk   rd   r   r   rX   r   r    r   r*   r   rankr   zerosint32r   appendr2   r   )r   r4   rt   r   r   r   rs   r   r   paddingsr   r   s              @r$   _tf_collate_batchr     s   i(1+e}--EMNX(((((;XN (1+&O!!NX!NN$6$>/TfBfjkBkxxqx)) "$$--..JL
 	
 .X..J%:+Ja+O!71<@RR
F778A;Dxxq	2H!!W,'#g,6HTN'#g,6HTNbffW	@V@VfWX  88F8##; Os   "Fc                   ^	 [        U S   [        [        45      (       a3  U  Vs/ s H&  n[        R                  " U[        R
                  S9PM(     n n[        U S   5      m	[        U	4S jU  5       5      nU(       a!  Ub	  T	U-  S:X  a  [        R                  " U SS9$ UR                  c#  [        SUR                  R                   S35      e[        S U  5       5      nUb  XR-  S:w  a
  XR-  S	-   U-  n[        R                  " [        U 5      U4UR                  U S   R                   S
9n[#        U 5       HA  u  pxUR$                  S:X  a  XUSUR&                  S   24'   M,  XXxR&                  S   * S24'   MC     U$ s  snf )r   r   rK   c              3   @   >#    U  H  n[        U5      T:H  v   M     g 7fr)   r   r   s     r$   r   '_numpy_collate_batch.<locals>.<genexpr>   r   r   Nr   r   r   c              3   8   #    U  H  n[        U5      v   M     g 7fr)   r   r   s     r$   r   r     r   r   r   )r   
fill_valuerL   r   )rP   rj   ri   r   rn   rd   r   r   rX   r   r    r   r*   r   fullr   rL   r   r   r   )
r   r4   rt   r   r   rs   r   r   r   r   s
            @r$   _numpy_collate_batchr     s    (1+e}--9ABABHHQbhh/B (1+&O!!NX!NN$6$>/TfBfjkBkxxq)) "$$--..JL
 	
 .X..J%:+Ja+O!71<@RR
WWCM:69CYCYaijkalararsF)
!!W,,31(a((()-41}}Q''))*	 *
 M3 Cs   -E?c                       \ rS rSr% Sr\\S'   Sr\\	\
\4   \S'   Sr\\   \S'   Sr\\   \S'   S	r\
\S
'   S\\\
\4      4S jrS rSrg)DataCollatorForMultipleChoicei  a  
Data collator that dynamically pads a batch of nested examples for multiple choice, so that all choices
of all examples have the same length.

Args:
    tokenizer ([`PreTrainedTokenizer`] or [`PreTrainedTokenizerFast`]):
        The tokenizer used for encoding the data.
    padding (`bool`, `str` or [`~utils.PaddingStrategy`], *optional*, defaults to `True`):
        Select a strategy to pad the returned sequences according to the model's padding side and padding index
        among:

        - `True` or `'longest'`: Pad to the longest sequence in the batch (or no padding if only a single sequence
          is provided).
        - `'max_length'`: Pad to a maximum length specified with the argument `max_length` or to the maximum
          acceptable input length for the model if that argument is not provided.
        - `False` or `'do_not_pad'` (default): No padding (i.e., can output a batch with sequences of different
          lengths).
    max_length (`int`, *optional*):
        Maximum length of the returned list and optionally padding length (see above).
    pad_to_multiple_of (`int`, *optional*):
        Pad the sequence to a multiple of the provided value.

        This is especially useful to enable the use of Tensor Cores on NVIDIA hardware with compute capability >=
        7.5 (Volta).
    return_tensors (`str`, *optional*, defaults to `"pt"`):
        The type of Tensor to return. Allowable values are "np", "pt" and "tf".
r4   Trr   Nrs   rt   r   r   r   c           
        ^ SS K nSUS   R                  5       ;   a  SOSnU Vs/ s H  oDR                  U5      PM     nn[        U5      n[        US   S   5      m[	        U4S jU 5       / S9nU R
                  R                  UU R                  U R                  U R                  SS9nUR                  5        V	V
s0 s H  u  pXR                  UTS	5      _M     nn	n
UR                  XRR                  S
9US'   U$ s  snf s  sn
n	f )Nr   rJ   rM   r~   c              3      >#    U  HJ  n[        T5       VVVs/ s H*  o!R                  5        VVs0 s H
  u  p4X4U   _M     snnPM,     snnnv   ML     g s  snnf s  snnnf 7fr)   )rangerY   )r   r   r   r_   r`   num_choicess        r$   r   ;DataCollatorForMultipleChoice.torch_call.<locals>.<genexpr>I  sN     i`hU\U;=OP=O==?3?41qA$w?3=OPP`h3Ps'   A"A
AA
	A"A
A")startr   rv   rK   )rO   r   popr   sumr4   r2   rr   rs   rt   rY   viewrW   rd   )r"   r   rO   r   r   rM   
batch_sizeflat_examplesr^   r_   r`   r   s              @r$   r   (DataCollatorForMultipleChoice.torch_call<  s    !(8A;+;+;+= =W8
9ABg++j)B]
(1+k23 i`hiqs

 ""LL#66 # 
 EJKKMRMDAFF:{B77MR,,v[[,Ah/ C* Ss   C??Dc                    SS K nSUS   R                  5       ;   a  SOSnU Vs/ s H  oDR                  U5      PM     nn[        U5      n[        US   S   5      nU VVV	V
s/ s HH  n[	        U5       VV	V
s/ s H*  oR                  5        V	V
s0 s H
  u  pXU   _M     sn
n	PM,     sn
n	nPMJ     nn	nnn
[        U/ 5      nU R                  R                  UU R                  U R                  U R                  SS9nUR                  5        V	V
s0 s H  u  pXR                  XUS45      _M     nn	n
UR                  XRR                  S9US'   U$ s  snf s  sn
n	f s  sn
n	nf s  sn
n	nnf s  sn
n	f )	Nr   rJ   rM   r~   r   rv   r   rK   )rb   r   r   r   r   rY   r   r4   r2   rr   rs   rt   reshaperk   rd   )r"   r#   r   r   r   rM   r   r   r   r_   r`   flattened_featuresr^   s                r$   r   %DataCollatorForMultipleChoice.tf_callZ  sX    '8A;+;+;+= =W8
9ABg++j)B]
(1+k23_g
_gT[E+<NO<Nq--/2/$!a1g/2<NO_g 	 
 !!3R8""LL#66 # 
 NS[[][]TQJJq{B"?@@][..vXX.Fh% C 3O
 \s5   E#E$
8EE!E)	E$
 E,EE$
r(   )r*   r+   r,   r-   rE   r   rG   rr   r   rx   rF   r   rs   r   rT   rt   r   r	   r   r   r   r   r.   r(   r'   r$   r   r     sj    8 '&15GU4o-.5 $J$(,,NC4S#X#7 <r'   r   c                       \ rS rSr% Sr\\S'   Sr\\	   \S'   Sr
\\\\4   \S'   Sr\\   \S'   Sr\\   \S	'   S
r\\S'   Sr\\S'   SS jrSrg)DataCollatorForSeq2Seqis  aB  
Data collator that will dynamically pad the inputs received, as well as the labels.

Args:
    tokenizer ([`PreTrainedTokenizer`] or [`PreTrainedTokenizerFast`]):
        The tokenizer used for encoding the data.
    model ([`PreTrainedModel`], *optional*):
        The model that is being trained. If set and has the *prepare_decoder_input_ids_from_labels*, use it to
        prepare the *decoder_input_ids*

        This is useful when using *label_smoothing* to avoid calculating loss twice.
    padding (`bool`, `str` or [`~utils.PaddingStrategy`], *optional*, defaults to `True`):
        Select a strategy to pad the returned sequences (according to the model's padding side and padding index)
        among:

        - `True` or `'longest'` (default): Pad to the longest sequence in the batch (or no padding if only a single
          sequence is provided).
        - `'max_length'`: Pad to a maximum length specified with the argument `max_length` or to the maximum
          acceptable input length for the model if that argument is not provided.
        - `False` or `'do_not_pad'`: No padding (i.e., can output a batch with sequences of different lengths).
    max_length (`int`, *optional*):
        Maximum length of the returned list and optionally padding length (see above).
    pad_to_multiple_of (`int`, *optional*):
        If set will pad the sequence to a multiple of the provided value.

        This is especially useful to enable the use of Tensor Cores on NVIDIA hardware with compute capability >=
        7.0 (Volta).
    label_pad_token_id (`int`, *optional*, defaults to -100):
        The id to use when padding the labels (-100 will be automatically ignored by PyTorch loss functions).
    return_tensors (`str`, *optional*, defaults to `"pt"`):
        The type of Tensor to return. Allowable values are "np", "pt" and "tf".
r4   NmodelTrr   rs   rt   r{   r|   r   r   c                    Uc  U R                   nSUS   R                  5       ;   a  SOSnX1S   R                  5       ;   a  U Vs/ s H  oDU   PM	     snOS nUb  [        S U 5       5      (       a  S nU VVVs/ s H.  oDR                  5        VVs0 s H  u  pgXc:w  d  M  Xg_M     snnPM0     nnnn[	        U R
                  UU R                  U R                  U R                  US9n	U R                  SL =(       d    U R                  [        R                  :H  n
UGb8  U
(       aY  [        US   U   [        5      (       a  [        U5      U	S'   GOU Vs/ s H  n[        R                  " U/ /5      PM     snU	S'   GOU R                  [        R                  :H  =(       a    U R                  S LnU(       d  [!        S U 5       5      OU R                  nU R                  b+  XR                  -   S-
  U R                  -  U R                  -  nU R
                  R"                  n[        US   U   [        5      (       aX  U Vs/ s HF  nUS	:X  a  XR$                  /U['        U5      -
  -  -   OU R$                  /U['        U5      -
  -  U-   PMH     snU	S'   OU Vs/ s H  nUS	:X  aS  [        R                  " U[        R(                  " U R$                  /U['        U5      -
  -  [        R*                  S
9/5      OR[        R                  " [        R(                  " U R$                  /U['        U5      -
  -  [        R*                  S
9U/5      PM     snU	S'   U	R-                  SS 5      b  US:X  a%  SS KnUR1                  U	S   UR*                  S
9U	S'   OZUS:X  a%  SS KnUR5                  U	S   UR*                  S
9U	S'   O/[        R(                  " U	S   [        R*                  S
9U	S'   OS U	S'   UbI  U R6                  b<  [9        U R6                  S5      (       a!  U R6                  R;                  U	S   S9nUU	S'   U	$ s  snf s  snnf s  snnnf s  snf s  snf s  snf )NrJ   r   rM   c              3   (   #    U  H  oS L v   M
     g 7fr)   r(   )r   rJ   s     r$   r   2DataCollatorForSeq2Seq.__call__.<locals>.<genexpr>  s     %Htms   rv   Fc              3   8   #    U  H  n[        U5      v   M     g 7fr)   r   )r   ls     r$   r   r     s     &>v!s1vvvr   r   r   rK   r   r   %prepare_decoder_input_ids_from_labels)rM   decoder_input_ids)r   r   r   rY   r9   r4   rr   rs   rt   r   
DO_NOT_PADrP   rj   r   concatenate
MAX_LENGTHr   r   r|   r   rn   rd   r3   rO   rW   rb   constantr   r1   r   )r"   r#   r   r   r   rM   r_   r`   non_labels_featuresr^   
no_paddingrJ   max_paddingmax_label_lengthr   rO   r   r   s                     r$   r%   DataCollatorForSeq2Seq.__call__  s)   !!00N '8A;+;+;+= =W8
BLYZP[P`P`PbBbX>X'*%X>hl #%H%H"H"HFdlmdlY`TAOTdlm 3NNLL#66)
 \\U*Xdllo>X>X.X
hqk*5t<<&*6lE(OPV&WPVur~~ubk'BPV&WE(O"llo.H.HHhT__dhMhFQ3&>v&>#>W[WfWf **6),C,CCaG223112 %  $~~::hqk*5t<<
 &,	' &,E (72 !8!8 9=MPSTYPZ=Z [["556:JSQVZ:WX[``a &,	'E(O* &,' &,E (72  % "$*A*A)BFVY\]bYcFc)dlnltlt u  ^^ "$*A*A)BFVY\]bYcFc)dlnltlt u % &,'E(O$ 99Xt$0%"',,uXekk,"Rh4''"$++eHoRXX+"Nh"$((5?"(("Kh"E(O 

&

$KLL $

 P PX]^fXg P h):E%&k ?
  Um& 'X''s8   P=:QQ!Q'Q"QAQB6QQr(   r)   )r*   r+   r,   r-   rE   r   rG   r   r   r   rr   r   rx   rF   r   rs   rT   rt   r|   r   r%   r.   r(   r'   r$   r   r   s  sl    B '&E8C=15GU4o-.5 $J$(,,""NCZr'   r   c            	           \ rS rSr% Sr\\S'   Sr\\S'   Sr	\
\S'   Sr\
\S	'   S
r\
\S'   Sr\\   \S'   Sr\\S'   Sr\\S'   Sr\\   \S'   S rS rS r\S"S j5       r S"S\S\\   S\\\4   4S jjrS\\\\   \\\\4   4      S\\\4   4S jrS\\\\   \\\\4   4      S\\\4   4S jr S"S\S\\   S\\\4   4S jjr!S\\\\   \\\\4   4      S\\\4   4S jr"S"S\S\\   S\\\4   4S  jjr#S!r$g)#DataCollatorForLanguageModelingi  a  
Data collator used for language modeling. Inputs are dynamically padded to the maximum length of a batch if they
are not all of the same length.

Args:
    tokenizer ([`PreTrainedTokenizer`] or [`PreTrainedTokenizerFast`]):
        The tokenizer used for encoding the data.
    mlm (`bool`, *optional*, defaults to `True`):
        Whether or not to use masked language modeling. If set to `False`, the labels are the same as the inputs
        with the padding tokens ignored (by setting them to -100). Otherwise, the labels are -100 for non-masked
        tokens and the value to predict for the masked token.
    mlm_probability (`float`, *optional*, defaults to 0.15):
        The probability with which to (randomly) mask tokens in the input, when `mlm` is set to `True`.
    mask_replace_prob (`float`, *optional*, defaults to 0.8):
        The probability with which masked tokens are replaced by the tokenizer's mask token (e.g., `[MASK]`).
        Defaults to 0.8, meaning 80% of the masked tokens will be replaced with `[MASK]`.
        Only works when `mlm` is set to `True`.
    random_replace_prob (`float`, *optional*, defaults to 0.1):
        The probability with which masked tokens are replaced by random tokens from the tokenizer's vocabulary.
        Defaults to 0.1, meaning 10% of the masked tokens will be replaced with random tokens. The remaining
        masked tokens (1 - mask_replace_prob - random_replace_prob) are left unchanged.
        Only works when `mlm` is set to `True`.
    pad_to_multiple_of (`int`, *optional*):
        If set, will pad the sequence to a multiple of the provided value.
    return_tensors (`str`):
        The type of Tensor to return. Allowable values are "np", "pt" and "tf".
    seed (`int`, *optional*):
        The seed to use for the random number generator for masking. If not provided, the global RNG will be used.

<Tip>

For best performance, this data collator should be used with a dataset having items that are dictionaries or
BatchEncoding, with the `"special_tokens_mask"` key, as returned by a [`PreTrainedTokenizer`] or a
[`PreTrainedTokenizerFast`] with the argument `return_special_tokens_mask=True`.

<Example Options and Expectations>

1. Default Behavior:
    - `mask_replace_prob=0.8`, `random_replace_prob=0.1`.
    - Expect 80% of masked tokens replaced with `[MASK]`, 10% replaced with random tokens, and 10% left unchanged.

2. All masked tokens replaced by `[MASK]`:
    - `mask_replace_prob=1.0`, `random_replace_prob=0.0`.
    - Expect all masked tokens to be replaced with `[MASK]`. No tokens are left unchanged or replaced with random tokens.

3. No `[MASK]` replacement, only random tokens:
    - `mask_replace_prob=0.0`, `random_replace_prob=1.0`.
    - Expect all masked tokens to be replaced with random tokens. No `[MASK]` replacements or unchanged tokens.

4. Balanced replacement:
    - `mask_replace_prob=0.5`, `random_replace_prob=0.4`.
    - Expect 50% of masked tokens replaced with `[MASK]`, 40% replaced with random tokens, and 10% left unchanged.

Note:
    The sum of `mask_replace_prob` and `random_replace_prob` must not exceed 1. If their sum is less than 1, the
    remaining proportion will consist of masked tokens left unchanged.

</Tip>
r4   Tmlmg333333?mlm_probability皙?mask_replace_probg?random_replace_probNrt   Ftf_experimental_compiler   r   seedc                    U R                   (       a"  U R                  R                  c  [        S5      eU R                  S:  d  U R                  S:  a  [        S5      eU R
                  U R                  -   S:  a  [        S5      eU R
                  S:  d  U R
                  S:  a  [        S5      eU R                  S:  d  U R                  S:  a  [        S5      e[        U R                  5      U l        [        U R
                  5      U l        [        U R                  5      U l        U R                  (       a#  SS K	nUR                  U R                  SS	9U l        S U l        g )
NzThis tokenizer does not have a mask token which is necessary for masked language modeling. You should pass `mlm=False` to train on causal language modeling instead.r   r   z*mlm_probability should be between 0 and 1.zHThe sum of mask_replace_prob and random_replace_prob should not exceed 1z,mask_replace_prob should be between 0 and 1.z.random_replace_prob should be between 0 and 1.T)jit_compile)r   r4   
mask_tokenr    r   r   r   rV   r   rb   functiontf_mask_tokens	generator)r"   r   s     r$   __post_init__-DataCollatorForLanguageModeling.__post_init__C  s.   88119\  !#t';';a'?IJJ!!D$<$<<q@ghh!!A%)?)?!)CKLL##a'4+C+Ca+GMNN$T%9%9:!&t'='=!>#()A)A#B ''#"$++d.A.At+"TDr'   c                    U R                   S:X  a#  SS KnUR                  5       R                  U5      $ U R                   S:X  a)  SS KnUR
                  R                  R                  U5      $ SS KnUR
                  R                  U5      $ )Nr   r   r   )	r   rO   	Generatormanual_seedrb   random	from_seednumpydefault_rng)r"   r   rO   r   r   s        r$   get_generator-DataCollatorForLanguageModeling.get_generator]  so    $&??$0066  D(#99&&006699((..r'   c                 V   [         R                  " 5       R                  S:X  a!  U R                  U R                  5      U l        g SS KnUR                  R                  R                  5       nUc  Sn[        U5      eU R                  U R                  UR                  -   5      U l        g )NMainProcessr   )zZWorker process information is not available for seeding the generator. This may be becausezZyou are using multiprocessing without using a PyTorch DataLoader. The `seed` parameter canzVonly be used when using multiprocessing with a PyTorch DataLoader. Please either use azAsingle process or use a PyTorch DataLoader with multiple workers.)mpcurrent_processnamer	  r   r   rO   utilsdataget_worker_infor    id)r"   rO   worker_infoerror_strings       r$   
create_rng*DataCollatorForLanguageModeling.create_rngk  s    $$5!//		:DN ++**::<K"  !..!//		KNN0JKDNr'   c                    SS K nUR                  X5      nU(       a2  UR                  XBR                  U SS5      -
  S:  UR                  5      $ UR                  XCR
                  R                  U SS5      -
  S:  UR                  5      $ )Nr   r   )rb   fillcastuniformrx   r  )r   probabilityr   r   prob_matrixs        r$   tf_bernoulli,DataCollatorForLanguageModeling.tf_bernoulli  st    gge1 77;):):5!Q)GG1LbggVV77;):):5!Q)GG1LbggVVr'   inputsspecial_tokens_maskr:   c                    SSK nUR                  X1R                  5      nUR                  U5      nU R	                  X`R
                  U R                  5      U) -  nUR                  XqS5      nU R	                  X`R                  U R                  5      U-  n	UR                  XU5      nU R                  S:X  d  U R                  S:X  a  X4$ SU R                  -
  n
U R                  U
-  nU R	                  XkU R                  5      U-  U	) -  nU R                  (       a%  U R                  R                  XbUR                  S9nO$UR                  R                  XbUR                  S9nUR                  XU5      nX4$ )g
Prepare masked tokens inputs/labels for masked language modeling: 80% MASK, 10% random, 10% original.
r   Nr{   r   maxvalrL   )rb   r  rL   r   r  r   r   wherer   r   r  r  )r"   r   
vocab_sizemask_token_idr!  r   input_shapemasked_indicesrM   indices_replacedremaining_probrandom_replace_prob_scaledindices_randomrandom_wordss                 r$   r   .DataCollatorForLanguageModeling.tf_mask_tokens  sn    	 ||<hhv& **;8L8Ldnn]at`tt.$7  ,,[:P:PRVR`R`adrr*6B!!Q&$*B*Ba*G>!T333 &*%=%=%N" kt~~V  	 >>>>11+X^XdXd1eL99,,[SYS_S_,`L.? ~r'   r   c                    SS K nU R                  (       a  U R                  c  U R                  5         [	        US   [
        5      (       a!  [        U R                  USU R                  S9nO S[        XR                  U R                  S90nUR                  SS 5      nU R                  (       a  Uc}  US   R                  5       R                  5        Vs/ s H  nU R                  R                  USS9PM     nnUR                  UR!                  XBR"                  S	9UR$                  5      nOUR                  XBR$                  5      nU R'                  UR                  US   UR"                  5      UU R                  R(                  [+        U R                  5      S
9u  US'   US'   U$ US   nU R                  R,                  b*  UR/                  X`R                  R,                  :H  SU5      nOUR1                  U5      nXcS'   U$ s  snf )Nr   r   r   rt   r~   rt   r!  Talready_has_special_tokensrK   )r!  r(  r'  rM   r{   )rb   r   r   r  rP   r   r9   r4   rt   r   r   r   r  r   get_special_tokens_maskr  rk   rd   rx   r   r(  r   r   r&  identity)r"   r   r   r^   r!  valrM   s          r$   r   'DataCollatorForLanguageModeling.tf_call  s   99/ OO hqk7++6RVRiRiE
 .x\`\s\stE
 $ii(=tD88"*  %[1779@@B'B NN::3[_:`B $ '
 ')ggb.B.BCV^f^f.B.gikipip&q#&(gg.A77&K#262E2Ek*BHH5$7"nn::t~~.	 3F 3/E+h  ;'F~~**6&NN,G,G"GvVV,$(O-'s   $Hc                 (   U R                   (       a  U R                  c  U R                  5         [        US   [        5      (       a!  [        U R                  USU R                  S9nO S[        XR                  U R                  S90nUR                  SS 5      nU R                  (       a  U R                  US   US9u  US'   US'   U$ US   R                  5       nU R                  R                  b  S	XDU R                  R                  :H  '   XBS'   U$ )
Nr   r   r2  r~   r3  r!  r!  rM   r{   )r   r   r  rP   r   r9   r4   rt   r   r   r   torch_mask_tokenscloner   r"   r   r^   r!  rM   s        r$   r   *DataCollatorForLanguageModeling.torch_call  s	    99/ OOhqk7++6RVRiRiE
 1(NN_c_v_vwE
 $ii(=tD88262H2Hk"8K 3I 3/E+h 	 ;'--/F~~**6@D!<!<<=$(Or'   c                 l   SSK nUR                  5       nUR                  UR                  U R                  5      nUcQ  UR                  5        Vs/ s H  o`R                  R                  USS9PM     nnUR                  X#R                  S9nOUR                  5       nUR                  USS9  UR                  XPR                  S9R                  5       nS	XG) '   UR                  UR                  UR                  U R                  5      U R                  S9R                  5       U-  nU R                  R                  U R                  R                  5      X'   U R                  S
:X  d  U R                   S:X  a  X4$ S
U R                  -
  n	U R                   U	-  n
UR                  UR                  UR                  U
5      U R                  S9R                  5       U-  U) -  nUR#                  [%        U R                  5      UR                  UR&                  U R                  S9nX   X'   X4$ s  snf )r#  r   NTr4  rK           valuer   r{   r   rL   r   )rO   r=  r   r   r   r   r4   r6  rW   rx   masked_fill_	bernoullir   r   convert_tokens_to_idsr   r   r   r   rU   )r"   r   r!  rO   rM   probability_matrixr8  r*  r+  r,  r-  r.  r/  s                r$   r<  1DataCollatorForLanguageModeling.torch_mask_tokens
  s    	"ZZd6J6JK&hnhuhuhw#hwad66sW[6\hw   # #(,,/B**,"U"5":":"<''(;3'G);~~V[[]"& OOEJJv||T5K5KLX\XfXfOglln 	 $(>>#G#GHaHa#b !!Q&$*B*Ba*G>!T333 &*%=%=%N" OOEJJv||5OP\`\j\jOkppr  	
 }}S%8&,,ejjdhdrdr}s!-!= ~K#s   #H1c                 4   U R                   (       a  U R                  c  U R                  5         [        US   [        5      (       a!  [        U R                  USU R                  S9nO S[        XR                  U R                  S90nUR                  SS 5      nU R                  (       a  U R                  US   US9u  US'   US'   U$ [        R                  " US   5      nU R                  R                  b  S	XDU R                  R                  :H  '   XBS'   U$ )
Nr   r   r2  r~   r3  r!  r;  rM   r{   )r   r   r  rP   r   r9   r4   rt   r   r   r   numpy_mask_tokensr   copyr   r>  s        r$   r   *DataCollatorForLanguageModeling.numpy_call;  s	    99/ OOhqk7++6RVRiRiE
 1(NN_c_v_vwE
 $ii(=tD88262H2Hk"8K 3I 3/E+h 	 WWU;/0F~~**6@D!<!<<=$(Or'   c                    [         R                  " U5      n[         R                  " UR                  U R                  5      nUcQ  UR                  5        Vs/ s H  oPR                  R                  USS9PM     nn[         R                  " U[        S9nOUR                  [        5      nSXB'   U R                  (       a8  U R                  R                  SXDR                  S9R                  [        5      nO;[         R                  R                  SXDR                  S9R                  [        5      nSX6) '   U R                  (       aF  U R                  R                  SU R                  UR                  S9R                  [        5      U-  nOI[         R                  R                  SU R                  UR                  S9R                  [        5      U-  nU R                  R                  X'   U R                  S:X  d  U R                   S:X  a  X4$ SU R                  -
  nU R                   U-  n	U R                  (       a  U R                  R                  SXR                  S9R                  [        5      U-  U) -  n
U R                  R#                  S[%        U R                  5      [         R&                  " U
5      [         R(                  S9nO[         R                  R                  SXR                  S9R                  [        5      U-  U) -  n
[         R                  R+                  S[%        U R                  5      [         R&                  " U
5      [         R(                  S9nXU
'   X4$ s  snf )	r#  Tr4  rK   r   r   r   r{   lowhighr   rL   )r   rM  r   r   r   r   r4   r6  rn   rx   astyper   binomialr  r   r(  r   integersr   count_nonzerord   r   )r"   r   r!  rM   rI  r8  r*  r+  r,  r-  r.  r/  s               r$   rL  1DataCollatorForLanguageModeling.numpy_mask_tokensY  s    WWV\\43G3GH&hnhuhuhw#hwad66sW[6\hw   # #%((+>d"K"5"<"<T"B23/>>!^^44Q8JQiQi4jqqrvwNYY//3ELdLd/ellmqrN"& >>''4+A+A'U\\]abess 
 		""1d&<&<6<<"PWWX\]`nn  $(>>#?#? !!Q&$*B*Ba*G>!T333 &*%=%=%N">>''+ELL'Y``aef !##$ 
  >>22C/b6F6F~6V^`^f^f 3 L
 		""1&@||"T[[\`a !##$ 
 99,,C/b6F6F~6V^`^f^f - L ".~ ~o#s   #M0)r   r   r   r   r   r)   )%r*   r+   r,   r-   rE   r   rG   r   rx   r   rV   r   r   rt   r   rT   r   r   rF   r   r   r	  r  staticmethodr  r   r   r   r	   r   r   r   r   r<  r   rL  r.   r(   r'   r$   r   r     s   :x '&C!OU!"u"!$$(,,$)T)NCD(3-4/L0 	W 	W \`--KSTW=-	sCx-^,U49c4S>+I%J K ,PTUXZ]U]P^ ,\4d3id38n.L(M#N SWX[]`X`Sa </ /(3- /[`adfiai[j /b4d3id38n.L(M#N SWX[]`X`Sa <? ?(3- ?[`adfiai[j ? ?r'   r   c                   P   \ rS rSrSrS\\\\   \\	\
\4   4      S\	\
\4   4S jrS\\\\   \\	\
\4   4      S\	\
\4   4S jrS\\\\   \\	\
\4   4      S\	\
\4   4S jrS rSS	\\
   4S
 jjrS\S\S\\\4   4S jrS\S\S\\\4   4S jrS\S\S\\\4   4S jrSrg)DataCollatorForWholeWordMaski  a  
Data collator used for language modeling that masks entire words.

- collates batches of tensors, honoring their tokenizer's pad_token
- preprocesses batches for masked language modeling

<Tip>

This collator relies on details of the implementation of subword tokenization by [`BertTokenizer`], specifically
that subword tokens are prefixed with *##*. For tokenizers that do not adhere to this scheme, this collator will
produce an output that is roughly equivalent to [`.DataCollatorForLanguageModeling`].

</Tip>r   r:   c                    U R                   (       a  U R                  c  U R                  5         [        US   [        5      (       a  U Vs/ s H  o"S   PM	     nnOUnU Vs/ s H  nSU0PM	     nn[        X0R                  U R                  S9n/ nU H  n/ n[        US   5       H/  nU R                  R                  U5      nUR                  U5        M1     SU;   a>  [        US   5      n	[        US   5      n
[        U
5       H  nX;   d  M
  SXk   -   Xk'   M     UR                  U R                  U5      5        M     [        XPR                  U R                  S9nU R                  XL5      u  pXS.$ s  snf s  snf Nr   r~   r3  chinese_ref##r~   rM   )r   r   r  rP   r   r   r4   rt   r   _convert_id_to_tokenr   r   r   _whole_word_maskr<  r"   r   r   r~   batch_inputmask_labels
ref_tokensr  tokenref_poslen_seqr   
batch_maskr   rM   s                  r$   r   'DataCollatorForWholeWordMask.torch_call  f   99/ OOhqk7++19:A;I:I I2:;(Qa((H;*9nnY]YpYpqAJQ{^,;;B?!!%( -
 ! =!12an-wA|(,z}(<
 ( t44Z@A  *+~~Z^ZqZqr
//H#661 ; <   E7"E<c                 >   SS K nU R                  (       a  U R                  c  U R                  5         [	        US   [
        5      (       a  U Vs/ s H  o3S   PM	     nnOUnU Vs/ s H  nSU0PM	     nn[        X@R                  U R                  S9n/ nU H  n/ n[        US   5       H/  nU R                  R                  U5      n	UR                  U	5        M1     SU;   a>  [        US   5      n
[        US   5      n[        U5       H  nX;   d  M
  SX|   -   X|'   M     UR                  U R                  U5      5        M     [        X`R                  U R                  S9nU R                  UR!                  XRR"                  5      U5      u  pXS.$ s  snf s  snf r\  )rb   r   r   r  rP   r   r   r4   rt   r   r`  r   r   r   ra  r   r  rd   )r"   r   r   r   r~   rc  rd  re  r  rf  rg  rh  r   ri  r   rM   s                   r$   r   $DataCollatorForWholeWordMask.tf_call  sx   99/ OOhqk7++19:A;I:I I2:;(Qa((H;'	>>VZVmVmnAJQ{^,;;B?!!%( -
 ! =!12an-wA|(,z}(<
 ( t44Z@A  '{NNW[WnWno
,,RWW[((-KZX#661 ; <s   F&Fc                    U R                   (       a  U R                  c  U R                  5         [        US   [        5      (       a  U Vs/ s H  o"S   PM	     nnOUnU Vs/ s H  nSU0PM	     nn[        X0R                  U R                  S9n/ nU H  n/ n[        US   5       H/  nU R                  R                  U5      nUR                  U5        M1     SU;   a>  [        US   5      n	[        US   5      n
[        U
5       H  nX;   d  M
  SXk   -   Xk'   M     UR                  U R                  U5      5        M     [        XPR                  U R                  S9nU R                  XL5      u  pXS.$ s  snf s  snf r\  )r   r   r  rP   r   r   r4   rt   r   r`  r   r   r   ra  rL  rb  s                  r$   r   'DataCollatorForWholeWordMask.numpy_call  rk  rl  c                    U R                   c  [        R                  " U5        U$ U R                  S:X  a<  SS KnUR                  [        U5      U R                  S9nU Vs/ s H  oAU   PM	     sn$ U R                  S:X  a  SS KnU R                  R                  S5      S   nUR                  R                  R                  UR                  [        U5      5      US9R                  5       R                  5       nU Vs/ s H  oAU   PM	     sn$ U R                  S:X  a  U R                  R                  U5        U$ g s  snf s  snf )Nr   r   rD  r   r   )r   r   )r   r  shuffler   rO   randpermr   r   rb   
make_seedsexperimentalstateless_shuffler   r  r   )r"   cand_indexesrO   indicesr   r   r   s          r$   _shuffle%DataCollatorForWholeWordMask._shuffle  s   99NN<( $&nnS%6$..nQG-45WOW55  D(#>>,,Q/2Dii,,>>rxxLHY?Zae>fllnuuwG-45WOW55  D(NN""<0 ) 6 6s   !D? Einput_tokensc                 ~   [        U R                  [        [        45      (       d  [        R
                  " S5        / n[        U5       H`  u  pEUS:X  d  US:X  a  M  [        U5      S:  a,  UR                  S5      (       a  US   R                  U5        MN  UR                  U/5        Mb     U R                  U5      n[        U[        S[        [        [        U5      U R                  -  5      5      5      5      n/ n[!        5       nU H\  n	[        U5      U:  a    OL[        U5      [        U	5      -   U:  a  M1  U	 H%  n
UR#                  U
5        UR                  U
5        M'     M^     [        U5      [        U5      :w  a  [%        S5      e['        [        U5      5       Vs/ s H  oDU;   a  SOSPM     nnU$ s  snf )	z=
Get 0/1 labels for masked tokens with whole word mask proxy
zDataCollatorForWholeWordMask is only suitable for BertTokenizer-like tokenizers. Please refer to the documentation for more information.z[CLS]z[SEP]r   r^  r   z?Length of covered_indexes is not equal to length of masked_lms.r   )rP   r4   r   r   warningswarnr   r   
startswithr   ry  minr   rT   roundr   setaddr    r   )r"   r{  max_predictionsrw  r   rf  num_to_predict
masked_lmscovered_indexes	index_setindexrd  s               r$   ra  -DataCollatorForWholeWordMask._whole_word_mask(  s    $..=:K*LMMMMJ
 !,/HA5G#3< A%%*:*:4*@*@R ''*##QC( 0 }}\2_c!Ss<?PSWSgSg?g9h5i.jk
%%I:.0 :Y/.@"##E*!!%( # & 3z?2^__AFs<GXAYZAYA0qa7AYZ [s   #F:r   rd  c                    SSK nU R                  R                  c  [        S5      eUR	                  5       nUnUR                  5        Vs/ s H  o`R                  R                  USS9PM     nnUR                  UR                  XsR                  S9SS9  U R                  R                  b5  UR                  U R                  R                  5      nUR                  USS9  UR                  5       n	S	XI) '   UR                  UR                  UR                  U R                   5      U R"                  S
9R                  5       U	-  n
U R                  R%                  U R                  R                  5      X'   U R                   S:X  d  U R&                  S:X  a  X4$ SU R                   -
  nU R&                  U-  nUR                  UR                  UR                  U5      U R"                  S
9R                  5       U	-  U
) -  nUR)                  [+        U R                  5      UR                  UR,                  U R"                  S9nX   X'   X4$ s  snf )
Prepare masked tokens inputs/labels for masked language modeling: 80% MASK, 10% random, 10% original. Set
'mask_labels' means we use whole word mask (wwm), we directly mask idxs according to it's ref.
r   NThis tokenizer does not have a mask token which is necessary for masked language modeling. Remove the --mlm flag if you want to use this tokenizer.Tr4  rK   rA  rB  r{   rD  r   rE  )rO   r4   r   r    r=  r   r6  rF  rW   rx   r   eqr   rG  r   r   r   r   rH  r   r   r   rU   )r"   r   rd  rO   rM   rI  r8  r!  padding_maskr*  r+  r,  r-  r.  r/  s                  r$   r<  .DataCollatorForWholeWordMask.torch_mask_tokensP  s:   
 	>>$$,A   ) ekdqdqds
ds]`NN223SW2Xds 	 
 	''5HPZPZ([cf'g>>##/!99T^^%@%@AL++L+D+002"& OOEJJv||T5K5KLX\XfXfOglln 	 $(>>#G#GHaHa#b !!Q&$*B*Ba*G>!T333 &*%=%=%N" OOEJJv||5OP\`\j\jOkppr  	
 }}S%8&,,ejjdhdrdr}s!-!= ~K
s   #Ic                 |   SSK nUR                  U5      nU R                  R                  c  [	        S5      eUR                  U5      nUR                  X#R                  5      nU Vs/ s H  opR                  R                  USS9PM     nnXcR                  XR                  S9) -  nU R                  R                  b  XR                  R                  :H  n	Xi) -  nUR                  XaS5      nU R                  X@R                  U R                  5      U-  n
UR                  XR                  R                  U5      nU R                  S:X  d  U R                   S:X  a  X4$ SU R                  -
  nU R                   U-  nU R                  XLU R                  5      U-  U
) -  nU R                  (       a9  U R                  R#                  U[%        U R                  5      UR&                  S	9nO8UR(                  R#                  U[%        U R                  5      UR&                  S	9nUR                  XU5      nX4$ s  snf )
r  r   Nr  Tr4  rK   r{   r   r$  )rb   r   r4   r   r    r7  r  rx   r6  r   r   r&  r  r   r   r(  r   r  r   rd   r  )r"   r   rd  r   r)  rM   r*  r8  r!  r  r+  r,  r-  r.  r/  s                  r$   r   +DataCollatorForWholeWordMask.tf_mask_tokens  s"   
 	 hhv&>>$$,A  V$ gg6 ek
dj]`NN223SW2Xdj 	 
 (773Fgg7+V*VV>>##/!^^%@%@@L+m;N .$7  ,,[:P:PRVR`R`adrr*NN,H,H&Q!!Q&$*B*Ba*G>!T333 &*%=%=%N" kt~~V  	 >>>>11+c$..FYacaiai1jL99,,[T^^AT\^\d\d,eL.? ~Q
s   (#H9c                 :   U R                   R                  c  [        S5      e[        R                  " U5      nUR                  [        5      nUR                  5        Vs/ s H  oPR                   R                  USS9PM     nnSU[        R                  " U[        S9'   U R                   R                  b  X0R                   R                  :H  nSXG'   SX4) '   U R                  (       aF  U R                  R                  SU R                  UR                  S9R                  [        5      U-  nOI[        R                   R                  SU R                  UR                  S9R                  [        5      U-  nU R                   R#                  U R                   R                  5      X'   U R                  S:X  d  U R$                  S:X  a  X4$ SU R                  -
  n	U R$                  U	-  n
U R                  (       a  U R                  R                  SXR                  S9R                  [        5      U-  U) -  nU R                  R'                  S[)        U R                   5      UR                  [        R*                  S	9nO[        R                   R                  SXR                  S9R                  [        5      U-  U) -  n[        R                   R-                  S[)        U R                   5      UR                  [        R*                  S	9nX   X'   X4$ s  snf )
r  r  Tr4  r   rK   r{   r   r   rP  )r4   r   r    r   rM  rS  rx   r   r6  rn   r   r   r   rT  r   r   r  rH  r   rU  r   rd   r   )r"   r   rd  rM   r*  r8  r!  r  r+  r,  r-  r.  r/  s                r$   rL  .DataCollatorForWholeWordMask.numpy_mask_tokens  s   
 >>$$,A   %++D1 ekdqdqds
ds]`NN223SW2Xds 	 
 EFrxx 34@A>>##/!^^%@%@@L+,N("& >>''4+A+A'U\\]abess 
 		""1d&<&<6<<"PWWX\]`nn  $(>>#G#GHaHa#b !!Q&$*B*Ba*G>!T333 &*%=%=%N">>''+ELL'Y``aef !##$ 
  >>22qs4>>?RY_YeYemomumu2vL 		""1&@||"T[[\`a !##$ 
 99,,T^^9LSYS_S_gigogo,pL!-!= ~a
s    #Lr(   N)i   )r*   r+   r,   r-   rE   r	   r   rT   r   r   rF   r   r   r   ry  ra  r   r<  r   rL  r.   r(   r'   r$   rZ  rZ    s?   74d3id38n.L(M#N 7SWX[]`X`Sa 7B!7U49c4S>+I%J K !7PTUXZ]U]P^ !7F74d3id38n.L(M#N 7SWX[]`X`Sa 7B 0&T#Y &P6 6# 6%S/ 6p:S :s :uS#X :x? ?# ?%S/ ?r'   rZ  c                     [        U [        5      (       a  U $ [        U S5      (       a  U R                  5       n U R	                  5       $ )Nr  )rP   rj   r1   r  r   )r   s    r$   r   r     s8    !T	G		GGI88:r'   c                   h    \ rS rSrSrS rS\\\\	4      S\\\	4   4S jr
S\	S\\	\	\	4   4S jrS	rg
)DataCollatorForSOPi  z
Data collator used for sentence order prediction task.

- collates batches of tensors, honoring their tokenizer's pad_token
- preprocesses batches for both masked language modeling and sentence order prediction
c                 :    [         R                  " S[        5        g )NzDataCollatorForSOP is deprecated and will be removed in a future version, you can now use DataCollatorForLanguageModeling instead.)r}  r~  FutureWarning)r"   argskwargss      r$   __init__DataCollatorForSOP.__init__  s    7	
r'   r   r:   c                 h   SS K nSSKJn  U Vs/ s H  oDS   PM	     nn[        XPR                  5      nU R                  U5      u  pVnU Vs/ s H  oDS   PM	     nnU" USU R                  R                  S9nU Vs/ s H  oDS   PM	     n	nUR                  U	5      n
UUUUU
S.$ s  snf s  snf s  snf )	Nr   )pad_sequencer~   token_type_idsT)batch_firstpadding_valuesentence_order_label)r~   rM   attention_maskr  r  )rO   torch.nn.utils.rnnr  r   r4   mask_tokensr   rX   )r"   r   rO   r  r   r~   rM   r  r  sop_label_listr  s              r$   r%   DataCollatorForSOP.__call__  s    39ABg[)	B(NNC	,0,<,<Y,G)	>CKL8"238L%n$VZVdVdVqVqrIQRg"89R${{>: #,,$8
 	
 C M Ss   B%B*<B/r   c                    SSK nU R                  R                  c  [        S5      eUR	                  5       nUR                  UR                  U R                  5      nUR                  5        Vs/ s H  oPR                  R                  USS9PM     nnUR                  UR                  XbR                  S9SS9  U R                  R                  b5  UR                  U R                  R                  5      nUR                  USS9  UR!                  U5      R                  5       nU) R#                  5       n	U R                  R                  b5  UR                  U R                  R                  5      n
U	R                  U
S	S9  S
X8) '   UR!                  UR                  UR                  S5      5      R                  5       U-  nU R                  R%                  U R                  R                  5      X'   UR!                  UR                  UR                  S5      5      R                  5       U-  U) -  nUR'                  [)        U R                  5      UR                  UR*                  S9nX   X'   XU	4$ s  snf )z
Prepare masked tokens inputs/labels/attention_mask for masked language modeling: 80% MASK, 10% random, 10%
original. N-gram not applied yet.
r   Nr  Tr4  rK   rA  rB  g      ?r{   r   g      ?)rO   r4   r   r    r=  r   r   r   r   r6  rF  rW   rx   r   r  r   rG  rV   rH  r   r   rU   )r"   r   rO   rM   rI  r8  r!  r  r*  r  attention_padding_maskr+  r.  r/  s                 r$   r  DataCollatorForSOP.mask_tokens5  s'   
 	>>$$,A 
 "ZZd6J6JKdjdqdqds
ds]`NN223SW2Xds 	 
 	''5HPZPZ([cf'g>>##/!99T^^%@%@AL++L+D);<AAC)/002>>##/%+YYt~~/J/J%K"''(>c'J"& !??5::fllC+HINNPSaa#'>>#G#GHaHa#b  FLL#)FGLLNQ__csbss}}S%8&,,ejj}Y!-!= ~--3
s   /#I7r(   N)r*   r+   r,   r-   rE   r  r	   r   rF   r   r%   r   r  r.   r(   r'   r$   r  r    sR    

d38n!5 
$sCx. 
.).# ).%S#*> ).r'   r  c                   h   \ rS rSr% Sr\\S'   Sr\\S'   Sr	\
\S'   Sr\\S	'   S
\\\\
   \\\\4   4      S\\\4   4S jrS
\\\\
   \\\\4   4      S\\\4   4S jrS
\\\\
   \\\\4   4      S\\\4   4S jrS\S\\\\\4   4S jrS\S\\\\\4   4S jrS\S\\\\\4   4S jrSrg)*DataCollatorForPermutationLanguageModelingia  z
Data collator used for permutation language modeling.

- collates batches of tensors, honoring their tokenizer's pad_token
- preprocesses batches for permutation language modeling with procedures specific to XLNet
r4   gUUUUUU?plm_probability   max_span_lengthr   r   r   r:   c                     [        US   [        5      (       a  U Vs/ s H  o"S   PM	     nn[        XR                  5      nU R	                  U5      u  pEpgXEXgS.$ s  snf Nr   r~   )r~   	perm_masktarget_mappingrM   )rP   r   r   r4   r<  r"   r   r   r^   r   r  r  rM   s           r$   r   5DataCollatorForPermutationLanguageModeling.torch_callo  \    hqk7++0891+H9$X~~>484J4J54Q1>#~pp :   Ac                     [        US   [        5      (       a  U Vs/ s H  o"S   PM	     nn[        XR                  5      nU R	                  U5      u  pEpgXEXgS.$ s  snf r  )rP   r   r   r4   r   r  s           r$   r   2DataCollatorForPermutationLanguageModeling.tf_callv  s\    hqk7++0891+H9!(NN;484G4G4N1>#~pp :r  c                     [        US   [        5      (       a  U Vs/ s H  o"S   PM	     nn[        XR                  5      nU R	                  U5      u  pEpgXEXgS.$ s  snf r  )rP   r   r   r4   rL  r  s           r$   r   5DataCollatorForPermutationLanguageModeling.numpy_call}  r  r  r   c           
         SSK nU R                  R                  c  [        S5      eUR	                  S5      S-  S:w  a  [        S5      eUR                  5       nUR                  UR                  SUR                  S9nUR                  UR	                  S5      UR	                  S5      UR	                  S5      4UR                  S9n[        UR	                  S5      5       H  nSnUR	                  S5      nXx:  a  UR                  SU R                  S-   S5      R                  5       n	[        XR                   -  5      n
XrR                  X-
  S-   S5      R                  5       -   nSXFXU	-   24'   Xz-  nXx:  a  M  UR#                  UR	                  S5      5      XV'   M     UR%                  UR'                  5        Vs/ s H  oR                  R)                  US	S
9PM     snUR                  S9nUR+                  USS9  U R                  R,                  b5  UR/                  U R                  R0                  5      nUR+                  USS9  WU-  ) nU R                  R2                  X'   SX4) '   UR                  UR	                  S5      UR	                  S5      UR	                  S5      4UR                  S9n[        UR	                  S5      5       GH  nUR5                  UR	                  S5      5      nUR7                  SUR	                  S5      S-  45      R9                  SS5      nUUR;                  UR	                  S5      S-  5         nUR=                  UR9                  SS5      5      nUR+                  XF   ) X   -  S5        UR7                  UR	                  S5      S45      UR7                  SUR	                  S5      45      :*  XF   -  UU'   GM
     UR?                  5       UXSR?                  5       4$ s  snf )  
The masked tokens to be predicted for a particular sequence are determined by the following algorithm:

    0. Start from the beginning of the sequence by setting `cur_len = 0` (number of tokens processed so far).
    1. Sample a `span_length` from the interval `[1, max_span_length]` (length of span of tokens to be masked)
    2. Reserve a context of length `context_length = span_length / plm_probability` to surround span to be
       masked
    3. Sample a starting point `start_index` from the interval `[cur_len, cur_len + context_length -
       span_length]` and mask tokens `start_index:start_index + span_length`
    4. Set `cur_len = cur_len + context_length`. If `cur_len < max_len` (i.e. there are tokens remaining in the
       sequence to be processed), repeat from Step 1.
r   NThis tokenizer does not have a mask token which is necessary for permutation language modeling. Please add a mask token if you want to use this tokenizer.r   r   This collator requires that sequence lengths be even to create a leakage-free perm_mask. Please see relevant comments in source code for details.rK   )r   Tr4  rA  rB  r{   r   ) rO   r4   r   r    r   r=  r   r   rx   r   re   r   r   r  rS   rT   r  eyerW   r   r6  rF  r   r  r   r(  aranger   	transposers  flattenrU   )r"   r   rO   rM   r*  r  r   cur_lenmax_lenspan_lengthcontext_lengthstart_indexr8  r!  r  non_func_maskr  
perm_indexs                     r$   r<  <DataCollatorForPermutationLanguageModeling.torch_mask_tokens  s    	>>$$,N 
 ;;q>A"A 
 FLL!5::Ffkk!nfkk!nfkkRSn%U]b]j]jkv{{1~&AGkk!nG##mmAt/C/Ca/GNSSU!$[3G3G%G!H%n6RUV6VX\(](b(b(ddMN+k0I"IIJ) # !&		&++a. 9N% '( $llekereretuet^a^^33CTX3Yetu** + 
 	##$7s#C>>##/!99T^^%@%@AL''C'@ ')<<=!%!=!="&KKQQQ PX]XeXeKf	v{{1~&A fkk!n5J#++RQ11D,EFPPQRTUVJ#ENN6;;q>Q3F$GHJz';';Aq'ABJ
 ##^%6$69I$I2N
 ""FKKNA#67:;M;MqRXR]R]^_R`Na;bb!"IaL1 '8 {{}iFFY vs   #Pc           
         SSK nU R                  R                  c  [        S5      eUR	                  U5      S   S-  S:w  a  [        S5      eUR                  U5      n[        R                  " UR                  R                  5       S[        S9nUR	                  U5      n[        R                  " US   US   US   4[        R                  S9n[        [        U5      5       H  nSnUR	                  U5      S   n	X:  aY  [        SU R                  S-   5      n
[!        XR"                  -  5      nU[        SX-
  S-   5      -   nSXGXU
-   24'   X-  nX:  a  MY  [        R$                  " US   5      Xg'   M     UR'                  UR)                  U5      UR                  S9nUR)                  U5      nUR)                  UR+                  5       R-                  5        Vs/ s H  nU R                  R/                  USS	9PM     sn5      nUR'                  XR                  S9nXN) -  nU R                  R0                  b  X0R                  R2                  :H  nXO) -  nWU-  ) nUR5                  X@R                  R6                  U5      nUR5                  XCS
5      n/ n[        [        U5      5       H  nUR                  US   5      nUR9                  UR;                  USUS   S-  45      5      nUR<                  R?                  U5      nUR;                  UR9                  U5      S5      nUR5                  XG   ) UU   -  SU5      nURA                  UR;                  UUS   S45      UR;                  USUS   45      :*  XG   -  5        M     URC                  USS9nUR'                  XRD                  5      UR'                  UUR                  5      XbR'                  X2RD                  5      4$ s  snf )r  r   Nr  r   r   r  rK   Tr4  r{   r   )r   r   )#rb   r4   r   r    r   r7  r   r   as_listrx   r   re   r   r   r   r  rT   r  r  r  rk   r  r   r6  r   r   r&  r(  r  r   r  rr  r   rX   rd   )r"   r   r   rM   r*  labels_shaper  r   r  r  r  r  r  r8  r!  r  r  r  r  s                      r$   r   9DataCollatorForPermutationLanguageModeling.tf_mask_tokens  s    	 >>$$,N 
 88FA"a'A 
 V$!5!5!7$Gxx'<?LO\RS_"U]_]g]ghs6{#AGhhv&q)G#%a)=)=)AB!$[3G3G%G!H%>3ORS3S(TTMN+k0I"IIJ) # !#|A 7N% $& !5!5n!ERWWU--n= 22 "<<>0022C 66sW[6\2
 !gg&9gI'*>>>>##/!^^%@%@@L+m;N ')<<=...*F*FO.$7	s6{#A ,q/2Jbjjb,q/UVBV=W&XYJ**:6JBLL$<eDJ
 >#4"4}Q7G"GZXJ Ja!(<=JYZ\hij\kXlAmm #$3 $: HHYQH/	wwvxx("'')RZZ*H.ZaZabhjrjrZssses   $O0c           
      j   U R                   R                  c  [        S5      eUR                  S   S-  S:w  a  [        S5      e[        R
                  " U5      n[        R                  " UR                  S[        S9n[        R                  " UR                  S   UR                  S   UR                  S   4[        R                  S9n[        UR                  S   5       H  nSnUR                  S   nXg:  aY  [        SU R                  S-   5      n[        XR                  -  5      n	U[        SX-
  S-   5      -   n
SX5XU-   24'   Xi-  nXg:  a  MY  [        R                  " UR                  S   5      XE'   M     [        R                   " UR#                  5        Vs/ s H  oR                   R%                  USS9PM     sn[        S9nSX<'   U R                   R&                  b  X R                   R(                  :H  nS	X='   WU-  ) nU R                   R*                  X'   S
X#) '   [        R                  " UR                  S   UR                  S   UR                  S   4[        R                  S9n[        UR                  S   5       H  n[        R,                  " UR                  S   5      nUR/                  SUR                  S   S-  45      R0                  n[        R2                  R5                  U5        UR0                  R7                  5       nSUX5   ) X   -  '   UR/                  UR                  S   S45      UR/                  SUR                  S   45      :*  X5   -  X'   M     UR9                  [        R:                  5      XUR9                  [        R:                  5      4$ s  snf )r  r  r   r   r   r  rK   Tr4  rA  r{   r   )r4   r   r    r   r   rM  r   rx   r   re   r   r   r  rT   r  r  rn   r   r6  r   r   r(  r  r   Tr  rr  r  rS  rd   )r"   r   rM   r*  r  r   r  r  r  r  r  r8  r!  r  r  r  r  s                    r$   rL  <DataCollatorForPermutationLanguageModeling.numpy_mask_tokensR  sT    >>$$,N 
 <<?Q!#A 
 q=6<<?FLLOV\\RS_"U]_]g]ghv||A'AGll1oG#%a)=)=)AB!$[3G3G%G!H%>3ORS3S(TTMN+k0I"IIJ) # !#v||A 7N% (( !hhekereretuet^a^^33CTX3Yetu
 /0+>>##/!^^%@%@@L+.N( ')<<=!%!=!="&HHfll1ov||AQPXZXbXbc	v||A'A 6<<?3J#++RaA1E,FGIIJIIj)#--/J
 ACJ))M,<<=
 ""FLLOQ#78J<N<NPQSYS_S_`aSbOc<dd!"IL1 (8 }}RXX&	6==QSQYQYCZZZY vs    #N0r(   N)r*   r+   r,   r-   rE   r   rG   r  rV   r  rT   r   rF   r	   r   r   r   r   r   r   r   r<  r   rL  r.   r(   r'   r$   r  r  a  sd    '&"OU"OSNCq4d3id38n.L(M#N qSWX[]`X`Sa qqU49c4S>+I%J K qPTUXZ]U]P^ qq4d3id38n.L(M#N qSWX[]`X`Sa qaG aGc3S6H0I aGFitS itU3S#3E-F itV_[ _[c3S6H0I _[r'   r  c                   D   ^  \ rS rSrSrSSSSS.U 4S jjrS
S jrS	rU =r$ )DataCollatorWithFlatteningi  a  
Data collator used for padding free approach. Does the following:

- concatenates the entire mini batch into single long sequence of shape [1, total_tokens]
- uses `separator_id` to separate sequences within the concatenated `labels`, default value is -100
- no padding will be added, returns `input_ids`, `labels` and `position_ids` by default
- optionally returns the kwargs contained in FlashAttentionKwargs
- optionally returns seq_idx indicating which sequence each token belongs to

<Tip warning={true}>

Using `DataCollatorWithFlattening` will flatten the entire mini batch into single long sequence.
Make sure your attention computation is able to handle it!

</Tip>
Tr{   F)return_position_idsseparator_idreturn_flash_attn_kwargsreturn_seq_idxc                   > [         TU ]  " U0 UD6  Xl        X l        X0l        X@l        1 SkU l        1 SkU l        SS1U l        g )N>   rM   r~   position_ids>   rM   seq_idxr~   r  max_length_qmax_length_k)	superr  r  r  r  r  _int_64_keys_batch_dim_keys_py_int_keys)r"   r  r  r  r  r  r  r   s          r$   r  #DataCollatorWithFlattening.__init__  sL     	$)&)#6 ((@%,CQ+^<r'   c                 F   Uc  U R                   nUc  U R                  nSUS   ;   n/ / S.nU R                  (       a  UR                  S/ 05        U R                  (       a  UR                  S/ 05        U R
                  (       a  S/nSn[        U5       GH  u  pU	S   n
US==   U
-  ss'   U(       a  US==   U/U	S   SS  -   -  ss'   OUS==   U/U
SS  -   -  ss'   U R                  (       a(  US==   [        [        [        U
5      5      5      -  ss'   U R                  (       a/  US==   [        [        U
5      5       Vs/ s H  oPM     sn-  ss'   U R
                  (       d  M  WR                  US   [        U
5      -   5        [        W[        U
5      5      nGM     U R
                  (       a  W=US	'   US
'   W=US'   US'   US:X  a)  SS KnUR                  nUR                  nUR                  nOGUS:X  a1  [         R"                  n[         R                  n[         R                  nO[%        SU< S35      eUR'                  5        HG  u  nnUU R(                  ;   a  U/nUU R*                  ;  d  M+  U" UUU R,                  ;   a  UOUS9UU'   MI     U$ s  snf )NrM   r   r_  r  r  r~   r   r   cu_seq_lens_qcu_seq_lens_kr  r  r   r   z;return_tensors must be one of ("pt", "np"), return_tensors=z not suportedrK   )r   r  r  updater  r  r   rj   r   r   r   r   rO   rW   rd   r   r   rn   r    rY   r  r  r  )r"   r#   r   r  is_labels_providedr^   cu_seq_lensrs   r  sampler~   _rO   data_clsdtype_64dtype_32r_   r`   s                     r$   r%   #DataCollatorWithFlattening.__call__  sy   !!00N,,L%!4 B/##LL."-.LL)R)((#KJ(2OG{+I+)+!hL>F84DQR4H#HHhL>IabM#AA''n%eC	N.C)DD%""i eC	N6K$L6KW6K$LL ,,,"";r?S^#CD S^<
  3 ((>IIE/"U?%;<FFE.!E.$9 T!||H{{H{{Ht#xxHxxHxxH[NK\\ijkkKKMDAqD(((C)))#Ad>O>O9OXU]^a " = %Ms   J)r  r  r  r  r  r  r  )NN)	r*   r+   r,   r-   rE   r  r%   r.   __classcell__)r   s   @r$   r  r    s*    ( !!&= =$6 6r'   r  )r   r)   )3multiprocessingr  r  r}  collections.abcr   dataclassesr   r   typingr   r   r   r	   r
   r   r   r   r  r   models.bertr   r   tokenization_utils_baser   r  r   r   rF   r   r   r9   r?   rA   r<   r=   r>   rp   rz   rT   r   r   r   r   r   r   rZ  r   r  r  r  r(   r'   r$   <module>r     sq      # !  M M M  : = # )3/ ~xn1E0FSRUX0V'WXN N,5D$8 5RVWZ\_W_R` 52 ?+ ? ?6!$~*> !4S> !H&tN'; &S#X &R$~*> 4S> @ 0 0 0f H): H HV"(3- "J#$x} #$L(3- @ W$5 W Wt D D DN \&7 \ \~ g#B g gT O.8 O. O.d O[1B O[ O[d
 Z!4 Z Zr'   