from enum import Enum
from typing import List, Tuple, Union

# A (start, end) span within a sequence.
Offsets = Tuple[int, int]

# A raw-text input sequence.
TextInputSequence = str

# A pre-tokenized input sequence: a list or tuple of string pieces.
PreTokenizedInputSequence = Union[List[str], Tuple[str]]

# A textual input for encoding: a single sequence, or a pair of sequences.
TextEncodeInput = Union[
    TextInputSequence,
    Tuple[TextInputSequence, TextInputSequence],
    List[TextInputSequence],
]

# A pre-tokenized input for encoding: a single sequence, or a pair of sequences.
PreTokenizedEncodeInput = Union[
    PreTokenizedInputSequence,
    Tuple[PreTokenizedInputSequence, PreTokenizedInputSequence],
    List[PreTokenizedInputSequence],
]

# Any single input sequence, raw or pre-tokenized.
InputSequence = Union[TextInputSequence, PreTokenizedInputSequence]

# Any input accepted for encoding, raw or pre-tokenized.
EncodeInput = Union[TextEncodeInput, PreTokenizedEncodeInput]


class OffsetReferential(Enum):
    ORIGINAL = "original"
    NORMALIZED = "normalized"


class OffsetType(Enum):
    BYTE = "byte"
    CHAR = "char"


class SplitDelimiterBehavior(Enum):
    REMOVED = "removed"
    ISOLATED = "isolated"
    MERGED_WITH_PREVIOUS = "merged_with_previous"
    MERGED_WITH_NEXT = "merged_with_next"
    CONTIGUOUS = "contiguous"


from .tokenizers import (
    AddedToken,
    Encoding,
    NormalizedString,
    PreTokenizedString,
    Regex,
    Token,
    Tokenizer,
    decoders,
    models,
    normalizers,
    pre_tokenizers,
    processors,
    trainers,
    __version__,
)
from .implementations import (
    BertWordPieceTokenizer,
    ByteLevelBPETokenizer,
    CharBPETokenizer,
    SentencePieceBPETokenizer,
    SentencePieceUnigramTokenizer,
)