"""
Integration with GGML / The file is copied and adapted from https://github.com/99991/pygguf
with extra methods being exposed
"""

from array import array

import numpy as np
from tokenizers import Tokenizer, decoders, normalizers, pre_tokenizers, processors
from tokenizers.models import BPE, Unigram

from .. import AddedToken
from ..convert_slow_tokenizer import GemmaConverter, GPT2Converter, LlamaConverter, Qwen2Converter, T5Converter
from ..utils import logging
from ..utils.logging import tqdm


logger = logging.get_logger(__name__)

model_typeZ_model_name_or_path)architecturenamellamaZmax_position_embeddingsZnum_hidden_layersZintermediate_sizeZhidden_sizeZhead_dimZ
rope_thetaZnum_attention_headsZnum_key_value_headsZrms_norm_eps
vocab_size)
context_lengthblock_countfeed_forward_lengthembedding_lengthrope.dimension_countrope.freq_baseattention.head_countattention.head_count_kv attention.layer_norm_rms_epsilonr   Zmistralqwen2Zqwen2moeZnum_expertsZnum_experts_per_tok)r   r   r   r   r   r   r   r   r    r   Zexpert_countZexpert_used_countfalcon	tokenizerbos_token_ideos_token_idunk_token_idpad_token_id)ggml.bos_token_idggml.eos_token_idggml.unknown_token_idggml.padding_token_idphi3bloomZn_layerZn_headZlayer_norm_epsilon)r   r   r   r   attention.layer_norm_epsilont5Zn_positionsZ
num_layersZd_ffZd_modelZd_kvZ	num_headsZrelative_attention_num_bucketsdecoder_start_token_id)r   r   r   r   attention.key_lengthr   r   r.   z attention.relative_buckets_countr0   r   stablelmZlayer_norm_eps)	r   r   r   r   r   r   r   r.   r   gpt2Zn_ctxZn_embdr   )r   r   r   r   r   r.   
starcoder2Znorm_epsilon)r   r   r   r   r   r   r.   mambaZconv_kernelZ
state_sizeZtime_step_rank)	r   r   r   r    r   zssm.conv_kernelzssm.state_sizezssm.time_step_rankzssm.inner_sizenemotronZnorm_epsgemma2Zsliding_window)r   r   r   r   r   r   r1   r   r   r    zattention.sliding_windowr   Zgemma3tokenizer_typetokensscores
token_typemergesadd_prefix_space)

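# Illustrative sketch of how the table above is consumed (an assumption about the
# GGUF loading utilities, not code from this module): a GGUF field name such as
# "llama.context_length" is split into the architecture prefix and the metadata
# key, and the key is renamed through the mapping:
#
#   GGUF_CONFIG_MAPPING["llama"]["context_length"]  # -> "max_position_embeddings"
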
GGUF_TOKENIZER_MAPPING = {
    "tokenizer": {
        "ggml.model": "tokenizer_type",
        "ggml.tokens": "tokens",
        "ggml.scores": "scores",
        "ggml.token_type": "token_type",
        "ggml.merges": "merges",
        "ggml.bos_token_id": "bos_token_id",
        "ggml.eos_token_id": "eos_token_id",
        "ggml.unknown_token_id": "unk_token_id",
        "ggml.padding_token_id": "pad_token_id",
        "ggml.add_space_prefix": "add_prefix_space",
        "chat_template": "chat_template",
    },
    "tokenizer_config": {
        "chat_template": "chat_template",
        "ggml.bos_token_id": "bos_token_id",
        "ggml.eos_token_id": "eos_token_id",
        "ggml.unknown_token_id": "unk_token_id",
        "ggml.padding_token_id": "pad_token_id",
    },
}


def _gguf_parse_value(_value, data_type):
    if not isinstance(data_type, list):
        data_type = [data_type]
    if len(data_type) == 1:
        data_type = data_type[0]
        array_data_type = None
    else:
        if data_type[0] != 9:
            raise ValueError("Received multiple types, therefore expected the first type to indicate an array.")
        data_type, array_data_type = data_type

    if data_type in [0, 1, 2, 3, 4, 5, 10, 11]:
        _value = int(_value[0])
    elif data_type in [6, 12]:
        _value = float(_value[0])
    elif data_type in [7]:
        _value = bool(_value[0])
    elif data_type in [8]:
        _value = array("B", list(_value)).tobytes().decode()
    elif data_type in [9]:
        _value = _gguf_parse_value(_value, array_data_type)
    return _value

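# Illustrative examples of the decoding above (assumed inputs; the type ids
# follow the GGUF specification, where e.g. 4 is an integer type and 8 a string):
#
#   _gguf_parse_value([4096], 4)    # -> 4096
#   _gguf_parse_value(b"llama", 8)  # -> "llama"
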
class GGUFTokenizerSkeleton:
    def __init__(self, dict_):
        for k, v in dict_.items():
            setattr(self, k, v)

        if not hasattr(self, "merges"):
            if not hasattr(self, "tokens") or not hasattr(self, "scores"):
                raise ValueError(
                    "tokens and scores need to be passed for a LLaMa tokenizer without merges to be instantiated."
                )
            tokens = self.tokens
            scores = self.scores
            vocab = {t: scores[i] for i, t in enumerate(tokens)}

            logger.warning("Merges were not in checkpoint, building merges on the fly.")
            merges = []
            for merge, piece_score in tqdm(vocab.items()):
                local = []
                for index in range(1, len(merge)):
                    piece_l, piece_r = merge[:index], merge[index:]
                    if piece_l in tokens and piece_r in tokens:
                        local.append((piece_l, piece_r, piece_score))
                local = sorted(local, key=lambda x: (vocab[x[0]], vocab[x[1]]), reverse=True)
                merges.extend(local)
            merges = sorted(merges, key=lambda val: val[2], reverse=True)
            merges = [(val[0], val[1]) for val in merges]
            self.merges = merges
        else:
            self.merges = [tuple(merge.split(" ")) for merge in self.merges]
            if not hasattr(self, "scores"):
                self.scores = [None for _ in range(len(self.tokens))]

        if not hasattr(self, "added_tokens"):
            self.added_tokens = []

        if not hasattr(self, "unk_token_id"):
            self.unk_token_id = None

        # Some GGUF checkpoints store the unknown token under `unknown_token_id`.
        if hasattr(self, "unknown_token_id") and self.unk_token_id is None:
            self.unk_token_id = self.unknown_token_id

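# Example of the on-the-fly merge construction above: with tokens/scores
# {"a": -1.0, "b": -2.0, "ab": -3.0}, splitting "ab" at every position yields the
# single candidate ("a", "b"), because both halves are themselves tokens. The
# candidates are then ordered by the score of the merged token, with ties broken
# by the scores of the two halves.
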
 Zdd ZdS )GGUFLlamaConverterc                 C   s0   t || _| j| _i | _t| jdddk| _d S )Nr8   r   )rY   protooriginal_tokenizeradditional_kwargsgetattris_llama_3_tokenizerrx   tokenizer_dictrW   rW   rX   r}   @  s   
zGGUFLlamaConverter.__init__c                 C      t t|j|jS rj   rM   zipr9   r:   rx   r   rW   rW   rX   r`   F     zGGUFLlamaConverter.vocabc                 C      |j S rj   r<   r   rW   rW   rX   r<   I     zGGUFLlamaConverter.mergesc                 C   s  |  | j}| | j}dd t|D }|jd ur |j|j nd }t|dd d ur0|j|j nd }t|dd d ur@|j|j nd }tt	|||ddd}g }	t
| jds|d urc|	t|ddd	 |d urq|	t|ddd	 |d ur|	t|ddd	 n!tt| jjd
kd }
|
D ]}|	t| jj| ddd	 qt|	dkr||	 t| jjdkr|dd | jjD  || jd< || jd< || jd< | jrd | jd< d| jd< d| jd< d| j_|S )Nc                 S      i | ]	\}\}}||qS rW   rW   rZ   r[   wordZ_scorerW   rW   rX   r]   O  r^   z0GGUFLlamaConverter.tokenizer.<locals>.<dictcomp>r$   r%   T)	unk_tokenZfuse_unkbyte_fallbackr;   F
normalizedspecialrB   r   c                 S   s   g | ]	}t |d d dqS )Fr   r   )rZ   Zadded_tokenrW   rW   rX   re   v  r^   z0GGUFLlamaConverter.tokenizer.<locals>.<listcomp>r   	eos_token	bos_tokenr=   Zclean_up_tokenization_spaceslegacy)r`   r   r<   rq   r&   r9   r   r$   r   r   rp   ru   r   npwherer   r;   rN   add_special_tokensrl   Z
add_tokensr   r   r   r   )rx   r   vocab_scoresr<   	bpe_vocabr   r   r   r#   special_tokensZspecial_tokens_idxidxrW   rW   rX   r#   L  sT     







zGGUFLlamaConverter.tokenizerc                 C   sX   t  t  t ddg}| jr|t jddddg7 }|r'|t jdddg7 }t |S )N   ▁rf   FTr=   Ztrim_offsets	use_regexr@   contentleft)r   ByteFallbackFuseReplacer   	ByteLevelStripSequencerx   replacementr=   sequencerW   rW   rX   decoder  s   

zGGUFLlamaConverter.decoderc                 C   s   |  | j}| | j}|d ur||_d}d}t| jdr!| jj}| ||}|d ur.||_| |||_|  }|r>||_| j	rPt
jdddd|_tg |_|S )Nr   Tr=   Fr   )r#   r   
normalizerrp   r   r=   pre_tokenizerr   post_processorr   r   r   r   r   )rx   r#   r   r   r=   r   r   rW   rW   rX   	converted  s*   zGGUFLlamaConverter.convertedN)	r~   r   r   r}   r`   r<   r#   r   r   rW   rW   rW   rX   r   ?  s    :r   c                       *   e Zd Zdd Zdef fddZ  ZS )GGUFQwen2Converterc                 C      t || _i | _d S rj   rY   r   r   r   rW   rW   rX   r}        

zGGUFQwen2Converter.__init__returnc              	      s^   dd t | jjD }| jj}t ||}|tddddtddddtddddg |S )	Nc                 S      i | ]\}}||qS rW   rW   rZ   r[   r   rW   rW   rX   r]         z0GGUFQwen2Converter.converted.<locals>.<dictcomp><|endoftext|>FTr   z<|im_start|>z
<|im_end|>)rq   r   r9   r<   superr   r   r   rx   r`   r<   r#   	__class__rW   rX   r     s   zGGUFQwen2Converter.convertedr~   r   r   r}   r   r   __classcell__rW   rW   r   rX   r         r   c                   @   sB   e Zd Zdd Zdd Zdd Zdd Zd	d
 ZdefddZ	dS )GGUFPhi3Converterc                 C   s   t || _| j| _i | _d S rj   rY   r   r   r   r   rW   rW   rX   r}     s   

zGGUFPhi3Converter.__init__c                 C   r   rj   r   r   rW   rW   rX   r`     r   zGGUFPhi3Converter.vocabc                 C   r   rj   r   r   rW   rW   rX   r<     r   zGGUFPhi3Converter.mergesc                 C   sn  |  | j}| | j}dd t|D }tt||}|tddddddtddddtd	dddd
tddddd
tddddd
tddddd
tddddd
tddddd
tddddd
tddddd
tddddd
tddddd
g |jd ur|j	|j nd | j
d< |jd ur|j	|j nd | j
d< |jd ur|j	|j nd | j
d< |jd ur|j	|j nd | j
d< |S )Nc                 S   r   rW   rW   r   rW   rW   rX   r]     r^   z/GGUFPhi3Converter.tokenizer.<locals>.<dictcomp></s>TF)rstriplstripr   r   r   r   z<|assistant|>)r   r   r   z<|placeholder1|>z<|placeholder2|>z<|placeholder3|>z<|placeholder4|>z
<|system|>z<|end|>z<|placeholder5|>z<|placeholder6|>z<|user|>r   r   r   Z	pad_token)r`   r   r<   rq   r   r   r   r   r&   r9   r   r%   r$   r'   )rx   r   r   r<   r   r#   rW   rW   rX   r#     s8   zGGUFPhi3Converter.tokenizerc                 C   s<   t  t  t |dg}|r|t jdddg7 }t |S )Nrf   r@   r   )r   r   r   r   r   r   r   rW   rW   rX   r     s   

zGGUFPhi3Converter.decoderr   c                 C   s:   |  | j}d}d}t| jdr| jj}| |||_|S )Nr   Tr=   )r#   r   rp   r   r=   r   )rx   r#   r   r=   rW   rW   rX   r   
  s   zGGUFPhi3Converter.convertedN)
class GGUFGPTConverter(GPT2Converter):
    def __init__(self, tokenizer_dict):
        self.original_tokenizer = GGUFTokenizerSkeleton(tokenizer_dict)
        self.additional_kwargs = {}

    def converted(self) -> Tokenizer:
        vocab = {word: i for i, word in enumerate(self.original_tokenizer.tokens)}
        merges = self.original_tokenizer.merges
        tokenizer = super().converted(vocab, merges)
        return tokenizer

dZdS )GGUFT5Converterc                 C   s>   dg|d< t || _dd t| jjD | _| j| _i | _d S )N
dummy textr<   c                 S   r   rW   rW   )rZ   rz   ry   rW   rW   rX   r]   )  r   z,GGUFT5Converter.__init__.<locals>.<dictcomp>)rY   r   rq   r9   token2idr   r   r   rW   rW   rX   r}   $  s
   


zGGUFT5Converter.__init__c                 C   r   rj   r   r   rW   rW   rX   r`   -  r   zGGUFT5Converter.vocabc                 C   sT   t | jddr(g }t | jddr|tjddg7 }|tjdddg7 }t|S d S )Nr   Tr=   r   )prependrf   )patternr   )r   r   r   ZPrependr   r   )rx   r   r   rW   rW   rX   r   0  s   
zGGUFT5Converter.normalizerc                 C   s$   t jddgg dd| jd fgdS )N$Ar   )r   r   z$Br   )singlepairr   )r   ZTemplateProcessingr   )rx   rW   rW   rX   r   9  s   zGGUFT5Converter.post_processorr   c                 C   s   |  | j}tt|| jjdd}| | j}|d ur||_d}d}t| jdr,| jj}| 	||}|d ur9||_	| 
|||_
|  }|rI||_|S )NFZunk_idr   r   Tr=   )r`   r   r   r	   r&   r   rp   r   r=   r   r   r   )rx   r   r#   r   r   r=   r   r   rW   rW   rX   r   B  s.   	zGGUFT5Converter.convertedN)	r~   r   r   r}   r`   r   r   r   r   rW   rW   rW   rX   r   #  s    			r   c                   @   r   )GGUFGemmaConverterc                 C   s&   dg|d< t || _| j| _i | _d S )Nr   r<   r   r   rW   rW   rX   r}   c  s   


zGGUFGemmaConverter.__init__c                 C   s   t t|j|j}g }|D ]1\}}|dkr|d|f qd|v r7t| dkr7dt| }|||f q|||f q|S )Nz<0x09>	rf   r   r   )rM   r   r9   r:   ru   rN   strip)rx   r   Zoriginal_vocabZupdated_vocabtokenZscoreZunderscoresrW   rW   rX   r`   k  s   zGGUFGemmaConverter.vocabc                 C   s   t ddS )Nrf   r   )r   r   r   rW   rW   rX   r   z  s   zGGUFGemmaConverter.normalizerc                 C   s<   t ddt  t  g}|r|t jdddg7 }t |S )Nr   rf   r@   r   )r   r   r   r   r   r   r   rW   rW   rX   r   }  s   

zGGUFGemmaConverter.decoderr   c                 C   s   |  | j}tt|| jj| jd}| | j}|d ur||_d}d}t| jdr-| jj	}| 
|||_
| ||}|d urA||_|S )Nr   r   Tr=   )r`   r   r   r	   r&   Zhandle_byte_fallbackr   rp   r   r=   r   r   )rx   r   r#   r   r   r=   r   rW   rW   rX   r     s(   zGGUFGemmaConverter.convertedN)	r~   r   r   r}   r`   r   r   r   r   rW   rW   rW   rX   r   b  s    r   )r   r!   Z	qwen2_moer,   r-   r"   r2   r3   r4   r/   r5   r6   r7   Zgemma3_textr   c                 C   s"   | }t | |}| }||jfS )a6  
def convert_gguf_tokenizer(architecture, tokenizer_dict) -> Tokenizer:
    """
    Utilities to convert a slow tokenizer instance into a fast tokenizer instance.

    Args:
        architecture (`str`): The model architecture derived from the gguf file.
        tokenizer_dict (`dict`):
            The tokenizer attributes extracted from the gguf file, used to build the backend
            tokenizer for [`~tokenization_utils_base.PreTrainedTokenizerFast`].

    Return:
        An instance of [`~tokenizers.Tokenizer`] to be used as the backend tokenizer of a
        [`~tokenization_utils_base.PreTrainedTokenizerFast`], along with the additional
        tokenizer kwargs collected during conversion.
    """
    tokenizer_class_name = architecture
    converter = GGUF_TO_FAST_CONVERTERS[tokenizer_class_name](tokenizer_dict)
    fast_tokenizer = converter.converted()
    return fast_tokenizer, converter.additional_kwargs
r   )(__doc__r   numpyr   Z
tokenizersr   r   r   r   r   Ztokenizers.modelsr   r	    r   Zconvert_slow_tokenizerr   r   r   r   r   utilsr   Zutils.loggingr   Z
get_loggerr~   rr   ZGGUF_CONFIG_MAPPINGZGGUF_TOKENIZER_MAPPINGrU   rY   r   r   r   r   r   r   r   r   rW   rW   rW   rX   <module>   s  
+9EKW^kv~       /  C+yK?B