
"""PyTorch InstructBLIP model."""

import math
from dataclasses import dataclass
from typing import Any, Callable, Optional, Tuple, Union

import torch
import torch.utils.checkpoint
from torch import nn

from ...activations import ACT2FN
from ...generation import GenerationMixin
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_outputs import (
    BaseModelOutput,
    BaseModelOutputWithPastAndCrossAttentions,
    BaseModelOutputWithPooling,
    BaseModelOutputWithPoolingAndCrossAttentions,
)
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...pytorch_utils import apply_chunking_to_forward, find_pruneable_heads_and_indices, prune_linear_layer
from ...utils import LossKwargs, ModelOutput, auto_docstring, can_return_tuple, logging, torch_int
from ..auto import AutoModel, AutoModelForCausalLM, AutoModelForSeq2SeqLM
from .configuration_instructblip import InstructBlipConfig, InstructBlipQFormerConfig, InstructBlipVisionConfig


logger = logging.get_logger(__name__)


@dataclass
class InstructBlipForConditionalGenerationModelOutput(ModelOutput):
    """
Class defining the outputs of [`InstructBlipForConditionalGeneration`].

Args:
    loss (`torch.FloatTensor`, *optional*, returned when `labels` is provided, `torch.FloatTensor` of shape `(1,)`):
        Language modeling loss from the language model.
    logits (`torch.FloatTensor` of shape `(batch_size, sequence_length, config.vocab_size)`):
        Prediction scores of the language modeling head of the language model.
    vision_outputs (`BaseModelOutputWithPooling`):
        Outputs of the vision encoder.
    qformer_outputs (`BaseModelOutputWithPoolingAndCrossAttentions`):
        Outputs of the Q-Former (Querying Transformer).
    language_model_outputs (`CausalLMOutputWithPast` or `Seq2SeqLMOutput`):
        Outputs of the language model.
Nlosslogitsvision_outputsqformer_outputslanguage_model_outputsreturnc                 J   ^  [        U 4S jT R                  5        5       5      $ )Nc              3   n   >#    U  H*  nUS ;  a  TU   O[        TU5      R                  5       v   M,     g7f)r+   r,   r-   N)getattrto_tuple).0kselfs     n/var/www/auris/envauris/lib/python3.13/site-packages/transformers/models/instructblip/modeling_instructblip.py	<genexpr>KInstructBlipForConditionalGenerationModelOutput.to_tuple.<locals>.<genexpr>G   sC      
 ! WW Gq!**,- !s   25)tuplekeysr6   s   `r7   r3   8InstructBlipForConditionalGenerationModelOutput.to_tupleF   s%     
 YY[	
 
 	
     )__name__
__module____qualname____firstlineno____doc__r)   r   r   torchFloatTensor__annotations__r*   r+   r,   r-   r   r3   __static_attributes__r?   r>   r7   r'   r'   -   s      04D(5**+
,315FHU5,,-.526NHU../6:>OXeE$5$567>AEHU5+<+<%=>E
%* 
r>   r'   c                      ^  \ rS rSrS\4U 4S jjrS\R                  S\S\S\R                  4S jr	SS	\R                  S
\S\R                  4S jjrSrU =r$ )InstructBlipVisionEmbeddingsP   configc                 r  > [         TU ]  5         Xl        UR                  U l        UR
                  U l        UR                  U l        [        R                  " [        R                  " SSU R                  5      5      U l        [        R                  " SU R                  U R                  U R                  S9U l        U R
                  U R                  -  S-  U l        U R                  S-   U l        [        R                  " [        R                  " SU R                  U R                  5      5      U l        g )Nr"   r
   )in_channelsout_channelskernel_sizestrider   )super__init__rL   hidden_size	embed_dim
image_size
patch_sizer	   	ParameterrE   randnclass_embeddingConv2dpatch_embeddingnum_patchesnum_positionsposition_embeddingr6   rL   	__class__s     r7   rS   %InstructBlipVisionEmbeddings.__init__Q   s    ++ ++ ++!||EKK1dnn,MN!yyDOO\`\k\k 
 !OOt>1D!--1"$,,u{{1d>P>PRVR`R`/a"br>   
embeddingsheightwidthr.   c                    UR                   S   S-
  nU R                  R                   S   S-
  n[        R                  R	                  5       (       d  XE:X  a  X#:X  a  U R                  $ U R                  SS2SS24   nU R                  SS2SS24   nUR                   S   nX R
                  -  n	X0R
                  -  n
[        US-  5      nUR                  SXU5      nUR                  SSSS5      n[        R                  R                  UX4SS	S
9nUR                  SSSS5      R                  SSU5      n[        R                  " Xg4SS9$ )a  
        This method allows to interpolate the pre-trained position encodings, to be able to use the model on higher
        resolution images. This method is also adapted to support torch.jit tracing.

        Adapted from:
        - https://github.com/facebookresearch/dino/blob/de9ee3df6cf39fac952ab558447af1fa1365362a/vision_transformer.py#L174-L194, and
        - https://github.com/facebookresearch/dinov2/blob/e1277af2ba9496fbadf7aec6eba56e8d882d1e35/dinov2/models/vision_transformer.py#L179-L211
        """
        num_patches = embeddings.shape[1] - 1
        num_positions = self.position_embedding.shape[1] - 1

        # always interpolate when tracing so the exported model works for dynamic input shapes
        if not torch.jit.is_tracing() and num_patches == num_positions and height == width:
            return self.position_embedding

        class_pos_embed = self.position_embedding[:, :1]
        patch_pos_embed = self.position_embedding[:, 1:]

        dim = embeddings.shape[-1]

        new_height = height // self.patch_size
        new_width = width // self.patch_size

        sqrt_num_positions = torch_int(num_positions**0.5)
        patch_pos_embed = patch_pos_embed.reshape(1, sqrt_num_positions, sqrt_num_positions, dim)
        patch_pos_embed = patch_pos_embed.permute(0, 3, 1, 2)

        patch_pos_embed = nn.functional.interpolate(
            patch_pos_embed, size=(new_height, new_width), mode="bicubic", align_corners=False
        )

        patch_pos_embed = patch_pos_embed.permute(0, 2, 3, 1).view(1, -1, dim)

        return torch.cat((class_pos_embed, patch_pos_embed), dim=1)

    def forward(self, pixel_values: torch.FloatTensor, interpolate_pos_encoding: bool = False) -> torch.Tensor:
        batch_size, _, height, width = pixel_values.shape
        target_dtype = self.patch_embedding.weight.dtype
        patch_embeds = self.patch_embedding(pixel_values.to(dtype=target_dtype))
        patch_embeds = patch_embeds.flatten(2).transpose(1, 2)

        class_embeds = self.class_embedding.expand(batch_size, 1, -1).to(target_dtype)
        embeddings = torch.cat([class_embeds, patch_embeds], dim=1)
        if interpolate_pos_encoding:
            position_embedding = self.interpolate_pos_encoding(embeddings, height, width)
        else:
            position_embedding = self.position_embedding
        embeddings = embeddings + position_embedding[:, : embeddings.size(1), :].to(target_dtype)
        return embeddings


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs,
):
    attn_weights = torch.matmul(query, key.transpose(-1, -2)) * scaling
    if attention_mask is not None:
        attn_weights = attn_weights + attention_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)

    attn_output = torch.matmul(attn_weights, value)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


class InstructBlipAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config):
        super().__init__()
        self.config = config
        self.embed_dim = config.hidden_size
        self.num_heads = config.num_attention_heads
        self.head_dim = self.embed_dim // self.num_heads
        if self.head_dim * self.num_heads != self.embed_dim:
            raise ValueError(
                f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim} and `num_heads`:"
                f" {self.num_heads})."
            )
        self.scale = self.head_dim**-0.5
        self.is_causal = False
        self.attention_dropout = config.attention_dropout

        # small tweak compared to CLIP: the qkv projection itself carries no bias
        self.qkv = nn.Linear(self.embed_dim, 3 * self.embed_dim, bias=False)

        if config.qkv_bias:
            q_bias = nn.Parameter(torch.zeros(self.embed_dim))
            v_bias = nn.Parameter(torch.zeros(self.embed_dim))
        else:
            q_bias = None
            v_bias = None

        if q_bias is not None:
            qkv_bias = torch.cat((q_bias, torch.zeros_like(v_bias, requires_grad=False), v_bias))
            self.qkv.bias = nn.Parameter(qkv_bias)

        self.projection = nn.Linear(self.embed_dim, self.embed_dim)

    def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int):
        return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous()

    def forward(
        self,
        hidden_states: torch.Tensor,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = False,
        **kwargs,
    ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
        """Input shape: Batch x Time x Channel"""

        bsz, tgt_len, embed_dim = hidden_states.size()

        mixed_qkv = self.qkv(hidden_states)
        mixed_qkv = mixed_qkv.reshape(bsz, tgt_len, 3, self.num_heads, embed_dim // self.num_heads).permute(
            2, 0, 3, 1, 4
        )
        query_states, key_states, value_states = mixed_qkv[0], mixed_qkv[1], mixed_qkv[2]

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            if self.config._attn_implementation == "sdpa" and output_attentions:
                logger.warning_once(
                    "`torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`."
                    " Falling back to eager attention. This warning can be removed using the argument"
                    ' `attn_implementation="eager"` when loading the model.'
                )
            else:
                attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            head_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scale,
            **kwargs,
        )

        attn_output = attn_output.reshape(bsz, tgt_len, -1).contiguous()
        output = self.projection(attn_output)

        outputs = (output, attn_weights) if output_attentions else (output, None)

        return outputs


class InstructBlipMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.activation_fn = ACT2FN[config.hidden_act]
        self.fc1 = nn.Linear(config.hidden_size, config.intermediate_size)
        self.fc2 = nn.Linear(config.intermediate_size, config.hidden_size)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.fc1(hidden_states)
        hidden_states = self.activation_fn(hidden_states)
        hidden_states = self.fc2(hidden_states)
        return hidden_states


class InstructBlipEncoderLayer(nn.Module):
    def __init__(self, config: InstructBlipConfig):
        super().__init__()
        self.embed_dim = config.hidden_size
        self.self_attn = InstructBlipAttention(config)
        self.layer_norm1 = nn.LayerNorm(self.embed_dim, eps=config.layer_norm_eps)
        self.mlp = InstructBlipMLP(config)
        self.layer_norm2 = nn.LayerNorm(self.embed_dim, eps=config.layer_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: torch.Tensor,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.FloatTensor]:
        """
        Args:
            hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
            attention_mask (`torch.FloatTensor`): attention mask of size
                `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
                `(config.encoder_attention_heads,)`.
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
        """
        residual = hidden_states

        hidden_states = self.layer_norm1(hidden_states)
        hidden_states, attn_weights = self.self_attn(
            hidden_states=hidden_states,
            head_mask=attention_mask,
            output_attentions=output_attentions,
        )
        hidden_states = hidden_states + residual

        residual = hidden_states
        hidden_states = self.layer_norm2(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = hidden_states + residual

        outputs = (hidden_states,)

        if output_attentions:
            outputs += (attn_weights,)

        return outputs


@auto_docstring
class InstructBlipPreTrainedModel(PreTrainedModel):
    config_class = InstructBlipConfig
    base_model_prefix = "blip"
    supports_gradient_checkpointing = True

    _supports_attention_backend = True
    _supports_flash_attn_2 = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _supports_cache_class = True
    _supports_static_cache = True
    _supports_quantized_cache = False

    _no_split_modules = [
        "InstructBlipQFormerEmbeddings",
        "InstructBlipAttention",
        "InstructBlipQFormerMultiHeadAttention",
        "InstructBlipQFormerSelfOutput",
    ]

    def _init_weights(self, module):
        """Initialize the weights"""
        factor = self.config.initializer_range
        if isinstance(module, (nn.Linear, nn.Conv2d)):
            module.weight.data.normal_(mean=0.0, std=factor)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=factor)
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
        elif isinstance(module, InstructBlipVisionEmbeddings):
            nn.init.trunc_normal_(module.position_embedding, mean=0.0, std=factor)
            nn.init.trunc_normal_(module.class_embedding, mean=0.0, std=factor)
        elif isinstance(module, (InstructBlipForConditionalGeneration, InstructBlipModel)):
            module.query_tokens.data.zero_()


class InstructBlipEncoder(nn.Module):
    """
    Transformer encoder consisting of `config.num_hidden_layers` self attention layers. Each layer is a
    [`InstructBlipEncoderLayer`].

    Args:
        config (`InstructBlipConfig`):
            The corresponding vision configuration for the `InstructBlipEncoder`.
    """

    def __init__(self, config: InstructBlipConfig):
        super().__init__()
        self.config = config
        self.layers = nn.ModuleList([InstructBlipEncoderLayer(config) for _ in range(config.num_hidden_layers)])
        self.gradient_checkpointing = False

    def forward(
        self,
        inputs_embeds,
        attention_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, BaseModelOutput]:
        r"""
        Args:
            inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`):
                Embedded representation of the inputs. Should be float, not int tokens.
            attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
                Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

                - 1 for tokens that are **not masked**,
                - 0 for tokens that are **masked**.

                [What are attention masks?](../glossary#attention-mask)
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            output_hidden_states (`bool`, *optional*):
                Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors
                for more detail.
            return_dict (`bool`, *optional*):
                Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
        """
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        encoder_states = () if output_hidden_states else None
        all_attentions = () if output_attentions else None

        hidden_states = inputs_embeds
        for idx, encoder_layer in enumerate(self.layers):
            if output_hidden_states:
                encoder_states = encoder_states + (hidden_states,)
            if self.gradient_checkpointing and self.training:
                layer_outputs = self._gradient_checkpointing_func(
                    encoder_layer.__call__,
                    hidden_states,
                    attention_mask,
                    output_attentions,
                )
            else:
                layer_outputs = encoder_layer(
                    hidden_states,
                    attention_mask,
                    output_attentions=output_attentions,
                )

            hidden_states = layer_outputs[0]

            if output_attentions:
                all_attentions = all_attentions + (layer_outputs[1],)

        if output_hidden_states:
            encoder_states = encoder_states + (hidden_states,)

        if not return_dict:
            return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)
        return BaseModelOutput(
            last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions
        )


class InstructBlipVisionModel(InstructBlipPreTrainedModel):
    main_input_name = "pixel_values"
    config_class = InstructBlipVisionConfig

    def __init__(self, config: InstructBlipVisionConfig):
        super().__init__(config)
        self.config = config
        embed_dim = config.hidden_size

        self.embeddings = InstructBlipVisionEmbeddings(config)
        self.encoder = InstructBlipEncoder(config)
        self.post_layernorm = nn.LayerNorm(embed_dim, eps=config.layer_norm_eps)

        self.post_init()

    @auto_docstring
    def forward(
        self,
        pixel_values: Optional[torch.FloatTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        interpolate_pos_encoding: bool = False,
    ) -> Union[Tuple, BaseModelOutputWithPooling]:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if pixel_values is None:
            raise ValueError("You have to specify pixel_values")

        hidden_states = self.embeddings(pixel_values, interpolate_pos_encoding=interpolate_pos_encoding)

        encoder_outputs = self.encoder(
            inputs_embeds=hidden_states,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        last_hidden_state = encoder_outputs[0]
        last_hidden_state = self.post_layernorm(last_hidden_state)

        pooled_output = last_hidden_state[:, 0, :]
        pooled_output = self.post_layernorm(pooled_output)

        if not return_dict:
            return (last_hidden_state, pooled_output) + encoder_outputs[1:]

        return BaseModelOutputWithPooling(
            last_hidden_state=last_hidden_state,
            pooler_output=pooled_output,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
        )

    def get_input_embeddings(self):
        return self.embeddings


class InstructBlipQFormerMultiHeadAttention(nn.Module):
    def __init__(self, config, is_cross_attention=False):
        super().__init__()
        self.config = config
        if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
            raise ValueError(
                "The hidden size (%d) is not a multiple of the number of attention heads (%d)"
                % (config.hidden_size, config.num_attention_heads)
            )

        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.all_head_size = self.num_attention_heads * self.attention_head_size

        self.query = nn.Linear(config.hidden_size, self.all_head_size)
        if is_cross_attention:
            self.key = nn.Linear(config.encoder_hidden_size, self.all_head_size)
            self.value = nn.Linear(config.encoder_hidden_size, self.all_head_size)
        else:
            self.key = nn.Linear(config.hidden_size, self.all_head_size)
            self.value = nn.Linear(config.hidden_size, self.all_head_size)

        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
        self.position_embedding_type = getattr(config, "position_embedding_type", "absolute")
        if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query":
            self.max_position_embeddings = config.max_position_embeddings
            self.distance_embedding = nn.Embedding(2 * config.max_position_embeddings - 1, self.attention_head_size)
        self.save_attention = False

    def save_attn_gradients(self, attn_gradients):
        self.attn_gradients = attn_gradients

    def get_attn_gradients(self):
        return self.attn_gradients

    def save_attention_map(self, attention_map):
        self.attention_map = attention_map

    def get_attention_map(self):
        return self.attention_map

    def transpose_for_scores(self, x):
        new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
        x = x.view(*new_x_shape)
        return x.permute(0, 2, 1, 3)

    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_value=None,
        output_attentions=False,
    ):
        # If this is used as a cross-attention module, the keys and values come from the encoder;
        # the attention mask then has to mask the encoder's padding tokens.
        is_cross_attention = encoder_hidden_states is not None

        if is_cross_attention:
            key_layer = self.transpose_for_scores(self.key(encoder_hidden_states))
            value_layer = self.transpose_for_scores(self.value(encoder_hidden_states))
            attention_mask = encoder_attention_mask
        elif past_key_value is not None:
            key_layer = self.transpose_for_scores(self.key(hidden_states))
            value_layer = self.transpose_for_scores(self.value(hidden_states))
            key_layer = torch.cat([past_key_value[0], key_layer], dim=2)
            value_layer = torch.cat([past_key_value[1], value_layer], dim=2)
        else:
            key_layer = self.transpose_for_scores(self.key(hidden_states))
            value_layer = self.transpose_for_scores(self.value(hidden_states))

        mixed_query_layer = self.query(hidden_states)

        query_layer = self.transpose_for_scores(mixed_query_layer)

        past_key_value = (key_layer, value_layer)

        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))

        if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query":
            seq_length = hidden_states.size()[1]
            position_ids_l = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(-1, 1)
            position_ids_r = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(1, -1)
            distance = position_ids_l - position_ids_r
            positional_embedding = self.distance_embedding(distance + self.max_position_embeddings - 1)
            positional_embedding = positional_embedding.to(dtype=query_layer.dtype)  # fp16 compatibility

            if self.position_embedding_type == "relative_key":
                relative_position_scores = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
                attention_scores = attention_scores + relative_position_scores
            elif self.position_embedding_type == "relative_key_query":
                relative_position_scores_query = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
                relative_position_scores_key = torch.einsum("bhrd,lrd->bhlr", key_layer, positional_embedding)
                attention_scores = attention_scores + relative_position_scores_query + relative_position_scores_key

        attention_scores = attention_scores / math.sqrt(self.attention_head_size)

        attention_scores_dtype = attention_scores.dtype

        if attention_mask is not None:
            # Apply the attention mask (precomputed for all layers in the model's forward)
            attention_scores = attention_scores + attention_mask

        # Normalize the attention scores to probabilities.
        attention_probs = nn.Softmax(dim=-1)(attention_scores).to(attention_scores_dtype)

        if is_cross_attention and self.save_attention:
            self.save_attention_map(attention_probs)
            attention_probs.register_hook(self.save_attn_gradients)

        # This drops out entire tokens to attend to, following the original Transformer paper.
        attention_probs_dropped = self.dropout(attention_probs)

        # Mask heads if we want to
        if head_mask is not None:
            attention_probs_dropped = attention_probs_dropped * head_mask

        context_layer = torch.matmul(attention_probs_dropped, value_layer)

        context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
        new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
        context_layer = context_layer.view(*new_context_layer_shape)

        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)

        outputs = outputs + (past_key_value,)
        return outputs


class InstructBlipQFormerSelfOutput(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.LayerNorm(hidden_states + input_tensor)
        return hidden_states


class InstructBlipQFormerAttention(nn.Module):
    def __init__(self, config, is_cross_attention=False):
        super().__init__()
        self.attention = InstructBlipQFormerMultiHeadAttention(config, is_cross_attention)
        self.output = InstructBlipQFormerSelfOutput(config)
        self.pruned_heads = set()

    def prune_heads(self, heads):
        if len(heads) == 0:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.attention.num_attention_heads, self.attention.attention_head_size, self.pruned_heads
        )

        # Prune linear layers
        self.attention.query = prune_linear_layer(self.attention.query, index)
        self.attention.key = prune_linear_layer(self.attention.key, index)
        self.attention.value = prune_linear_layer(self.attention.value, index)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)

        # Update hyper params and store pruned heads
        self.attention.num_attention_heads = self.attention.num_attention_heads - len(heads)
        self.attention.all_head_size = self.attention.attention_head_size * self.attention.num_attention_heads
        self.pruned_heads = self.pruned_heads.union(heads)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        past_key_value: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.Tensor]:
        self_outputs = self.attention(
            hidden_states,
            attention_mask,
            head_mask,
            encoder_hidden_states,
            encoder_attention_mask,
            past_key_value,
            output_attentions,
        )
        attention_output = self.output(self_outputs[0], hidden_states)
        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them
        return outputs


class InstructBlipQFormerIntermediate(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
        if isinstance(config.hidden_act, str):
            self.intermediate_act_fn = ACT2FN[config.hidden_act]
        else:
            self.intermediate_act_fn = config.hidden_act

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.intermediate_act_fn(hidden_states)
        return hidden_states


class InstructBlipQFormerOutput(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.LayerNorm(hidden_states + input_tensor)
        return hidden_states


class InstructBlipQFormerLayer(nn.Module):
    def __init__(self, config, layer_idx):
        super().__init__()
        self.chunk_size_feed_forward = config.chunk_size_feed_forward
        self.seq_len_dim = 1
        self.attention = InstructBlipQFormerAttention(config)

        self.layer_idx = layer_idx

        if layer_idx % config.cross_attention_frequency == 0:
            self.crossattention = InstructBlipQFormerAttention(config, is_cross_attention=True)
            self.has_cross_attention = True
        else:
            self.has_cross_attention = False

        self.intermediate = InstructBlipQFormerIntermediate(config)
        self.output = InstructBlipQFormerOutput(config)

        self.intermediate_query = InstructBlipQFormerIntermediate(config)
        self.output_query = InstructBlipQFormerOutput(config)

    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_value=None,
        output_attentions=False,
        query_length=0,
    ):
        # decoder uni-directional self-attention cached key/values tuple is at positions 1,2
        self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None
        self_attention_outputs = self.attention(
            hidden_states,
            attention_mask,
            head_mask,
            output_attentions=output_attentions,
            past_key_value=self_attn_past_key_value,
        )
        attention_output = self_attention_outputs[0]
        outputs = self_attention_outputs[1:-1]

        present_key_value = self_attention_outputs[-1]

        if query_length > 0:
            query_attention_output = attention_output[:, :query_length, :]

            if self.has_cross_attention:
                if encoder_hidden_states is None:
                    raise ValueError("encoder_hidden_states must be given for cross-attention layers")
                cross_attention_outputs = self.crossattention(
                    query_attention_output,
                    attention_mask,
                    head_mask,
                    encoder_hidden_states,
                    encoder_attention_mask,
                    output_attentions=output_attentions,
                )
                query_attention_output = cross_attention_outputs[0]
                # add cross attentions if we output attention weights
                outputs = outputs + cross_attention_outputs[1:-1]

            layer_output = apply_chunking_to_forward(
                self.feed_forward_chunk_query,
                self.chunk_size_feed_forward,
                self.seq_len_dim,
                query_attention_output,
            )

            if attention_output.shape[1] > query_length:
                layer_output_text = apply_chunking_to_forward(
                    self.feed_forward_chunk,
                    self.chunk_size_feed_forward,
                    self.seq_len_dim,
                    attention_output[:, query_length:, :],
                )
                layer_output = torch.cat([layer_output, layer_output_text], dim=1)
        else:
            layer_output = apply_chunking_to_forward(
                self.feed_forward_chunk,
                self.chunk_size_feed_forward,
                self.seq_len_dim,
                attention_output,
            )
        outputs = (layer_output,) + outputs

        outputs = outputs + (present_key_value,)

        return outputs

    def feed_forward_chunk(self, attention_output):
        intermediate_output = self.intermediate(attention_output)
        layer_output = self.output(intermediate_output, attention_output)
        return layer_output

    def feed_forward_chunk_query(self, attention_output):
        intermediate_output = self.intermediate_query(attention_output)
        layer_output = self.output_query(intermediate_output, attention_output)
        return layer_output


class InstructBlipQFormerEncoder(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.layer = nn.ModuleList(
            [InstructBlipQFormerLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.gradient_checkpointing = False

    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_values=None,
        use_cache=None,
        output_attentions=False,
        output_hidden_states=False,
        return_dict=True,
        query_length=0,
    ):
        all_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None
        all_cross_attentions = () if output_attentions else None

        next_decoder_cache = () if use_cache else None

        for i in range(self.config.num_hidden_layers):
            layer_module = self.layer[i]
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)

            layer_head_mask = head_mask[i] if head_mask is not None else None
            past_key_value = past_key_values[i] if past_key_values is not None else None

            if getattr(self.config, "gradient_checkpointing", False) and self.training:
                if use_cache:
                    logger.warning(
                        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                    )
                    use_cache = False
                layer_outputs = self._gradient_checkpointing_func(
                    layer_module.__call__,
                    hidden_states,
                    attention_mask,
                    layer_head_mask,
                    encoder_hidden_states,
                    encoder_attention_mask,
                )
            else:
                layer_outputs = layer_module(
                    hidden_states,
                    attention_mask,
                    layer_head_mask,
                    encoder_hidden_states,
                    encoder_attention_mask,
                    past_key_value,
                    output_attentions,
                    query_length,
                )

            hidden_states = layer_outputs[0]
            if use_cache:
                next_decoder_cache += (layer_outputs[-1],)
            if output_attentions:
                all_self_attentions = all_self_attentions + (layer_outputs[1],)
                if layer_module.has_cross_attention:
                    all_cross_attentions = all_cross_attentions + (layer_outputs[2],)

        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        if not return_dict:
            return tuple(
                v
                for v in [
                    hidden_states,
                    next_decoder_cache,
                    all_hidden_states,
                    all_self_attentions,
                    all_cross_attentions,
                ]
                if v is not None
            )
        return BaseModelOutputWithPastAndCrossAttentions(
            last_hidden_state=hidden_states,
            past_key_values=next_decoder_cache,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
            cross_attentions=all_cross_attentions,
        )


class InstructBlipQFormerEmbeddings(nn.Module):
    """Construct the embeddings from word and position embeddings."""

    def __init__(self, config):
        super().__init__()
        self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id)
        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)

        self.layernorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

        # position_ids (1, len position emb) is contiguous in memory and exported when serialized
        self.register_buffer(
            "position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)), persistent=False
        )
        self.position_embedding_type = getattr(config, "position_embedding_type", "absolute")

        self.config = config

    def forward(
        self,
        input_ids=None,
        position_ids=None,
        query_embeds=None,
        past_key_values_length=0,
    ):
        if input_ids is not None:
            seq_length = input_ids.size()[1]
        else:
            seq_length = 0

        if position_ids is None:
            position_ids = self.position_ids[:, past_key_values_length : seq_length + past_key_values_length].clone()

        if input_ids is not None:
            embeddings = self.word_embeddings(input_ids)
            if self.position_embedding_type == "absolute":
                position_embeddings = self.position_embeddings(position_ids.to(embeddings.device))
                embeddings = embeddings + position_embeddings

            if query_embeds is not None:
                embeddings = torch.cat((query_embeds, embeddings), dim=1)
        else:
            embeddings = query_embeds

        embeddings = embeddings.to(self.layernorm.weight.dtype)
        embeddings = self.layernorm(embeddings)
        embeddings = self.dropout(embeddings)
        return embeddings


class InstructBlipQFormerModel(InstructBlipPreTrainedModel):
    """
    Querying Transformer (Q-Former), used in InstructBLIP. Slightly modified from BLIP-2 as it also takes the
    instruction as input.
    """

    _supports_attention_backend = False
    _supports_flash_attn_2 = False
    _supports_sdpa = False
    _supports_flex_attn = False

    def __init__(self, config: InstructBlipQFormerConfig):
        super().__init__(config)
        self.config = config

        self.embeddings = InstructBlipQFormerEmbeddings(config)

        self.encoder = InstructBlipQFormerEncoder(config)

        self.post_init()

    def get_input_embeddings(self):
        return self.embeddings.word_embeddings

    def set_input_embeddings(self, value):
        self.embeddings.word_embeddings = value

    def _prune_heads(self, heads_to_prune):
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    def get_extended_attention_mask(
        self,
        attention_mask: torch.Tensor,
        input_shape: Tuple[int],
        device: torch.device,
        has_query: bool = False,
    ) -> torch.Tensor:
        """
        Makes broadcastable attention and causal masks so that future and masked tokens are ignored.

        Arguments:
            attention_mask (`torch.Tensor`):
                Mask with ones indicating tokens to attend to, zeros for tokens to ignore.
            input_shape (`Tuple[int]`):
                The shape of the input to the model.
            device: (`torch.device`):
                The device of the input to the model.

        Returns:
            `torch.Tensor` The extended attention mask, with the same dtype as `attention_mask.dtype`.
        """
        # We can provide a self-attention mask of dimensions
        # [batch_size, from_seq_length, to_seq_length]
        # ourselves in which case we just need to make it broadcastable to all heads.
        if attention_mask.dim() == 3:
            extended_attention_mask = attention_mask[:, None, :, :]
        elif attention_mask.dim() == 2:
            # Provided a padding mask of dimensions [batch_size, seq_length]; the model is an encoder,
            # so make the mask broadcastable to [batch_size, num_heads, seq_length, seq_length]
            extended_attention_mask = attention_mask[:, None, None, :]
        else:
            raise ValueError(
                f"Wrong shape for input_ids (shape {input_shape}) or attention_mask (shape {attention_mask.shape})"
            )

        # Since attention_mask is 1.0 for positions we want to attend and 0.0 for masked positions,
        # this creates a tensor which is 0.0 for positions to attend and a large negative number otherwise.
        extended_attention_mask = extended_attention_mask.to(dtype=self.dtype)  # fp16 compatibility
        extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
        return extended_attention_mask

    def forward(
        self,
        input_ids: torch.LongTensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        query_embeds: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple[torch.FloatTensor], BaseModelOutputWithPoolingAndCrossAttentions]:
        r"""
        encoder_hidden_states  (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
            Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if
            the model is configured as a decoder.
        encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
            the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`:
            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.
        past_key_values (`tuple(tuple(torch.FloatTensor))` of length `config.n_layers` with each tuple having 4 tensors of:
            shape `(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): Contains precomputed key and
            value hidden states of the attention blocks. Can be used to speed up decoding. If `past_key_values` are
            used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key
            value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape
            `(batch_size, sequence_length)`.
        use_cache (`bool`, *optional*):
            If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
            `past_key_values`).
        """
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if input_ids is None and query_embeds is None:
            raise ValueError("You have to specify query_embeds when input_ids is None")

        # past_key_values_length
        past_key_values_length = (
            past_key_values[0][0].shape[2] - self.config.query_length if past_key_values is not None else 0
        )

        query_length = query_embeds.shape[1] if query_embeds is not None else 0

        embedding_output = self.embeddings(
            input_ids=input_ids,
            position_ids=position_ids,
            query_embeds=query_embeds,
            past_key_values_length=past_key_values_length,
        )

        input_shape = embedding_output.size()[:-1]
        batch_size, seq_length = input_shape
        device = embedding_output.device

        if attention_mask is None:
            attention_mask = torch.ones((batch_size, seq_length + past_key_values_length), device=device)

        # Make the self-attention mask broadcastable to all heads.
        extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, device)

        # If a 2D or 3D attention mask is provided for the cross-attention,
        # make it broadcastable to [batch_size, num_heads, seq_length, seq_length]
        if encoder_hidden_states is not None:
            if isinstance(encoder_hidden_states, list):
                encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states[0].size()
            else:
                encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
            encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)

            if isinstance(encoder_attention_mask, list):
                encoder_extended_attention_mask = [self.invert_attention_mask(mask) for mask in encoder_attention_mask]
            elif encoder_attention_mask is None:
                encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
                encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
            else:
                encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
        else:
            encoder_extended_attention_mask = None

        # Prepare head mask if needed: 1.0 in head_mask means we keep the head
        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)

        encoder_outputs = self.encoder(
            embedding_output,
            attention_mask=extended_attention_mask,
            head_mask=head_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_extended_attention_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            query_length=query_length,
        )
        sequence_output = encoder_outputs[0]
        pooled_output = sequence_output[:, 0, :]

        if not return_dict:
            return (sequence_output, pooled_output) + encoder_outputs[1:]

        return BaseModelOutputWithPoolingAndCrossAttentions(
            last_hidden_state=sequence_output,
            pooler_output=pooled_output,
            past_key_values=encoder_outputs.past_key_values,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
            cross_attentions=encoder_outputs.cross_attentions,
        )


class KwargsForCausalLM(FlashAttentionKwargs, LossKwargs): ...


@auto_docstring(
    custom_intro="""
    InstructBLIP base Model consisting of language model, qformer and vision encoder.
    """
)
class InstructBlipModel(InstructBlipPreTrainedModel):
    main_input_name = "pixel_values"
    _keep_in_fp32_modules = ["query_tokens"]

    def __init__(self, config: InstructBlipConfig):
        super().__init__(config)

        self.vision_model = InstructBlipVisionModel(config.vision_config)

        self.query_tokens = nn.Parameter(torch.zeros(1, config.num_query_tokens, config.qformer_config.hidden_size))
        self.qformer = InstructBlipQFormerModel(config.qformer_config)

        self.language_projection = nn.Linear(config.qformer_config.hidden_size, config.text_config.hidden_size)

        self.language_model = AutoModel.from_config(config.text_config)

        if self.language_model._no_split_modules is not None:
            self._no_split_modules.extend(self.language_model._no_split_modules)

        if self.language_model._keep_in_fp32_modules is not None:
            self._keep_in_fp32_modules.extend(self.language_model._keep_in_fp32_modules)

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.language_model.get_input_embeddings()

    def set_input_embeddings(self, value):
        self.language_model.set_input_embeddings(value)

    def _tie_weights(self):
        if not self.config.use_decoder_only_language_model:
            self.language_model.encoder.embed_tokens = self.language_model.shared
            self.language_model.decoder.embed_tokens = self.language_model.shared

    def _preprocess_accelerate(self):
        r"""
        Some pre-processing hacks to make the model `accelerate` compatible. Check
        https://github.com/huggingface/transformers/pull/21707 for more details.
        """
        hf_device_map = self.hf_device_map

        if len(hf_device_map) > 1 and "language_model" not in hf_device_map and torch.cuda.device_count() > 1:
            # warn users about unexpected behavior when using multi-GPU + InstructBLIP + `accelerate`.
            logger.warning(
                "The `language_model` is not in the `hf_device_map` dictionary and you are running your script"
                " in a multi-GPU environment. this may lead to unexpected behavior when using `accelerate`."
                " Please pass a `device_map` that contains `language_model` to remove this warning."
                " Please refer to https://github.com/huggingface/blog/blob/main/accelerate-large-models.md for"
                " more details on creating a `device_map` for large models."
            )

        if hasattr(self.language_model, "_hf_hook"):
            self.language_model._hf_hook.io_same_device = True  # For `generate` compatibility

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        pixel_values: torch.FloatTensor,
        qformer_input_ids: torch.FloatTensor,
        qformer_attention_mask: Optional[torch.LongTensor] = None,
        input_ids: Optional[torch.FloatTensor] = None,
        attention_mask: Optional[torch.LongTensor] = None,
        decoder_input_ids: Optional[torch.LongTensor] = None,
        decoder_attention_mask: Optional[torch.LongTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        interpolate_pos_encoding: bool = False,
        use_cache: Optional[bool] = None,
        **kwargs: Unpack[KwargsForCausalLM],
    ) -> Union[Tuple, InstructBlipForConditionalGenerationModelOutput]:
        r"""
qformer_input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
    Indices of input sequence tokens in the vocabulary of the Q-Former. Input tokens can optionally be provided
    to serve as text prompt, which the Q-Former model will encode.

    Indices can be obtained using [`InstructBlipProcessor`]. See [`InstructBlipProcessor.__call__`] for
    details.

    [What are input IDs?](../glossary#input-ids)
qformer_attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
    Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

    - 1 for tokens that are **not masked**,
    - 0 for tokens that are **masked**.

    [What are attention masks?](../glossary#attention-mask)
input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
    Indices of input sequence tokens in the vocabulary of the language model. Input tokens can optionally be
    provided to serve as text prompt, which the language model can continue.

    Indices can be obtained using [`InstructBlipProcessor`]. See [`InstructBlipProcessor.__call__`] for
    details.

    [What are input IDs?](../glossary#input-ids)
decoder_attention_mask (`torch.BoolTensor` of shape `(batch_size, target_sequence_length)`, *optional*):
    Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also
    be used by default.

    Only relevant in case an encoder-decoder language model (like T5) is used.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # step 1: forward the images through the vision encoder to get image embeddings of shape
        # (batch_size, seq_len, hidden_size)
        vision_outputs = self.vision_model(
            pixel_values=pixel_values,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            interpolate_pos_encoding=interpolate_pos_encoding,
        )
        image_embeds = vision_outputs[0]
        image_attention_mask = torch.ones(image_embeds.size()[:-1], dtype=torch.long, device=image_embeds.device)

        # step 2: forward the query tokens through the Q-Former, using the image embeddings for cross-attention.
        # Difference with BLIP-2: the instruction (`qformer_input_ids`) is also fed to the Q-Former.
        query_tokens = self.query_tokens.expand(image_embeds.shape[0], -1, -1)
        query_attention_mask = torch.ones(query_tokens.size()[:-1], dtype=torch.long, device=image_embeds.device)
        if qformer_attention_mask is None:
            qformer_attention_mask = torch.ones_like(qformer_input_ids)
        qformer_attention_mask = torch.cat([query_attention_mask, qformer_attention_mask], dim=1)
        query_outputs = self.qformer(
            input_ids=qformer_input_ids,
            attention_mask=qformer_attention_mask,
            query_embeds=query_tokens,
            encoder_hidden_states=image_embeds,
            encoder_attention_mask=image_attention_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
        )
        query_output = query_outputs[0][:, : query_tokens.size(1), :]

        # step 3: project the Q-Former output and scatter it into the language model embeddings at the
        # positions of the image placeholder tokens
        language_model_inputs = self.language_projection(query_output)
        inputs_embeds = self.language_model.get_input_embeddings()(input_ids)
        if attention_mask is None:
            attention_mask = torch.ones_like(input_ids)

        special_image_mask = (input_ids == self.config.image_token_id).unsqueeze(-1).expand_as(inputs_embeds)
        inputs_embeds[special_image_mask] = language_model_inputs.flatten()

        # step 4: run the language model (decoder-only or encoder-decoder) on the merged embeddings
        if self.config.use_decoder_only_language_model:
            outputs = self.language_model(
                inputs_embeds=inputs_embeds,
                attention_mask=attention_mask,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
                use_cache=use_cache,
                **kwargs,
            )
        else:
            outputs = self.language_model(
                inputs_embeds=inputs_embeds,
                attention_mask=attention_mask,
                decoder_input_ids=decoder_input_ids,
                decoder_attention_mask=decoder_attention_mask,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
                use_cache=use_cache,
                **kwargs,
            )

        return InstructBlipForConditionalGenerationModelOutput(
            vision_outputs=vision_outputs,
            qformer_outputs=query_outputs,
            language_model_outputs=outputs,
        )


class KwargsForCausalLM(FlashAttentionKwargs, LossKwargs): ...


@auto_docstring(
    custom_intro="""
    InstructBLIP Model for generating text given an image and an optional text prompt. The model consists of a vision
    encoder, Querying Transformer (Q-Former) and a language model.

    One can optionally pass `input_ids` to the model, which serve as a text prompt, to make the language model continue
    the prompt. Otherwise, the language model starts generating text from the [BOS] (beginning-of-sequence) token.
    """
)
class InstructBlipForConditionalGeneration(InstructBlipPreTrainedModel, GenerationMixin):
    config_class = InstructBlipConfig
    main_input_name = "pixel_values"

    def __init__(self, config: InstructBlipConfig):
        super().__init__(config)

        self.vision_model = InstructBlipVisionModel(config.vision_config)

        self.query_tokens = nn.Parameter(torch.zeros(1, config.num_query_tokens, config.qformer_config.hidden_size))
        self.qformer = InstructBlipQFormerModel(config.qformer_config)

        self.language_projection = nn.Linear(config.qformer_config.hidden_size, config.text_config.hidden_size)

        if config.use_decoder_only_language_model:
            language_model = AutoModelForCausalLM.from_config(config.text_config)
        else:
            language_model = AutoModelForSeq2SeqLM.from_config(config.text_config)

        if language_model._no_split_modules is not None:
            self._no_split_modules.extend(language_model._no_split_modules)

        if language_model._keep_in_fp32_modules is not None:
            self._keep_in_fp32_modules.extend(language_model._keep_in_fp32_modules)

        self.language_model = language_model

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.language_model.get_input_embeddings()

    def set_input_embeddings(self, value):
        self.language_model.set_input_embeddings(value)

    def set_output_embeddings(self, new_embeddings):
        self.language_model.set_output_embeddings(new_embeddings)

    def get_output_embeddings(self) -> nn.Module:
        return self.language_model.get_output_embeddings()

    def get_encoder(self):
        return self.language_model.get_encoder()

    def get_decoder(self):
        return self.language_model.get_decoder()

    def _tie_weights(self):
        if not self.config.use_decoder_only_language_model:
            self.language_model.encoder.embed_tokens = self.language_model.shared
            self.language_model.decoder.embed_tokens = self.language_model.shared

    def _preprocess_accelerate(self):
        r"""
        Some pre-processing hacks to make the model `accelerate` compatible. Check
        https://github.com/huggingface/transformers/pull/21707 for more details.
        """
        hf_device_map = self.hf_device_map

        if len(hf_device_map) > 1 and "language_model" not in hf_device_map and torch.cuda.device_count() > 1:
            logger.warning(
                "The `language_model` is not in the `hf_device_map` dictionary and you are running your script"
                " in a multi-GPU environment. This may lead to unexpected behavior when using `accelerate`."
                " Please pass a `device_map` that contains `language_model` to remove this warning."
                " Please refer to https://github.com/huggingface/blog/blob/main/accelerate-large-models.md for"
                " more details on creating a `device_map` for large models."
            )

        if hasattr(self.language_model, "_hf_hook"):
            self.language_model._hf_hook.io_same_device = True  # For `generate` compatibility

    def get_image_features(
        self,
        pixel_values: torch.FloatTensor,
        qformer_input_ids: torch.LongTensor,
        qformer_attention_mask: Optional[torch.LongTensor] = None,
        interpolate_pos_encoding: bool = False,
        return_dict: bool = False,
    ):
        r"""
Encodes images into continuous embeddings that can be forwarded to the language model.

Args:
    pixel_values (`torch.FloatTensor` of shape `(batch_size, num_channels, image_size, image_size)`):
        The tensors corresponding to the input images.
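Returns:
    The query embeddings projected into the language model's input space, of shape `(batch_size,
    num_query_tokens, hidden_size)`; when `return_dict=True`, the raw vision and Q-Former outputs are
    returned alongside them.

Example (a minimal sketch, assuming the `Salesforce/instructblip-vicuna-7b` checkpoint and image used in the
example further below):

```python
import requests
from PIL import Image
from transformers import InstructBlipProcessor, InstructBlipForConditionalGeneration

processor = InstructBlipProcessor.from_pretrained("Salesforce/instructblip-vicuna-7b")
model = InstructBlipForConditionalGeneration.from_pretrained("Salesforce/instructblip-vicuna-7b")

url = "https://raw.githubusercontent.com/salesforce/LAVIS/main/docs/_static/Confusing-Pictures.jpg"
image = Image.open(requests.get(url, stream=True).raw).convert("RGB")
inputs = processor(images=image, text="What is unusual about this image?", return_tensors="pt")

# The instruction is fed to the Q-Former as well, so `qformer_input_ids` is needed next to `pixel_values`.
image_features = model.get_image_features(
    pixel_values=inputs.pixel_values,
    qformer_input_ids=inputs.qformer_input_ids,
    qformer_attention_mask=inputs.qformer_attention_mask,
)
```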
        """
        vision_outputs = self.vision_model(
            pixel_values=pixel_values,
            interpolate_pos_encoding=interpolate_pos_encoding,
            return_dict=True,
        )
        image_embeds = vision_outputs[0]
        image_attention_mask = torch.ones(image_embeds.size()[:-1], dtype=torch.long, device=image_embeds.device)

        query_tokens = self.query_tokens.expand(image_embeds.shape[0], -1, -1)
        query_attention_mask = torch.ones(query_tokens.size()[:-1], dtype=torch.long, device=image_embeds.device)
        if qformer_attention_mask is None:
            qformer_attention_mask = torch.ones_like(qformer_input_ids)
        qformer_attention_mask = torch.cat([query_attention_mask, qformer_attention_mask], dim=1)
        query_outputs = self.qformer(
            input_ids=qformer_input_ids,
            attention_mask=qformer_attention_mask,
            query_embeds=query_tokens,
            encoder_hidden_states=image_embeds,
            encoder_attention_mask=image_attention_mask,
            return_dict=True,
        )
        query_output = query_outputs[0][:, : query_tokens.size(1), :]

        # project the Q-Former output into the language model's embedding space
        language_model_inputs = self.language_projection(query_output)

        if return_dict:
            return language_model_inputs, vision_outputs, query_outputs
        return language_model_inputs

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        pixel_values: torch.FloatTensor,
        qformer_input_ids: torch.FloatTensor,
        qformer_attention_mask: Optional[torch.LongTensor] = None,
        input_ids: Optional[torch.FloatTensor] = None,
        attention_mask: Optional[torch.LongTensor] = None,
        decoder_input_ids: Optional[torch.LongTensor] = None,
        decoder_attention_mask: Optional[torch.LongTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        labels: Optional[torch.LongTensor] = None,
        return_dict: Optional[bool] = None,
        interpolate_pos_encoding: bool = False,
        use_cache: Optional[bool] = None,
        **kwargs: Unpack[KwargsForCausalLM],
    ) -> Union[Tuple, InstructBlipForConditionalGenerationModelOutput]:
        r"""
qformer_input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
    Indices of input sequence tokens in the vocabulary of the Q-Former. Input tokens can optionally be provided
    to serve as text prompt, which the Q-Former model will encode.

    Indices can be obtained using [`InstructBlipProcessor`]. See [`InstructBlipProcessor.__call__`] for
    details.

    [What are input IDs?](../glossary#input-ids)
qformer_attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
    Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

    - 1 for tokens that are **not masked**,
    - 0 for tokens that are **masked**.

    [What are attention masks?](../glossary#attention-mask)
input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
    Indices of input sequence tokens in the vocabulary of the language model. Input tokens can optionally be
    provided to serve as text prompt, which the language model can continue.

    Indices can be obtained using [`InstructBlipProcessor`]. See [`InstructBlipProcessor.__call__`] for
    details.

    [What are input IDs?](../glossary#input-ids)
decoder_attention_mask (`torch.BoolTensor` of shape `(batch_size, target_sequence_length)`, *optional*):
    Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also
    be used by default.

    Only relevant in case an encoder-decoder language model (like T5) is used.
labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
    Labels for computing the language modeling loss. Indices should be in `[-100, 0, ..., config.vocab_size -
    1]`. All labels set to `-100` are ignored (masked), the loss is only computed for labels in `[0, ...,
    config.vocab_size]`

Examples:

```python
>>> from transformers import InstructBlipProcessor, InstructBlipForConditionalGeneration
>>> import torch
>>> from PIL import Image
>>> import requests

>>> model = InstructBlipForConditionalGeneration.from_pretrained("Salesforce/instructblip-vicuna-7b")
>>> processor = InstructBlipProcessor.from_pretrained("Salesforce/instructblip-vicuna-7b")

>>> device = "cuda" if torch.cuda.is_available() else "cpu"
>>> model.to(device)  # doctest: +IGNORE_RESULT

>>> url = "https://raw.githubusercontent.com/salesforce/LAVIS/main/docs/_static/Confusing-Pictures.jpg"
>>> image = Image.open(requests.get(url, stream=True).raw).convert("RGB")
>>> prompt = "What is unusual about this image?"
>>> inputs = processor(images=image, text=prompt, return_tensors="pt").to(device)

>>> outputs = model.generate(
...     **inputs,
...     do_sample=False,
...     num_beams=5,
...     max_length=256,
...     min_length=1,
...     top_p=0.9,
...     repetition_penalty=1.5,
...     length_penalty=1.0,
...     temperature=1,
... )
>>> generated_text = processor.batch_decode(outputs, skip_special_tokens=True)[0].strip()
>>> print(generated_text)
The unusual aspect of this image is that a man is ironing clothes on the back of a yellow SUV, which is parked in the middle of a busy city street. This is an unconventional approach to ironing clothes, as it requires the man to balance himself and his ironing equipment on top of the vehicle while navigating through traffic. Additionally, the presence of taxis and other vehicles in the scene further emphasizes the unusual nature of this situation.
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        language_model_inputs, vision_outputs, query_outputs = self.get_image_features(
            pixel_values,
            qformer_input_ids=qformer_input_ids,
            qformer_attention_mask=qformer_attention_mask,
            interpolate_pos_encoding=interpolate_pos_encoding,
            return_dict=True,
        )
        vision_outputs = vision_outputs.to_tuple() if not return_dict else vision_outputs
        query_outputs = query_outputs.to_tuple() if not return_dict else query_outputs

        language_model_attention_mask = torch.ones(
            language_model_inputs.size()[:-1], dtype=torch.long, device=language_model_inputs.device
        )

        inputs_embeds = self.language_model.get_input_embeddings()(input_ids)
        if attention_mask is None:
            attention_mask = torch.ones_like(input_ids)

        if getattr(self.config, "image_token_id", None) is not None:
            # scatter the projected query embeddings into the positions of the image placeholder tokens
            special_image_mask = (input_ids == self.config.image_token_id).unsqueeze(-1).expand_as(inputs_embeds)
            inputs_embeds[special_image_mask] = language_model_inputs.flatten()
        else:
            logger.warning_once(
                "Expanding inputs for image tokens in InstructBLIP should be done in processing. "
                "Please follow instruction here (https://gist.github.com/zucchini-nlp/e9f20b054fa322f84ac9311d9ab67042) "
                "to update your InstructBLIP model. Using processors without these attributes in the config is "
                "deprecated and will throw an error in v4.50."
            )
            inputs_embeds = torch.cat([language_model_inputs, inputs_embeds.to(language_model_inputs.device)], dim=1)
            attention_mask = torch.cat(
                [language_model_attention_mask, attention_mask.to(language_model_attention_mask.device)], dim=1
            )

        if self.config.use_decoder_only_language_model:
            outputs = self.language_model(
                inputs_embeds=inputs_embeds,
                attention_mask=attention_mask,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
                use_cache=use_cache,
                **kwargs,
            )
            logits = outputs.logits if return_dict else outputs[0]
            loss = None
            if labels is not None:
                loss = self.loss_function(
                    logits=logits, labels=labels, vocab_size=self.config.text_config.vocab_size, **kwargs
                )
        else:
            outputs = self.language_model(
                inputs_embeds=inputs_embeds,
                attention_mask=attention_mask,
                decoder_input_ids=decoder_input_ids,
                decoder_attention_mask=decoder_attention_mask,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
                use_cache=use_cache,
                labels=labels,
                **kwargs,
            )
            loss = outputs.loss if return_dict else outputs[0]
            logits = outputs.logits if return_dict else outputs[1]

        return InstructBlipForConditionalGenerationModelOutput(
            loss=loss,
            logits=logits,
            vision_outputs=vision_outputs,
            qformer_outputs=query_outputs,
            language_model_outputs=outputs,
        )

    @torch.no_grad()
    def generate(
        self,
        pixel_values: torch.FloatTensor,
        qformer_input_ids: Optional[torch.LongTensor] = None,
        qformer_attention_mask: Optional[torch.LongTensor] = None,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.LongTensor] = None,
        interpolate_pos_encoding: bool = False,
        **generate_kwargs,
    ) -> torch.LongTensor:
        r"""
Overrides `generate` function to be able to use the model as a conditional generator.

Args:
    pixel_values (`torch.FloatTensor` of shape (batch_size, num_channels, height, width)):
        Input images to be processed.
    qformer_input_ids (`torch.LongTensor` of shape (batch_size, sequence_length), *optional*):
        The sequence used as a prompt to be fed to the Q-Former module.
    qformer_attention_mask (`torch.LongTensor` of shape (batch_size, sequence_length), *optional*):
        Mask to avoid performing attention on padding token indices.
    input_ids (`torch.LongTensor` of shape (batch_size, sequence_length), *optional*):
        The sequence used as a prompt for the generation.
    attention_mask (`torch.LongTensor` of shape (batch_size, sequence_length), *optional*):
        Mask to avoid performing attention on padding token indices.
    interpolate_pos_encoding (`bool`, *optional*, defaults to `False`):
        Whether to interpolate the positional encoding of the image embeddings.

Returns:
    captions (list): A list of strings of length batch_size * num_captions.
        """
        if hasattr(self, "hf_device_map"):
            # preprocess for `accelerate`
            self._preprocess_accelerate()

        batch_size = pixel_values.shape[0]

        language_model_inputs, vision_outputs, query_outputs = self.get_image_features(
            pixel_values,
            qformer_input_ids=qformer_input_ids,
            qformer_attention_mask=qformer_attention_mask,
            interpolate_pos_encoding=interpolate_pos_encoding,
            return_dict=True,
        )

        language_attention_mask = torch.ones(
            language_model_inputs.size()[:-1], dtype=torch.long, device=language_model_inputs.device
        )

        if input_ids is None:
            start_tokens = [self.config.text_config.bos_token_id]
            if getattr(self.config, "image_token_id", None) is not None:
                start_tokens = [self.config.image_token_id] * self.config.num_query_tokens + start_tokens
            input_ids = torch.tensor([start_tokens], dtype=torch.long, device=pixel_values.device)
            input_ids = input_ids.repeat(batch_size, 1)

        if attention_mask is None:
            attention_mask = torch.ones_like(input_ids)

        inputs_embeds = self.get_input_embeddings()(input_ids)

        if getattr(self.config, "image_token_id", None) is not None:
            special_image_mask = (input_ids == self.config.image_token_id).unsqueeze(-1).expand_as(inputs_embeds)
            inputs_embeds[special_image_mask] = language_model_inputs.flatten().to(inputs_embeds.device)
        else:
            logger.warning_once(
                "Expanding inputs for image tokens in InstructBLIP should be done in processing. "
                "Please follow instruction here (https://gist.github.com/zucchini-nlp/e9f20b054fa322f84ac9311d9ab67042) "
                "to update your InstructBLIP model. Using processors without these attributes in the config is "
                "deprecated and will throw an error in v4.50."
            )
            inputs_embeds = torch.cat([language_model_inputs, inputs_embeds.to(language_model_inputs.device)], dim=1)
            attention_mask = torch.cat(
                [language_attention_mask, attention_mask.to(language_attention_mask.device)], dim=1
            )

            # add the image embeddings' length to max_length, so that the final max_length is counted only on
            # token embeds; -1 accounts for the BOS token prepended by `generate`
            if not self.language_model.config.is_encoder_decoder:
                generate_kwargs["max_length"] = (
                    generate_kwargs.get("max_length", 20) + language_model_inputs.shape[1] - 1
                )
                generate_kwargs["min_length"] = generate_kwargs.get("min_length", 0) + language_model_inputs.shape[1]

        inputs = {"inputs_embeds": inputs_embeds, "attention_mask": attention_mask}
        if not self.language_model.config.is_encoder_decoder:
            inputs["input_ids"] = input_ids

        outputs = self.language_model.generate(**inputs, **generate_kwargs)

        return outputs


__all__ = [
    "InstructBlipQFormerModel",
    "InstructBlipPreTrainedModel",
    "InstructBlipModel",
    "InstructBlipForConditionalGeneration",
    "InstructBlipVisionModel",
]