
import math
from typing import Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache, StaticCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask
from ...modeling_flash_attention_utils import _flash_attention_forward, flash_attn_supports_top_left_mask
from ...modeling_layers import (
    GenericForQuestionAnswering,
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple, logging
from ...utils.deprecation import deprecate_kwarg
from ...utils.generic import check_model_inputs
from .configuration_diffllama import DiffLlamaConfig


logger = logging.get_logger(__name__)


class DiffLlamaMLP(nn.Module):
    """Gated feed-forward block (SwiGLU-style): down_proj(act(gate_proj(x)) * up_proj(x))."""

    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        # Three bias-free projections forming the gated MLP.
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        # Activation is applied to the gate branch only, then modulates the up branch.
        gated = self.act_fn(self.gate_proj(x)) * self.up_proj(x)
        return self.down_proj(gated)
def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    half = x.shape[-1] // 2
    first_half = x[..., :half]
    second_half = x[..., half:]
    return torch.cat((-second_half, first_half), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos and sin so that they
            can be properly broadcasted to the dimensions of q and k. For example, if cos and sin have the shape
            [batch_size, seq_len, head_dim] and q and k have the shape [batch_size, heads, seq_len, head_dim], then
            setting unsqueeze_dim=1 makes cos and sin broadcastable to the shapes of q and k. Similarly, if q and k
            have the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    rotated_q = (q * cos) + (rotate_half(q) * sin)
    rotated_k = (k * cos) + (rotate_half(k) * sin)
    return rotated_q, rotated_k


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    if n_rep == 1:
        return hidden_states
    # Each key/value head is duplicated n_rep times, kept adjacent (head-major order).
    return torch.repeat_interleave(hidden_states, n_rep, dim=1)


def lambda_init_fn(layer_idx):
    """Depth-dependent initial value of the differential-attention lambda (0.8 - 0.6 * exp(-0.3 * layer_idx))."""
    return 0.8 - 0.6 * math.exp(-0.3 * layer_idx)
        dee	j
        e	j
        f         dee	j
                 dee	j                 dee         dedee	j                 dee	j
        ee	j
                 eee	j
                          f         fd            Z xZS )DiffLlamaAttentionz=Multi-headed attention from 'Attention Is All You Need' paperNr*   re   c                    t                                                       || _        || _        |(t                              d| j        j         d           |j        | _        |j	        | _	        |j
        | _        t          |d| j	        | j        z            | _        |j        | _        | j        | j        z  | _        |j        | _        |j        | _        d| _        t'          j        | j	        | j        | j        z  |j                  | _        t'          j        | j	        | j        | j        z  |j                  | _        t'          j        | j	        | j        | j        z  |j                  | _        t'          j        | j        | j        z  | j	        |j                  | _        t5          |          | _        t'          j        t;          j        d|j        | j        f                    | _         t'          j        t;          j        d|j        | j        f                    | _!        t'          j        t;          j        d|j        | j        f                    | _"        t'          j        t;          j        d|j        | j        f                    | _#        t'          j$        d| j        z  |j%        d	
          | _&        d S )NzInstantiating z without passing a `layer_idx` is not recommended and will lead to errors during the forward call if caching is used. Please make sure to provide a `layer_idx` when creating this class.r`   Tr&   r   )sizerC   F)epselementwise_affine)'r(   r)   r*   re   loggerwarning_oncer5   r<   attention_dropoutr+   num_attention_heads	num_headsgetattrr`   r^   num_key_value_groupsmax_position_embeddings
rope_theta	is_causalr   r-   attention_biasq_projk_projv_projo_projrf   lambda_init	ParameterrG   normallambda_std_dev	lambda_q1	lambda_k1	lambda_q2	lambda_k2RMSNormrms_norm_eps	groupnormr4   r*   re   r5   s      r6   r)   zDiffLlamaAttention.__init__|   sa   ",!8 , , ,   "(!9!-3
D4D4VWW#)#= $(Nd6N$N!'-'E$ +i 0$.4=2PW]Wlmmmi 0$2JT]2Zagavwwwi 0$2JT]2Zagavwwwi >@PW]Wlmmm))44el1f6KSWS`Rb&c&c&cddel1f6KSWS`Rb&c&c&cddel1f6KSWS`Rb&c&c&cddel1f6KSWS`Rb&c&c&cddA$56;Nchiiir7   past_key_valuepast_key_values4.58new_nameversionFrW   position_embeddingsattention_maskrR   	use_cachecache_positionrY   c                    |                                 \  }	}
}|
}|                     |          }|                     |          }|                     |          }|                    |	|| j        | j                                      dd          }|                    |	|| j        | j                                      dd          }|                    |	|| j        | j                                      dd          }|\  }}t          ||||          \  }}|&|||d}|
                    ||| j        |          \  }}t          || j                  }t          || j                  }t          j        t          j        |dd          d          }|                    dddd          }t          j        ||                    dd                    t'          j        | j                  z  }|$|d d d d d d d |j        d         f         }||z   }t,          j                            |dt          j                                      |j                  }t,          j                            || j        | j        	          }t          j        t          j         | j!        | j"        z  dt          j                                                |j                  }t          j        t          j         | j#        | j$        z  dt          j                                                |j                  }||z
  | j%        z   }t          j        ||          }t          j        |dd          \  }}|||z  z
  }d| j%        z
  | &                    |          z  }|                    dd          '                                }|(                    |	|d          }| )                    |          }||fS )
Nr    rC   rQ   rP   r   rD   rB   r   rE   dtype)ptraining)*rj   rx   ry   rz   viewrq   r`   	transposer^   rV   updatere   ra   rs   rG   rH   chunkrepeatmatmulrc   sqrtrF   r   
functionalsoftmaxfloat32tor   dropoutro   r   rd   sumr   r   r   r   r|   r   
contiguousr\   r{   )r4   rW   r   r   rR   r   r   r   kwargsbsz
target_len_q_lenquery_states
key_statesvalue_statesrP   rQ   cache_kwargsattn_weightscausal_masklambda_1lambda_2lambda_fullattn_outputattn_output1attn_output2s                              r6   r;   zDiffLlamaAttention.forward   s    +//11Z{{=11[[//
{{=11#((eT^T]SS]]^_abcc__S%1I4=YYccdeghii
#((eT5Mt}]]gghiklmm&S#7jRUWZ#[#[ j&#&snUUL'6'='=j,X\Xfht'u'u$Jz4+DEE
 t/HIIy\1!!D!D!D"MMM#**1aA66|L*2F2Fq!2L2LMMPTPYZ^ZgPhPhh%(AAAqqq2HJ4DR4H2H)HIK'+5L },,\r,WWZZ[g[mnn},,\T=S^b^k,ll9UYt~'FBV[Vcdddeehh
 
 9UYt~'FBV[Vcdddeehh
 
 )D,<<l<>>%*[aQ%G%G%G"l"[<%??4++t~~k/J/JJ!++Aq11<<>>!))#ub99kk+..L((r7   r9   NNNFN)r<   r=   r>   __doc__r!   r   intr)   r   rG   Tensortuple
LongTensorr   boolr;   r?   r@   s   @r6   rh   rh   y   sI       GG j  j  j8C=  j  j  j  j  j  jD _%0A6RRR
 2637+/59<) <)|<) #5<#=><) !.	<)
 u/0<) "%<) <) !!12<) 
u|Xel3XeEL>Q5RR	S<) <) <) SR<) <) <) <) <)r7   rh   c                       e Zd ZdZ fdZ eddd          	 	 	 	 	 dd	ej        d
eej        ej        f         de	ej
                 de	ej
                 de	e         dede	ej
                 deej        df         fd            Z xZS )DiffLlamaFlashAttention2aN  
    DiffLlama flash attention module. This module inherits from `DiffLlamaAttention` as the weights of the module stays
    untouched. The only required change would be on the forward pass where it needs to correctly call the public API of
    flash attention and deal with padding tokens in case the input contains any of them.
    c                 `     t                      j        |i | t                      | _        d S r9   )r(   r)   r   _flash_attn_uses_top_left_mask)r4   argsr   r5   s      r6   r)   z!DiffLlamaFlashAttention2.__init__   s6    $)&)))
 /P.Q.Q+++r7   r   r   r   r   NFrW   r   r   rR   r   r   rY   c                 
   t          |t                    rt          d          |                                \  }}	}
|                     |          }|                     |          }|                     |          }|                    ||	| j        | j	                  
                    dd          }|                    ||	| j        | j	                  
                    dd          }|                    ||	| j        | j	                  
                    dd          }|4t                              d           |                     ||          \  }}n|\  }}t          ||||          \  }}|&|||d}|                    ||| j        |          \  }}|
                    dd          }|
                    dd          }|
                    dd          }| j        r| j        nd}|j        }|j        j        dk    r|j        j        nd}|t.          j        k    rt/          j                    r=t5          t.          d	          rt/          j        |          nt/          j                    }n3t5          | j        d
          r| j        j        }n| j        j        j        }t                              d| d           |                     |          }|                     |          }|                     |          }t/          j!        |dd          \  }}|"                    dddd          }|"                    dddd          }tG          |||||	||tI          | dd           | j%        | j&        
  
        }tG          |||||	||tI          | dd           | j%        | j&        
  
        }t/          j'        ||gd          }t/          j!        |dd          \  }}t/          j(        t/          j)        | j*        | j+        z  dt.          j                                                 |j                  }t/          j(        t/          j)        | j,        | j-        z  dt.          j                                                 |j                  }||z
  | j.        z   }|||z  z
  }d| j.        z
  | /                    |          z  }|0                    ||	d          1                                }| 2                    |          }|d fS )Nz`static` cache implementation is not compatible with `attn_implementation==flash_attention_2` make sure to use `sdpa` in the mean time, and open an issue at https://github.com/huggingface/transformersr    rC   aY  The attention layers in this model are transitioning from computing the RoPE embeddings internally through `position_ids` (2D tensor with the indexes of the tokens), to using externally computed `position_embeddings` (Tuple of tensors, containing cos and sin). In v4.46 `position_ids` will be removed and `position_embeddings` will be mandatory.r           mpscpuget_autocast_dtype_pre_quantization_dtypezThe input hidden states seems to be silently casted in float32, this might be related to the fact you have upcasted embedding or layer norm layers in float32. We will cast back the input in .rD   sliding_window)rR   r   r   use_top_left_maskrv   rB   r   )3
isinstancer
   
ValueErrorrj   rx   ry   rz   r   rq   r`   r   r^   rm   rn   
rotary_embrV   r   re   r   ro   r   devicetyperG   r   is_autocast_enabledhasattrr   get_autocast_gpu_dtyper*   r   weightr   r   r   r   rr   r   rv   rH   rd   r   r   r   r   r   r|   r   r\   r   r{   )r4   rW   r   r   rR   r   r   r   r   r   r   r   r   r   rP   rQ   r   dropout_rateinput_dtypedevice_typetarget_dtypevalue_states1value_states2r   r   r   r   r   r   s                                r6   r;   z DiffLlamaFlashAttention2.forward   s"    o{33 	}  
 &**,,UA{{=11[[//
{{=11
 $((eT^T]SS]]^_abcc__S%1I4=YYccdeghii
#((eT5Mt}]]gghiklmm&G   |\BBHC*HC#7jRUWZ#[#[ j&#&snUUL'6'='=j,X\Xfht'u'u$J $--a33))!Q//
#--a3315Gt--C #(2>2E2Je2S2Sl)..Y^%-''(** 
8 u&:;;8E,[999577  &?@@ 8#{B#{17$ $ $ $   (??<88L#|44J'??<88L',{<'J'J'J$}%,,Q1a88%,,Q1a88/% "4)94@@"An
 
 
 0% "4)94@@"An
 
 
 i| <"EEE%*[aQ%G%G%G"l9UYt~'FBV[Vcdddeehh
 
 9UYt~'FBV[Vcdddeehh
 
 )D,<<"[<%??4++t~~k/J/JJ!))#ub99DDFFkk+..D  r7   r   )r<   r=   r>   r   r)   r   rG   r   r   r   r   r   r   r;   r?   r@   s   @r6   r   r      s)        R R R R R _%0A6RRR
 6:37+/59B! B!|B! #5<#=>B! !!12	B!
 u/0B! "%B! B! !!12B! 
u|T!	"B! B! B! SRB! B! B! B! B!r7   r   c                   >   e Zd ZdZ eddd          	 	 	 	 	 ddej        d	eej        ej        f         d
eej                 deej	                 dee
         dedeej	                 deej        eej                 eeej                          f         fd            ZdS )DiffLlamaSdpaAttentiona   
    DiffLlama attention module using torch.nn.functional.scaled_dot_product_attention. This module inherits from
    `DiffLlamaAttention` as the weights of the module stays untouched. The only changes are on the forward pass to adapt to
    SDPA API.
    r   r   r   r   NFrW   r   r   rR   r   r   rY   c                 f   |                                 \  }	}
}|                     |          }|                     |          }|                     |          }|                    |	|
| j        | j                                      dd          }|                    |	|
| j        | j                                      dd          }|                    |	|
| j        | j                                      dd          }|\  }}t          ||||          \  }}|&|||d}|
                    ||| j        |          \  }}t          || j                  }t          || j                  }t          j        t          j        |dd          d          }|                    dddd          }|}||d d d d d d d |j        d         f         }|j        j        dk    r>|<|                                }|                                }|                                }|d u o|
dk    }t          j        j                            ||||| j        r| j        nd|	          }t          j        |dd          \  }}t          j        t          j        | j        | j        z  dt          j        
                                         |j!                  }t          j        t          j        | j"        | j#        z  dt          j        
                                         |j!                  }||z
  | j$        z   }|||z  z
  }d| j$        z
  | %                    |          z  }|                    dd                                          }|                    |	|
d          }| &                    |          }|d fS )Nr    rC   r   rD   rB   r   cudar   )	attn_mask	dropout_prv   r   )'rj   rx   ry   rz   r   rq   r`   r   r^   rV   r   re   ra   rs   rG   rH   r   r   rF   r   r   r   r   r   scaled_dot_product_attentionr   ro   rd   r   r   r   r   r   r   r   r   r|   r   r{   )r4   rW   r   r   rR   r   r   r   r   r   r   r   r   r   r   rP   rQ   r   r   rv   r   r   r   r   r   r   s                             r6   r;   zDiffLlamaSdpaAttention.forward{  s    &**,,UA{{=11[[//
{{=11#((eT^T]SS]]^_abcc__S%1I4=YYccdeghii
#((eT5Mt}]]gghiklmm&S#7jRUWZ#[#[ j&#&snUUL'6'='=j,X\Xfht'u'u$Jz4+DEE
 t/HIIy\1!!D!D!D"MMM#**1aA66$%%aaaAAA/E1A"1E/E&EFK #v--+2I'2244L#..00J'2244L  4'5EAI	h)FF!04Fd,,3 G 
 
 &+[aQ%G%G%G"l9UYt~'FBV[Vcdddeehh
 
 9UYt~'FBV[Vcdddeehh
 
 )D,<<"[<%??4++t~~k/J/JJ!++Aq11<<>>!&&sE266kk+..D  r7   r   )r<   r=   r>   r   r   rG   r   r   r   r   r   r   r;    r7   r6   r   r   s  s         _%0A6RRR
 2637+/59I! I!|I! #5<#=>I! !.	I!
 u/0I! "%I! I! !!12I! 
u|Xel3XeEL>Q5RR	SI! I! I! SRI! I! I!r7   r   r   c                   ,     e Zd Zd fd	Zd Zd Z xZS )DiffLlamaRMSNormư>c                     t                                                       t          j        t	          j        |                    | _        || _        dS )z?
        DiffLlamaRMSNorm is equivalent to T5LayerNorm
        N)r(   r)   r   r}   rG   onesr   variance_epsilon)r4   r+   rk   r5   s      r6   r)   zDiffLlamaRMSNorm.__init__  sD     	l5:k#:#:;; #r7   c                    |j         }|                    t          j                  }|                    d                              dd          }|t          j        || j        z             z  }| j        |                    |          z  S )NrC   rB   T)keepdim)	r   r   rG   r   powmeanrsqrtr   r   )r4   rW   r   variances       r6   r;   zDiffLlamaRMSNorm.forward  s|    #)%((77 $$Q'',,R,>>%Ht?T4T(U(UU{]--k::::r7   c                 H    t          | j        j                   d| j         S )Nz, eps=)r   r   rF   r   )r4   s    r6   
extra_reprzDiffLlamaRMSNorm.extra_repr  s&    )**II$2GIIIr7   )r   )r<   r=   r>   r)   r;   r   r?   r@   s   @r6   r   r     sb        $ $ $ $ $ $; ; ;J J J J J J Jr7   r   )eagerflash_attention_2sdpac                   4    e Zd Zdedef fdZ eddd          	 	 	 	 	 	 dd
ej        de	ej                 de	ej
                 de	e         de	e         de	ej
                 de	eej        ej        f                  dee         dej        fd            Z xZS )DiffLlamaDecoderLayerr*   re   c                 J   t                                                       |j        | _        t          |j                 ||          | _        t          |          | _        t          |j        |j	                  | _
        t          |j        |j	                  | _        d S )N)r*   re   rk   )r(   r)   r+   DIFFLLAMA_ATTENTION_CLASSES_attn_implementation	self_attnr#   mlpr   r   input_layernormpost_attention_layernormr   s      r6   r)   zDiffLlamaDecoderLayer.__init__  s    !-4V5PQY_ktuuu''/0BH[\\\(89KQWQd(e(e(e%%%r7   r   r   r   r   NFrW   r   rR   r   r   r   r   rY   c                     |}	|                      |          } | j        d|||||||d|\  }}
|	|z   }|}	|                     |          }|                     |          }|	|z   }|S )N)rW   r   rR   r   r   r   r   r   )r   r   r   r   )r4   rW   r   rR   r   r   r   r   r   residualr   s              r6   r;   zDiffLlamaDecoderLayer.forward  s     !,,];;)4> 	
')%+) 3	
 	
 	
 	
q !=0 !55mDD// =0r7   )NNNFNN)r<   r=   r>   r!   r   r)   r   rG   r   r   r   r   r   r   r   r   r;   r?   r@   s   @r6   r   r     s5       f f3 f f f f f f _%0A6RRR 2637+/$)59KO | !. u/0	
 "% D> !!12 &eEL%,,F&GH +, 
   SR    r7   r   c                   \     e Zd ZU eed<   dZdZdgZdgZdZ	dZ
dZdZdZeedZ fdZ xZS )	DiffLlamaPreTrainedModelr*   modelTr   r   F)rW   
attentionsc                    t                                          |           t          |t                    r|j        j                            d| j        j                   |j	        j                            d| j        j                   |j
        j                            d| j        j                   |j        j                            d| j        j                   d S d S )Nr   )r(   _init_weightsr   rh   r   datanormal_r*   r   r   r   r   )r4   moduler5   s     r6   r  z&DiffLlamaPreTrainedModel._init_weights$  s    f%%%f011 	I!))!T[-GHHH!))!T[-GHHH!))!T[-GHHH!))!T[-GHHHHH		I 	Ir7   )r<   r=   r>   r!   __annotations__base_model_prefixsupports_gradient_checkpointing_no_split_modules_skip_keys_device_placement_supports_flash_attn_supports_sdpa_supports_flex_attn_can_compile_fullgraph_supports_attention_backendr   rh   _can_record_outputsr  r?   r@   s   @r6   r  r    s         &*#01#4"5N!"'.( 
I I I I I I I I Ir7   r  c                   |     e Zd ZU ej        ed<   ddef fdZ ej                    e	d                         Z
 xZS )DiffLlamaRotaryEmbeddinginv_freqNr*   c                    t                                                       t          |d          rSt          |j        t
                    r9|j                            d|j                            d                    | _        nd| _        |j        | _	        |j        | _
        || _        t          | j                 | _        |                     | j        |          \  }| _        |                     d|d           | j        | _        d S )Nrope_scaling	rope_typer   defaultr  F)
persistent)r(   r)   r   r   r  dictgetr  rt   max_seq_len_cachedoriginal_max_seq_lenr*   r   rope_init_fnattention_scalingregister_bufferr  original_inv_freq)r4   r*   r   r  r5   s       r6   r)   z!DiffLlamaRotaryEmbedding.__init__0  s    6>** 	'z&:Mt/T/T 	'#044[&BUBYBYZ`BaBabbDNN&DN"("@$*$B!/?+/+<+<T[&+Q+Q($(ZeDDD!%r7   c                 X   | j         d d d d f                                                             |j        d         dd                              |j                  }|d d d d d f                                         }t          |j        j        t                    r|j        j        dk    r|j        j        nd}t          j
        |d          5  |                                |                                z                      dd          }t          j        ||fd	          }|                                | j        z  }|                                | j        z  }	d d d            n# 1 swxY w Y   |                    |j        
          |	                    |j        
          fS )Nr   rB   r    r   r   F)r   enabledrC   rD   )r   )r  floatr[   rF   r   r   r   r   strrG   autocastr   rH   rP   r"  rQ   r   )
r4   r:   rR   inv_freq_expandedposition_ids_expandedr   freqsembrP   rQ   s
             r6   r;   z DiffLlamaRotaryEmbedding.forwardA  s    !M$4-8>>@@GGHZ[\H]_acdeehhijiqrr ,QQQaaaZ 8 > > @ @'1!(-'E'Ek!(-[`J`J`ahmmfk^UCCC 	5 	5&,,..1F1L1L1N1NNYYZ[]^__E)UEN333C''))d44C''))d44C		5 	5 	5 	5 	5 	5 	5 	5 	5 	5 	5 	5 	5 	5 	5 vvAGv$$cff17f&;&;;;s   BE++E/2E/r9   )r<   r=   r>   rG   r   r
  r!   r)   no_gradr   r;   r?   r@   s   @r6   r  r  -  s         l/ / / / / / / /" U]__< <  _< < < < <r7   r  c                       e Zd Zdef fdZee	 	 	 	 	 	 	 ddeej	                 deej
                 deej	                 dee         deej                 d	eej	                 d
ee         dee         defd                        Z xZS )DiffLlamaModelr*   c                    t                                                     j        | _        j        | _        t          j        j        j        | j                  | _        t          j	        fdt          j                  D                       | _        t          j        j                  | _        t!                    | _        d| _        |                                  d S )Nc                 0    g | ]}t          |          S r   )r   ).0re   r*   s     r6   
<listcomp>z+DiffLlamaModel.__init__.<locals>.<listcomp>Z  s$    ggg)"6955gggr7   r   r*   F)r(   r)   pad_token_idpadding_idx
vocab_sizer   	Embeddingr+   embed_tokens
ModuleListrangenum_hidden_layerslayersr   r   normr  r   gradient_checkpointing	post_initr3   s    `r6   r)   zDiffLlamaModel.__init__S  s       !. +L):F<NPTP`aamgggguVMeGfGfggg
 
 %V%7V=PQQQ	2&AAA&+# 	r7   N	input_idsr   rR   r   inputs_embedsr   r   r   rY   c           
      N   |d u |d uz  rt          d          ||                     |          }|r|t          | j                  }|B||                                nd}	t          j        |	|	|j        d         z   |j                  }||	                    d          }t          | j        |||||          }
|}|                     ||          }| j        d | j        j                 D ]} ||f|
||||d|}|                     |          }t          ||          S )	Nz:You must specify exactly one of input_ids or inputs_embedsr5  r   r    )r   )r*   input_embedsr   r   r   rR   )r   rR   r   r   r   )last_hidden_stater   )r   r:  r	   r*   get_seq_lengthrG   arangerF   r   rM   r   r   r>  r=  r?  r   )r4   rB  r   rR   r   rC  r   r   r   past_seen_tokensr   rW   r   decoder_layers                 r6   r;   zDiffLlamaModel.forwardc  s    -t";< 	[YZZZ *.*;*;I*F*FM 	?0*$+>>>O!CRC^==???de+0< "2]5H5K"KTaTh, , ,N )33A66L(;&))+%
 
 
 &"oom\JJ![)H4;+H)HI 		 		M)M*) /-$7   MM 		-00&++
 
 
 	
r7   )NNNNNNN)r<   r=   r>   r!   r)   r   r   r   rG   r   r   r   FloatTensorr   r   r   r   r;   r?   r@   s   @r6   r0  r0  Q  s                151537+/5959$(8
 8
E,-8
 !.8
 u/0	8

 "%8
   128
 !!128
 D>8
 +,8
 
!8
 8
 8
 ^ 8
 8
 8
 8
 8
r7   r0  c                   f    e Zd ZdgZddiZddgdgfiZ fdZee	 	 	 	 	 	 	 	 	 dd	e	e
j                 d
e	e
j                 de	e
j                 de	e         de	e
j                 de	e
j                 de	e         de	e
j                 deee
j        f         dee         defd                        Z xZS )DiffLlamaForCausalLMzlm_head.weightlm_headcolwise_reprW   logitsc                     t                                          |           t          |          | _        |j        | _        t          j        |j        |j        d          | _        | 	                                 d S r%   )
r(   r)   r0  r  r8  r   r-   r+   rN  rA  r3   s     r6   r)   zDiffLlamaForCausalLM.__init__  sj       #F++
 +y!3V5FUSSS 	r7   Nr   rB  r   rR   r   rC  labelsr   r   logits_to_keepr   rY   c
                 R    | j         d|||||||d|
}|j        }t          |	t                    rt	          |	 d          n|	}|                     |dd|ddf                   }d}| | j        d||| j        j        d|
}t          |||j
        |j        |j                  S )a  
        Example:

        ```python
        >>> from transformers import AutoTokenizer, DiffLlamaForCausalLM

        >>> model = DiffLlamaForCausalLM.from_pretrained("google/diffllama-7b")
        >>> tokenizer = AutoTokenizer.from_pretrained("google/diffllama-7b")

        >>> prompt = "What is your favorite condiment?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "What is your favorite condiment?"
        ```)rB  r   rR   r   rC  r   r   N)rP  rR  r8  )lossrP  r   rW   r  r   )r  rF  r   r   slicerN  loss_functionr*   r8  r   r   rW   r  )r4   rB  r   rR   r   rC  rR  r   r   rS  r   outputsrW   slice_indicesrP  rU  s                   r6   r;   zDiffLlamaForCausalLM.forward  s    @ ,64: 	,
)%+')	,
 	,
 	,
 	,
  18B>SV8W8Wk~ot444]kmAAA}aaa,?@AA%4%pVFt{OeppioppD%#3!/)
 
 
 	
r7   )	NNNNNNNNr   )r<   r=   r>   _tied_weights_keys_tp_plan_pp_planr)   r   r   r   rG   r   r   r   rK  r   r   r   r   r   r   r;   r?   r@   s   @r6   rM  rM    sa       *+=)H_-z:;H      151537+/59-1$(59348
 8
E,-8
 !.8
 u/0	8

 "%8
   128
 )*8
 D>8
 !!128
 c5</08
 +,8
 
 8
 8
 8
 ^ 8
 8
 8
 8
 8
r7   rM  c                       e Zd ZdS )"DiffLlamaForSequenceClassificationNr<   r=   r>   r   r7   r6   r^  r^            Dr7   r^  c                       e Zd ZdZdS )DiffLlamaForQuestionAnsweringtransformerN)r<   r=   r>   r  r   r7   r6   rb  rb    s        %r7   rb  c                       e Zd ZdS )DiffLlamaForTokenClassificationNr_  r   r7   r6   re  re    r`  r7   re  )r  r0  rM  r^  rb  re  )Nr    )Hrc   typingr   r   rG   r   activationsr   cache_utilsr   r	   r
   
generationr   integrationsr   masking_utilsr   modeling_flash_attention_utilsr   r   modeling_layersr   r   r   r   modeling_outputsr   r   modeling_rope_utilsr   r   modeling_utilsr   processing_utilsr   utilsr   r   r   r   utils.deprecationr   utils.genericr   configuration_diffllamar!   
get_loggerr<   rm   Moduler#   rK   rV   r   r   ra   rf   rh   r   r   r   r   r   r  r  r0  rM  r^  rb  re  __all__r   r7   r6   <module>ry     s  0  " " " " " " " "        ! ! ! ! ! ! ; ; ; ; ; ; ; ; ; ; ) ) ) ) ) ) 7 7 7 7 7 7 / / / / / / i i i i i i i i            P O O O O O O O K K K K K K K K - - - - - - & & & & & & R R R R R R R R R R R R 0 0 0 0 0 0 / / / / / / 4 4 4 4 4 4 
	H	%	%    29    ( ( (   6	UU\ 	U# 	U%, 	U 	U 	U 	U2 2 2b) b) b) b) b) b) b) b)JR! R! R! R! R!1 R! R! R!jR! R! R! R! R!/ R! R! R!j Y''J J J J Jry J J ('J*  1"  + + + + +6 + + +\ I I I I I I I I4!< !< !< !< !<ry !< !< !<H K
 K
 K
 K
 K
- K
 K
 K
\ H
 H
 H
 H
 H
3_ H
 H
 H
V	 	 	 	 	)IKc 	 	 	& & & & &$?AY & & &	 	 	 	 	&CE] 	 	 	  r7   