
    cCiP                        S SK r S SKJr  S SKrS SKJr  SSKJrJr  SSKJ	r	J
r
  SSKJr  SSKJr  SS	KJr  S
SKJr  S
SKJrJrJrJrJrJrJrJr  S
SKJr  SSKJr  \R@                  " \!5      r"Sr#Sr$ " S S\5      r%S r& " S S\RN                  5      r( " S S\(5      r) " S S\(5      r*\(\)\*S.r+ " S S\5      r, " S S\5      r- " S  S!\5      r. " S" S#\5      r/ " S$ S%\5      r0 " S& S'\5      r1 " S( S)\5      r2/ S*Qr3g)+    N)Optional)nn   )CacheStaticCache)_flash_attention_forward!flash_attn_supports_top_left_mask)PreTrainedModel)logging)deprecate_kwarg   )GemmaForCausalLM)LlamaDecoderLayerLlamaForQuestionAnsweringLlamaForSequenceClassificationLlamaForTokenClassification
LlamaModelLlamaPreTrainedModelapply_rotary_pos_emb	repeat_kv)
MistralMLP   )DiffLlamaConfigzkajuma/DiffLlama-0.3B-handcutr   c                       \ rS rSrSrg)DiffLlamaMLP2    N__name__
__module____qualname____firstlineno____static_attributes__r       i/home/james-whalen/.local/lib/python3.13/site-packages/transformers/models/diffllama/modular_diffllama.pyr   r   2       r$   r   c                 @    SS[         R                  " SU -  5      -  -
  $ )Ng?g333333?g333333ӿ)mathexp)	layer_idxs    r%   lambda_init_fnr+   6   s     txxy 01111r$   c                     ^  \ rS rSrSrSS\S\\   4U 4S jjjr\	" SSSS	9     SS
\
R                  S\\
R                  \
R                  4   S\\
R                     S\\
R                     S\\   S\S\\
R                     S\\
R                  \\
R                     \\\
R                        4   4S jj5       rSrU =r$ )DiffLlamaAttention:   z=Multi-headed attention from 'Attention Is All You Need' paperconfigr*   c                   > [         TU ]  5         Xl        X l        Uc-  [        R                  SU R                  R                   S35        UR                  U l        UR                  U l	        UR                  U l        [        USU R                  U R                  -  5      U l        UR                  U l        U R                  U R                  -  U l        UR                   U l        UR"                  U l        SU l        [&        R(                  " U R                  U R                  U R                  -  UR*                  S9U l        [&        R(                  " U R                  U R                  U R                  -  UR*                  S9U l        [&        R(                  " U R                  U R                  U R                  -  UR*                  S9U l        [&        R(                  " U R                  U R                  -  U R                  UR*                  S9U l        [5        U5      U l        [&        R8                  " [:        R<                  " SUR>                  U R                  4S95      U l         [&        R8                  " [:        R<                  " SUR>                  U R                  4S95      U l!        [&        R8                  " [:        R<                  " SUR>                  U R                  4S95      U l"        [&        R8                  " [:        R<                  " SUR>                  U R                  4S95      U l#        [&        RH                  " SU R                  -  URJ                  S	S
9U l&        g )NzInstantiating z without passing a `layer_idx` is not recommended and will lead to errors during the forward call if caching is used. Please make sure to provide a `layer_idx` when creating this class.head_dimT)biasr   )sizer   F)epselementwise_affine)'super__init__r/   r*   loggerwarning_once	__class__r   attention_dropouthidden_sizenum_attention_heads	num_headsgetattrr1   num_key_value_headsnum_key_value_groupsmax_position_embeddings
rope_theta	is_causalr   Linearattention_biasq_projk_projv_projo_projr+   lambda_init	Parametertorchnormallambda_std_dev	lambda_q1	lambda_k1	lambda_q2	lambda_k2RMSNormrms_norm_eps	groupnormselfr/   r*   r:   s      r%   r7   DiffLlamaAttention.__init__=   sz   " !8!8 9 :, , "(!9!9!--33
D4D4D4VW#)#=#= $(NNd6N6N$N!'-'E'E$ ++ii 0 0$..4==2PW]WlWlmii 0 0$2J2JT]]2Zagavavwii 0 0$2J2JT]]2Zagavavwii >@P@PW]WlWlm))4ell1f6K6KSWS`S`Rb&cdell1f6K6KSWS`S`Rb&cdell1f6K6KSWS`S`Rb&cdell1f6K6KSWS`S`Rb&cdA$56;N;Nchir$   past_key_valuepast_key_values4.58new_nameversionhidden_statesposition_embeddingsattention_maskposition_ids	use_cachecache_positionreturnc                    UR                  5       u  pnU
nU R                  U5      nU R                  U5      nU R                  U5      nUR	                  XU R
                  U R                  5      R                  SS5      nUR	                  XU R                  U R                  5      R                  SS5      nUR	                  XU R                  U R                  5      R                  SS5      nUu  nn[        XUU5      u  pUb%  UUUS.nUR                  XU R                  U5      u  p[        XR                  5      n[        XR                  5      n[        R                  " [        R                   " USSS9SS9nUR#                  SSSS5      n[        R$                  " XR                  SS5      5      [&        R(                  " U R                  5      -  nUb#  US S 2S S 2S S 2S UR*                  S   24   nUU-   n[,        R.                  R1                  US[        R2                  S9R5                  UR6                  5      n[,        R.                  R9                  UU R:                  U R<                  S	9n[        R>                  " [        R@                  " U RB                  U RD                  -  S[        R2                  S95      R5                  UR6                  5      n[        R>                  " [        R@                  " U RF                  U RH                  -  S[        R2                  S95      R5                  UR6                  5      nUU-
  U RJ                  -   n[        R$                  " UU5      n[        R                   " USSS9u  nnUUU-  -
  nSU RJ                  -
  U RM                  U5      -  nUR                  SS5      RO                  5       nURQ                  XS5      nU RS                  U5      nUU4$ )
Nr   r   sincosre   dimr   rl   dtype)ptraining)*r3   rG   rH   rI   viewr>   r1   	transposer@   r   updater*   r   rA   rM   catchunkrepeatmatmulr(   sqrtshaper   
functionalsoftmaxfloat32torp   dropoutr;   rr   r)   sumrP   rQ   rR   rS   rK   rV   
contiguousreshaperJ   )rX   r`   ra   rb   rc   r[   rd   re   kwargsbsz
target_len_q_lenquery_states
key_statesvalue_statesrj   ri   cache_kwargsattn_weightscausal_masklambda_1lambda_2lambda_fullattn_outputattn_output1attn_output2s                              r%   forwardDiffLlamaAttention.forward_   sd    +//1{{=1[[/
{{=1#((T^^T]]S]]^_abc__S1I1I4==Yccdeghi
#((T5M5Mt}}]gghiklm&S#7RUWZ#[ &#&snUL'6'='=jX\XfXfht'u$Jz+D+DE
 /H/HIyy\1!!D"M#**1aA6||L2F2Fq!2LMPTPYPYZ^ZgZgPhh%(Aq2HJ4D4DR4H2H)HIK'+5L }},,\r,WZZ[g[m[mn}},,\T=S=S^b^k^k,l99UYYt~~'FBV[VcVcdehh
 99UYYt~~'FBV[VcVcdehh
 )D,<,<<ll<>%*[[aQ%G"l"[<%??4+++t~~k/JJ!++Aq1<<>!))#b9kk+.L((r$   )r;   r/   rV   r1   r<   rD   rH   rK   rQ   rS   rP   rR   r*   rB   r>   rA   r@   rJ   rG   rC   rI   NNNNFN)r   r    r!   r"   __doc__r   r   intr7   r   rM   Tensortuple
LongTensorr   boolr   r#   __classcell__r:   s   @r%   r-   r-   :   s   G j  j8C=  j  jD %0A6R
 2637+/59<)||<) #5<<#=><) !.	<)
 u//0<) "%<) <) !!1!12<) 
u||Xell3XeELL>Q5RR	S<) S<)r$   r-   c                   2  ^  \ rS rSrSrU 4S jr\" SSSS9     SS	\R                  S


class DiffLlamaFlashAttention2(DiffLlamaAttention):
    """
    DiffLlama flash attention module. This module inherits from `DiffLlamaAttention` as the weights of the module stay
    untouched. The only required change would be on the forward pass where it needs to correctly call the public API of
    flash attention and deal with padding tokens in case the input contains any of them.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # flash_attn<2.1 generates a top-left aligned causal mask, while what is
        # needed here is bottom-right alignment.
        self._flash_attn_uses_top_left_mask = flash_attn_supports_top_left_mask()

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
    ) -> tuple[torch.Tensor, None]:
        if isinstance(past_key_values, StaticCache):
            raise ValueError(
                "`static` cache implementation is not compatible with `attn_implementation==flash_attention_2` "
                "make sure to use `sdpa` in the mean time, and open an issue at https://github.com/huggingface/transformers"
            )

        bsz, q_len, _ = hidden_states.size()

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        if position_embeddings is None:
            logger.warning_once(
                "The attention layers in this model are transitioning from computing the RoPE embeddings internally "
                "through `position_ids` (2D tensor with the indexes of the tokens), to using externally computed "
                "`position_embeddings` (Tuple of tensors, containing cos and sin). In v4.46 `position_ids` will be "
                "removed and `position_embeddings` will be mandatory."
            )
            cos, sin = self.rotary_emb(value_states, position_ids)
        else:
            cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position is needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        # Flash attention requires the layout [batch_size, sequence_length, num_heads, head_dim].
        query_states = query_states.transpose(1, 2)
        key_states = key_states.transpose(1, 2)
        value_states = value_states.transpose(1, 2)

        dropout_rate = self.attention_dropout if self.training else 0.0

        # In PEFT, usually we cast the layer norms in float32 for training stability reasons,
        # therefore the input hidden states get silently casted in float32. Hence, we need to
        # cast them back in the correct dtype just to be sure everything works as expected.
        input_dtype = query_states.dtype
        device_type = query_states.device.type if query_states.device.type != "mps" else "cpu"
        if input_dtype == torch.float32:
            if torch.is_autocast_enabled():
                target_dtype = (
                    torch.get_autocast_dtype(device_type)
                    if hasattr(torch, "get_autocast_dtype")
                    else torch.get_autocast_gpu_dtype()
                )
            # Handle the case where the model is quantized
            elif hasattr(self.config, "_pre_quantization_dtype"):
                target_dtype = self.config._pre_quantization_dtype
            else:
                target_dtype = self.q_proj.weight.dtype

            logger.warning_once(
                f"The input hidden states seems to be silently casted in float32, this might be related to the fact "
                f"you have upcasted embedding or layer norm layers in float32. We will cast back the input in "
                f"{target_dtype}."
            )

            query_states = query_states.to(target_dtype)
            key_states = key_states.to(target_dtype)
            value_states = value_states.to(target_dtype)

        value_states1, value_states2 = torch.chunk(value_states, 2, dim=2)
        value_states1 = value_states1.repeat(1, 1, 2, 1)
        value_states2 = value_states2.repeat(1, 1, 2, 1)

        attn_output1 = _flash_attention_forward(
            query_states,
            key_states,
            value_states1,
            attention_mask,
            q_len,
            position_ids=position_ids,
            dropout=dropout_rate,
            sliding_window=getattr(self, "sliding_window", None),
            use_top_left_mask=self._flash_attn_uses_top_left_mask,
            is_causal=self.is_causal,
        )

        attn_output2 = _flash_attention_forward(
            query_states,
            key_states,
            value_states2,
            attention_mask,
            q_len,
            position_ids=position_ids,
            dropout=dropout_rate,
            sliding_window=getattr(self, "sliding_window", None),
            use_top_left_mask=self._flash_attn_uses_top_left_mask,
            is_causal=self.is_causal,
        )

        # Regroup so each differential half of the heads sees both value halves
        # along the feature dimension.
        attn_output = torch.cat([attn_output1, attn_output2], dim=-1)
        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=2)

        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init
        attn_output = attn_output1 - lambda_full * attn_output2
        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)

        attn_output = attn_output.reshape(bsz, q_len, -1).contiguous()
        attn_output = self.o_proj(attn_output)

        return attn_output, None


class DiffLlamaSdpaAttention(DiffLlamaAttention):
    """
    DiffLlama attention module using torch.nn.functional.scaled_dot_product_attention. This module inherits from
    `DiffLlamaAttention` as the weights of the module stay untouched. The only changes are on the forward pass to adapt to
    SDPA API.
    """

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        bsz, q_len, _ = hidden_states.size()

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        key_states = repeat_kv(key_states, self.num_key_value_groups)
        value_states = repeat_kv(value_states, self.num_key_value_groups)

        value_states = torch.cat(torch.chunk(value_states, 2, dim=1), dim=-1)
        value_states = value_states.repeat(1, 2, 1, 1)

        causal_mask = attention_mask
        if attention_mask is not None:
            causal_mask = causal_mask[:, :, :, : key_states.shape[-2]]

        # SDPA with memory-efficient backend is currently (torch==2.1.2) bugged with
        # non-contiguous inputs with custom attn_mask.
        # Reference: https://github.com/pytorch/pytorch/issues/112577.
        if query_states.device.type == "cuda" and causal_mask is not None:
            query_states = query_states.contiguous()
            key_states = key_states.contiguous()
            value_states = value_states.contiguous()

        is_causal = causal_mask is None and q_len > 1

        attn_output = torch.nn.functional.scaled_dot_product_attention(
            query_states,
            key_states,
            value_states,
            attn_mask=causal_mask,
            dropout_p=self.attention_dropout if self.training else 0.0,
            is_causal=is_causal,
        )

        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=1)
        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init
        attn_output = attn_output1 - lambda_full * attn_output2
        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)

        attn_output = attn_output.transpose(1, 2).contiguous()
        attn_output = attn_output.view(bsz, q_len, -1)

        attn_output = self.o_proj(attn_output)

        return attn_output, None
u||Xell3XeELL>Q5RR	SI! SI!r$   r   )eagerflash_attention_2sdpac                   4   ^  \ rS rSrS\S\4U 4S jjrSrU =r$ )DiffLlamaDecoderLayeri  r/   r*   c                 \   > [         TU ]  X5        [        UR                     " XS9U l        g )N)r/   r*   )r6   r7   DIFFLLAMA_ATTENTION_CLASSES_attn_implementation	self_attnrW   s      r%   r7   DiffLlamaDecoderLayer.__init__  s(    +4V5P5PQY_ur$   )r   )	r   r    r!   r"   r   r   r7   r#   r   r   s   @r%   r   r     s    v v3 v vr$   r   c                   "    \ rS rSrSrSrS rSrg)DiffLlamaPreTrainedModeli  Fc                 ,   [         R                  " X5        [        U[        5      (       a  UR                  R
                  R                  SU R                  R                  5        UR                  R
                  R                  SU R                  R                  5        UR                  R
                  R                  SU R                  R                  5        UR                  R
                  R                  SU R                  R                  5        g g )Nr   )r
   _init_weightsr   r-   rP   datanormal_r/   rO   rQ   rR   rS   )rX   modules     r%   r   &DiffLlamaPreTrainedModel._init_weights  s    %%d3f011!!))!T[[-G-GH!!))!T[[-G-GH!!))!T[[-G-GH!!))!T[[-G-GH	 2r$   r   N)r   r    r!   r"   _supports_flex_attn_supports_attention_backendr   r#   r   r$   r%   r   r     s    "'Ir$   r   c                       \ rS rSrSrg)DiffLlamaModeli  r   Nr   r   r$   r%   r   r     r&   r$   r   c                       \ rS rSrSrg)DiffLlamaForCausalLMi  r   Nr   r   r$   r%   r   r     r&   r$   r   c                       \ rS rSrSrg)"DiffLlamaForSequenceClassificationi  r   Nr   r   r$   r%   r   r     r&   r$   r   c                       \ rS rSrSrg)DiffLlamaForQuestionAnsweringi  r   Nr   r   r$   r%   r   r     r&   r$   r   c                       \ rS rSrSrg)DiffLlamaForTokenClassificationi  r   Nr   r   r$   r%   r   r     r&   r$   r   )r   r   r   r   r   r   )4r(   typingr   rM   r   cache_utilsr   r   modeling_flash_attention_utilsr   r	   modeling_utilsr
   utilsr   utils.deprecationr   gemma.modeling_gemmar   llama.modeling_llamar   r   r   r   r   r   r   r   mistral.modeling_mistralr   configuration_diffllamar   
get_loggerr   r8   _CHECKPOINT_FOR_DOC_CONFIG_FOR_DOCr   r+   Moduler-   r   r   r   r   r   r   r   r   r   r   __all__r   r$   r%   <module>r      s  $     - i -  0 3	 	 	 2 4 
		H	%5 #	: 	2b) b)JR!1 R!jR!/ R!l  1" v- v
I3 
I	Z 		+ 		)G 		$= 		&A 	r$   
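

# Usage sketch (illustrative, not part of the original module; assumes the
# `kajuma/DiffLlama-0.3B-handcut` checkpoint referenced in `_CHECKPOINT_FOR_DOC`
# is available on the Hugging Face Hub):
#
#     from transformers import AutoModelForCausalLM, AutoTokenizer
#
#     tokenizer = AutoTokenizer.from_pretrained("kajuma/DiffLlama-0.3B-handcut")
#     model = AutoModelForCausalLM.from_pretrained("kajuma/DiffLlama-0.3B-handcut")
#     inputs = tokenizer("Differential attention", return_tensors="pt")
#     output_ids = model.generate(**inputs, max_new_tokens=20)
#     print(tokenizer.decode(output_ids[0], skip_special_tokens=True))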