from typing import Callable, Optional, Union

import torch
import torch.nn as nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...masking_utils import create_causal_mask, create_sliding_window_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.deprecation import deprecate_kwarg
from ...utils.generic import check_model_inputs
from .configuration_cohere2 import Cohere2Config


class Cohere2RotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor

    def __init__(self, config: Cohere2Config, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.repeat_interleave(freqs, 2, dim=-1)  # diff from Llama: we interleave() instead of cat()
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


class Cohere2LayerNorm(nn.Module):
    def __init__(self, hidden_size=None, eps=1e-5, bias=False):
        """The hidden size can be a tuple or an int. The tuple is used for QKNorm to normalize across head_dim"""
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        mean = hidden_states.mean(-1, keepdim=True)
        variance = (hidden_states - mean).pow(2).mean(-1, keepdim=True)
        hidden_states = (hidden_states - mean) * torch.rsqrt(variance + self.variance_epsilon)
        hidden_states = self.weight.to(torch.float32) * hidden_states
        return hidden_states.to(input_dtype)


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)
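

# Shape sketch for grouped-query attention (illustrative values, not from the
# original file): with batch=1, num_key_value_heads=2, n_rep=4, slen=3 and
# head_dim=8, `repeat_kv` expands a (1, 2, 3, 8) key/value tensor to
# (1, 8, 3, 8) so it lines up with 8 query heads.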
 2?1D1D.Ez!!Qa"23::5W\dlmM  e(CTTTr:   modulequerykeyvalueattention_maskscalingdropoutkwargsc                 @   [        X R                  5      n[        X0R                  5      n	[        R                  " XR	                  SS5      5      U-  n
Ub"  US S 2S S 2S S 2S UR
                  S   24   nX-   n
[        R                  R                  U
S[        R                  S9R                  UR                  5      n
[        R                  R                  XU R                  S9n
[        R                  " X5      nUR	                  SS5      R                  5       nX4$ )NrA   r   r<   )rC   rE   )ptrainingr   )r   num_key_value_groupsrJ   matmulrL   rH   rh   
functionalsoftmaxrs   rP   rE   r   r   
contiguous)r   r   r   r   r   r   r   r   
key_statesvalue_statesattn_weightscausal_maskattn_outputs                r8   eager_attention_forwardr   l   s     3 ; ;<JU$?$?@L<<';';Aq'ABWLL!$Q1.D
0@0@0D.D%DE#1==((2U]](SVVW\WbWbcL==((6??([L,,|:K''1-88:K$$r:   c                 |    U SS S S24   nU SSS S24   n[         R                  " U* U/SS9R                  S5      nU$ )N.rA   r   r<   rB   r   )rJ   stackflatten)rQ   x1x2rot_xs       r8   rotate_halfr      sL    	
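

# Note: the eager implementation above is only the fallback path. When
# `config._attn_implementation` is set to another backend (e.g. "sdpa" or
# "flash_attention_2"), `Cohere2Attention.forward` below looks the kernel up
# in ALL_ATTENTION_FUNCTIONS instead of calling this function.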


def rotate_half(x):
    # Split and rotate. Note that this function is different from e.g. Llama.
    x1 = x[..., ::2]
    x2 = x[..., 1::2]
    rot_x = torch.stack([-x2, x1], dim=-1).flatten(-2)
    return rot_x


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    dtype = q.dtype
    q = q.float()
    k = k.float()
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed.to(dtype=dtype), k_embed.to(dtype=dtype)
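

# Worked example of the interleaved rotation above (hypothetical numbers, for
# illustration only): for an adjacent channel pair (x0, x1) and angle theta,
# rotate_half maps (x0, x1) -> (-x1, x0), so apply_rotary_pos_emb produces
#     (x0 * cos(theta) - x1 * sin(theta), x1 * cos(theta) + x0 * sin(theta)),
# i.e. an ordinary 2-D rotation applied to every (even, odd) channel pair.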


class Cohere2Attention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: Cohere2Config, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True
        self.sliding_window = config.sliding_window if config.layer_types[layer_idx] == "sliding_attention" else None

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        if self.sliding_window is not None:
            # Rotary embeddings are only applied on the sliding-attention layers
            query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            sliding_window=self.sliding_window,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class Cohere2MLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


class Cohere2DecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: Cohere2Config, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = Cohere2Attention(config=config, layer_idx=layer_idx)
        self.mlp = Cohere2MLP(config)
        self.input_layernorm = Cohere2LayerNorm(hidden_size=config.hidden_size, eps=config.layer_norm_eps)
        self.attention_type = config.layer_types[layer_idx]

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> tuple[torch.FloatTensor, Optional[tuple[torch.FloatTensor, torch.FloatTensor]]]:
        """
        Args:
            hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
            attention_mask (`torch.FloatTensor`, *optional*):
                attention mask of size `(batch_size, sequence_length)` if flash attention is used or `(batch_size, 1,
                query_sequence_length, key_sequence_length)` if default attention is used.
            past_key_values (`Cache`, *optional*): cached past key and value projection states
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            use_cache (`bool`, *optional*):
                If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
                (see `past_key_values`).
            cache_position (`torch.LongTensor` of shape `(sequence_length)`, *optional*):
                Indices depicting the position of the input sequence tokens in the sequence
            position_embeddings (`tuple[torch.FloatTensor, torch.FloatTensor]`, *optional*):
                Tuple containing the cosine and sine positional embeddings of shape `(batch_size, seq_len, head_dim)`,
                with `head_dim` being the embedding dimension of each attention head.
        """
        residual = hidden_states

        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states_attention, _ = self.self_attn(
            hidden_states=hidden_states,
            position_embeddings=position_embeddings,
            attention_mask=attention_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        # Fully Connected
        hidden_states_mlp = self.mlp(hidden_states)

        # Add everything together (parallel attention + MLP block: both branches
        # read the same LayerNorm output and are summed with the residual)
        hidden_states = residual + hidden_states_attention + hidden_states_mlp

        return hidden_states


@auto_docstring
class Cohere2PreTrainedModel(PreTrainedModel):
    config: Cohere2Config
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["Cohere2DecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": Cohere2DecoderLayer,
        "attentions": Cohere2Attention,
    }


@auto_docstring
class Cohere2Model(Cohere2PreTrainedModel):
    def __init__(self, config: Cohere2Config):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [Cohere2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = Cohere2LayerNorm(hidden_size=config.hidden_size, eps=config.layer_norm_eps)
        self.rotary_emb = Cohere2RotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None and not self.training:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        # It may already have been prepared by e.g. `generate`
        if not isinstance(causal_mask_mapping := attention_mask, dict):
            mask_kwargs = {
                "config": self.config,
                "input_embeds": inputs_embeds,
                "attention_mask": attention_mask,
                "cache_position": cache_position,
                "past_key_values": past_key_values,
                "position_ids": position_ids,
            }
            causal_mask_mapping = {
                "full_attention": create_causal_mask(**mask_kwargs),
                "sliding_attention": create_sliding_window_causal_mask(**mask_kwargs),
            }

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers:
            hidden_states = decoder_layer(
                hidden_states,
                position_embeddings=position_embeddings,
                attention_mask=causal_mask_mapping[decoder_layer.attention_type],
                past_key_values=past_key_values,
                use_cache=use_cache,
                cache_position=cache_position,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)

        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class Cohere2ForCausalLM(Cohere2PreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config: Cohere2Config):
        super().__init__(config)
        self.model = Cohere2Model(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
        self.logit_scale = config.logit_scale
        self.tie_word_embeddings = config.tie_word_embeddings
        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Union[Cache, list[torch.FloatTensor]]] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.

        Example:

        ```python
        >>> from transformers import AutoTokenizer, Cohere2ForCausalLM

        >>> model = Cohere2ForCausalLM.from_pretrained("Cohere2ForAI/c4ai-command-r-v01")
        >>> tokenizer = AutoTokenizer.from_pretrained("Cohere2ForAI/c4ai-command-r-v01")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])
        logits = logits * self.logit_scale

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
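

# Layer-pattern sketch (an assumption based on the config fields used above, not
# a statement of the shipped defaults): `config.layer_types` interleaves entries
# like ["sliding_attention", "sliding_attention", "sliding_attention",
# "full_attention", ...]. Sliding layers get rotary embeddings and the windowed
# causal mask; full-attention layers use the global causal mask and skip RoPE
# (see Cohere2Attention.forward).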


__all__ = ["Cohere2ForCausalLM", "Cohere2Model", "Cohere2PreTrainedModel"]
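
# Minimal usage sketch (illustrative only; `Cohere2ForAI/c4ai-command-r-v01` is
# simply the checkpoint named in the docstring example above, not a guarantee
# that it is available locally):
#
#     from transformers import AutoTokenizer
#     tokenizer = AutoTokenizer.from_pretrained("Cohere2ForAI/c4ai-command-r-v01")
#     model = Cohere2ForCausalLM.from_pretrained("Cohere2ForAI/c4ai-command-r-v01")
#     inputs = tokenizer("Hello", return_tensors="pt")
#     out_ids = model.generate(inputs.input_ids, max_new_tokens=16)
#     print(tokenizer.batch_decode(out_ids, skip_special_tokens=True)[0])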