
from collections.abc import Callable
from typing import Optional

import torch
import torch.nn as nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.generic import check_model_inputs
from .configuration_olmo2 import Olmo2Config


@use_kernel_forward_from_hub("RMSNorm")
class Olmo2RMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        Olmo2RMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return (self.weight * hidden_states).to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"


class Olmo2RotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor

    def __init__(self, config: Olmo2Config, device=None):
        super().__init__()
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_type = self.config.rope_parameters["rope_type"]
        rope_init_fn = self.compute_default_rope_parameters
        if self.rope_type != "default":
            rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]
        inv_freq, self.attention_scaling = rope_init_fn(self.config, device)

        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.register_buffer("original_inv_freq", inv_freq.clone(), persistent=False)

    @staticmethod
    def compute_default_rope_parameters(
        config=None, device: Optional["torch.device"] = None, seq_len: Optional[int] = None
    ) -> tuple["torch.Tensor", float]:
        """
        Computes the inverse frequencies according to the original RoPE implementation
        Args:
            config ([`~transformers.PreTrainedConfig`]):
                The model configuration.
            device (`torch.device`):
                The device to use for initialization of the inverse frequencies.
            seq_len (`int`, *optional*):
                The current sequence length. Unused for this type of RoPE.
        Returns:
            Tuple of (`torch.Tensor`, `float`), containing the inverse frequencies for the RoPE embeddings and the
            post-processing scaling factor applied to the computed cos/sin (unused in this type of RoPE).
        """
        base = config.rope_parameters["rope_theta"]
        dim = getattr(config, "head_dim", None) or config.hidden_size // config.num_attention_heads

        attention_factor = 1.0  # Unused in this type of RoPE

        # Compute the inverse frequencies
        inv_freq = 1.0 / (
            base ** (torch.arange(0, dim, 2, dtype=torch.int64).to(device=device, dtype=torch.float) / dim)
        )
        return inv_freq, attention_factor

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # Force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos, sin


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


def apply_rotary_pos_emb(q, k, cos, sin, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    q_type, k_type = q.dtype, k.dtype
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed.to(q_type), k_embed.to(k_type)


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


class Olmo2Attention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: Olmo2Config, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )
        self.q_norm = Olmo2RMSNorm(config.num_attention_heads * self.head_dim, config.rms_norm_eps)
        self.k_norm = Olmo2RMSNorm(config.num_key_value_heads * self.head_dim, config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_norm(self.q_proj(hidden_states))
        key_states = self.k_norm(self.k_proj(hidden_states))
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(hidden_shape).transpose(1, 2)
        key_states = key_states.view(hidden_shape).transpose(1, 2)
        value_states = value_states.view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class Olmo2MLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


class Olmo2DecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: Olmo2Config, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = Olmo2Attention(config=config, layer_idx=layer_idx)
        self.mlp = Olmo2MLP(config)
        self.post_attention_layernorm = Olmo2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_feedforward_layernorm = Olmo2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.mlp(hidden_states)
        hidden_states = self.post_feedforward_layernorm(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


@auto_docstring
class Olmo2PreTrainedModel(PreTrainedModel):
    config: Olmo2Config
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["Olmo2DecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": Olmo2DecoderLayer,
        "attentions": Olmo2Attention,
    }


@auto_docstring
class Olmo2Model(Olmo2PreTrainedModel):
    def __init__(self, config: Olmo2Config):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [Olmo2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = Olmo2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = Olmo2RotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )
        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_values=past_key_values,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class Olmo2ForCausalLM(Olmo2PreTrainedModel, GenerationMixin):
    _tied_weights_keys = {"lm_head.weight": "model.embed_tokens.weight"}
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = Olmo2Model(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: int | torch.Tensor = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, Olmo2ForCausalLM

        >>> model = Olmo2ForCausalLM.from_pretrained("meta-olmo2/Olmo2-2-7b-hf")
        >>> tokenizer = AutoTokenizer.from_pretrained("meta-olmo2/Olmo2-2-7b-hf")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


__all__ = ["Olmo2ForCausalLM", "Olmo2Model", "Olmo2PreTrainedModel"]