
    iV[                     j   d dl mZ d dlmZ d dlZd dlmZ ddlmZ ddl	m
Z
mZ ddlmZ ddlmZmZ dd	lmZ dd
lmZ ddlmZmZmZ ddlmZmZ ddlmZmZ ddlm Z m!Z! ddl"m#Z# ddl$m%Z%m&Z&m'Z' ddl(m)Z)m*Z* ddl+m,Z,  G d dejZ                        Z. G d de      Z/dej`                  de1dej`                  fdZ2	 d9dejZ                  dej`                  dej`                  dej`                  d ej`                  dz  d!e3d"e3d#e#e%   fd$Z4d% Z5d:d&Z6 ee6       G d' d(ejZ                               Z7 G d) d*ejZ                        Z8 ed+       G d, d-ejZ                               Z9e& G d. d/e!             Z:e& G d0 d1e:             Z;e& G d2 d3e:e             Z< G d4 d5ee:      Z= G d6 d7ee:      Z>g d8Z?y);    )Callable)OptionalN   )ACT2FN)CacheDynamicCache)GenerationMixin)use_kernel_forward_from_hubuse_kernelized_func)create_causal_mask)FlashAttentionKwargs) GenericForSequenceClassificationGenericForTokenClassificationGradientCheckpointingLayer)BaseModelOutputWithPastCausalLMOutputWithPast)ROPE_INIT_FUNCTIONSdynamic_rope_update)ALL_ATTENTION_FUNCTIONSPreTrainedModel)Unpack)TransformersKwargsauto_docstringcan_return_tuple)check_model_inputsmaybe_autocast   )
Glm4Configc                   V     e Zd Z fdZdej
                  dej
                  fdZ xZS )Glm4MLPc                 *   t         |           || _        t        j                  |j
                  d|j                  z  d      | _        t        j                  |j                  |j
                  d      | _        t        |j                     | _        y )N   Fbias)super__init__confignnLinearhidden_sizeintermediate_sizegate_up_proj	down_projr   
hidden_actactivation_fnselfr'   	__class__s     p/home/obispo/Crisostomo_bridge/mision_env/lib/python3.12/site-packages/transformers/models/glm4/modeling_glm4.pyr&   zGlm4MLP.__init__1   sp    IIf&8&8!f>V>V:V]bc6#;#;V=O=OV[\#F$5$56    hidden_statesreturnc                     | j                  |      }|j                  dd      \  }}|| j                  |      z  }| j                  |      S )Nr"   dim)r,   chunkr/   r-   )r1   r5   	up_statesgates       r3   forwardzGlm4MLP.forward9   sL    %%m4	#//!/4i 2 24 88	~~i((r4   )__name__
__module____qualname__r&   torchFloatTensorr>   __classcell__r2   s   @r3   r    r    0   s'    7)U%6%6 )5;L;L )r4   r    c                   d    e Zd Zdedef fdZ	 	 	 	 	 	 ddej                  dej                  dz  dej                  dz  de	dz  d	e
dz  d
ej                  dz  deej                  ej                  f   dz  dee   deej                  eej                  ej                  f   dz  f   fdZ xZS )Glm4DecoderLayerr'   	layer_idxc                    t         |           |j                  | _        t        ||      | _        t        |      | _        t        |j                  |j                        | _	        t        |j                  |j                        | _
        t        |j                  |j                        | _        t        |j                  |j                        | _        y )N)r'   rH   eps)r%   r&   r*   Glm4Attention	self_attnr    mlpGlm4RMSNormrms_norm_epsinput_layernormpost_attention_layernormpost_self_attn_layernormpost_mlp_layernormr1   r'   rH   r2   s      r3   r&   zGlm4DecoderLayer.__init__C   s    !--&f	J6?*6+=+=6CVCVW(3F4F4FFL_L_(`%(3F4F4FFL_L_(`%"-f.@.@fFYFY"Zr4   Nr5   attention_maskposition_idspast_key_values	use_cachecache_positionposition_embeddingskwargsr6   c                    |}	| j                  |      } | j                  d|||||||d|\  }}
| j                  |      }|	|z   }|}	| j                  |      }| j	                  |      }| j                  |      }|	|z   }|S )N)r5   rV   rW   rX   rY   rZ   r[    )rQ   rM   rS   rR   rN   rT   )r1   r5   rV   rW   rX   rY   rZ   r[   r\   residual_s              r3   r>   zGlm4DecoderLayer.forwardN   s     !,,];)4>> 	
')%+) 3	
 	
q 55mD =0 55mD///> =0r4   )NNNFNN)r?   r@   rA   r   intr&   rB   Tensor
LongTensorr   booltupler   r   rC   r>   rD   rE   s   @r3   rG   rG   B   s    	[z 	[c 	[ /304(,!&26HL!||! t+! &&-	!
 ! $;! ((4/! #5<<#=>E! -.! 
u  %(9(95;L;L(L"MPT"TT	U!r4   rG   r5   n_repr6   c                     | j                   \  }}}}|dk(  r| S | dddddddddf   j                  |||||      } | j                  |||z  ||      S )z
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    r   N)shapeexpandreshape)r5   rf   batchnum_key_value_headsslenhead_dims         r3   	repeat_kvro   r   so    
 2?1D1D.Ehz!!Qa"23::5BUW\^bdlmM  (;e(CT8TTr4   modulequerykeyvaluerV   scalingdropoutr\   c                 T   t        || j                        }t        || j                        }	t        j                  ||j	                  dd            |z  }
|#|d d d d d d d |j
                  d   f   }|
|z   }
t        j                  j                  |
dt        j                        j                  |j                        }
t        j                  j                  |
|| j                        }
t        j                  |
|	      }|j	                  dd      j                         }||
fS )Nr"   r   r8   )r:   dtype)ptrainingr   )ro   num_key_value_groupsrB   matmul	transposerh   r(   
functionalsoftmaxfloat32torx   ru   rz   
contiguous)rp   rq   rr   rs   rV   rt   ru   r\   
key_statesvalue_statesattn_weightscausal_maskattn_outputs                r3   eager_attention_forwardr   ~   s    3 ; ;<JUF$?$?@L<<z';';Aq'ABWLL!$Q1.D
0@0@0D.D%DE#k1==((2U]](SVVW\WbWbcL==((6??([L,,|\:K''1-88:K$$r4   c                 |    | ddddf   }| ddddf   }t        j                  | |fd      j                  d      S )	z*Rotates half the hidden dims of the input..r   Nr"   r   r8   r9   rw   )rB   stackflatten)xx1x2s      r3   rotate_halfr      sJ    	
319B	
319B;;Ryb)11"55r4   c                    |j                  |      }|j                  |      }|dd|j                  d   dz  f   j                  dd      }|dd|j                  d   dz  f   j                  dd      }|j                  d   }| dd|f   | d|df   }}|dd|f   |d|df   }	}||z  t        |      |z  z   }
||z  t        |      |z  z   }t	        j
                  |
|gd      }
t	        j
                  ||	gd      }|
|fS )a  Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    .Nr8   r"   r9   )	unsqueezerh   repeat_interleaver   rB   cat)qkcossinunsqueeze_dim
rotary_dimq_rotq_passk_rotk_passq_embedk_embeds               r3   apply_rotary_pos_embr      sD   $ --
&C
--
&C c'SYYr]a'''
(
:
:1"
:
EC
c'SYYr]a'''
(
:
:1"
:
EC 2Jc;J;&'3
+;)<6Ec;J;&'3
+;)<6E s{{51C78Gs{{51C78G ii&)r2Gii&)r2GGr4   c                        e Zd ZdZddededz  f fdZ	 	 	 	 ddej                  de	ej                  ej                  f   dz  dej                  dz  d	e
dz  d
ej                  dz  dee   de	ej                  ej                  f   fdZ xZS )rL   z=Multi-headed attention from 'Attention Is All You Need' paperNr'   rH   c                 P   t         |           || _        || _        t	        |d|j
                  |j                  z        | _        |j                  |j                  z  | _	        | j                  dz  | _
        |j                  | _        d| _        t        j                  |j
                  |j                  | j                  z  |j                        | _        t        j                  |j
                  |j                  | j                  z  |j                        | _        t        j                  |j
                  |j                  | j                  z  |j                        | _        t        j                  |j                  | j                  z  |j
                  d      | _        y )Nrn   g      Tr#   F)r%   r&   r'   rH   getattrr*   num_attention_headsrn   rl   r{   rt   attention_dropout	is_causalr(   r)   attention_biasq_projk_projv_projo_projrU   s      r3   r&   zGlm4Attention.__init__   sD   "
F4F4F&JdJd4de$*$>$>&B\B\$\!}}d*!'!9!9ii : :T]] JQWQfQf
 ii : :T]] JQWQfQf
 ii : :T]] JQWQfQf
 ii : :T]] JFL^L^ejkr4   r5   r[   rV   rX   rZ   r\   r6   c                    |j                   d d }g |d| j                  }| j                  |      j                  |      j	                  dd      }	| j                  |      j                  |      j	                  dd      }
| j                  |      j                  |      j	                  dd      }|\  }}t        |	|
||      \  }	}
|'|||d}|j                  |
|| j                  |      \  }
}t        j                  | j                  j                  t              } || |	|
||f| j                  sdn| j                   | j"                  d|\  }} |j$                  g |d j'                         }| j)                  |      }||fS )Nr8   r   r"   )r   r   rZ           )ru   rt   )rh   rn   r   viewr}   r   r   r   updaterH   r   get_interfacer'   _attn_implementationr   rz   r   rt   rj   r   r   )r1   r5   r[   rV   rX   rZ   r\   input_shapehidden_shapequery_statesr   r   r   r   cache_kwargsattention_interfacer   r   s                     r3   r>   zGlm4Attention.forward   s    $))#2.88b8$--8{{=166|DNNqRST[[/44\BLLQPQR
{{=166|DNNqRST&S#7jRUWZ#[ j&#&snUL'6'='=j,X\XfXfht'u$J(?(M(MKK,,.E)
 %8	%
  $}}C$2H2HLL	%
 	%
!\ *k));;;;FFHkk+.L((r4   N)NNNN)r?   r@   rA   __doc__r   ra   r&   rB   rb   re   r   rc   r   r   r>   rD   rE   s   @r3   rL   rL      s    Glz lcDj l0 IM.2(,26))||)) #5<<#=>E)) t+	))
 )) ((4/)) +,)) 
u||U\\)	*))r4   rL   c                        e Zd ZU ej                  ed<   ddef fdZe	 	 	 ddedz  de	d   de
dz  ded	ef   fd
       Z ej                         ed               Z xZS )Glm4RotaryEmbeddinginv_freqNr'   c                    t         |           |j                  | _        |j                  | _        || _        | j
                  j                  d   | _        | j                  }| j                  dk7  rt        | j                     } || j
                  |      \  }| _
        | j                  d|d       | j                  d|j                         d       y )N	rope_typedefaultr   F)
persistentoriginal_inv_freq)r%   r&   max_position_embeddingsmax_seq_len_cachedoriginal_max_seq_lenr'   rope_parametersr   compute_default_rope_parametersr   attention_scalingregister_bufferclone)r1   r'   devicerope_init_fnr   r2   s        r3   r&   zGlm4RotaryEmbedding.__init__  s    "("@"@$*$B$B!44[A!%!E!E>>Y&.t~~>L+7V+L($(ZeD0(..2BuUr4   r   ztorch.deviceseq_lenr6   ztorch.Tensorc                 n   | j                   d   }| j                   j                  dd      }t        | dd      xs | j                  | j                  z  }t        ||z        }d}d|t        j                  d|dt        j                        j                  |t        j                  	      |z  z  z  }||fS )
a  
        Computes the inverse frequencies according to the original RoPE implementation
        Args:
            config ([`~transformers.PreTrainedConfig`]):
                The model configuration.
            device (`torch.device`):
                The device to use for initialization of the inverse frequencies.
            seq_len (`int`, *optional*):
                The current sequence length. Unused for this type of RoPE.
        Returns:
            Tuple of (`torch.Tensor`, `float`), containing the inverse frequencies for the RoPE embeddings and the
            post-processing scaling factor applied to the computed cos/sin (unused in this type of RoPE).
        
rope_thetapartial_rotary_factorg      ?rn   Nr   r"   rx   )r   rx   )r   getr   r*   r   ra   rB   arangeint64r   float)	r'   r   r   baser   rn   r:   attention_factorr   s	            r3   r   z3Glm4RotaryEmbedding.compute_default_rope_parameters  s    & %%l3 & 6 6 : :;RTW X6:t4h8J8JfNhNh8h(223 U\\!S!5;;?BB&X]XcXcBdgjjk
 )))r4   c                 N   | j                   d d d d f   j                         j                  |j                  d   dd      j	                  |j
                        }|d d d d d f   j                         }t        |j
                  j                  t              r/|j
                  j                  dk7  r|j
                  j                  nd}t        |d      5  |j                         |j                         z  j                  dd      }t        j                  ||fd	      }|j                         | j                  z  }|j                         | j                  z  }	d d d        j	                  |j                   
      	j	                  |j                   
      fS # 1 sw Y   AxY w)Nr   r8   r   mpscpuF)device_typeenabledr"   r9   r   )r   r   ri   rh   r   r   
isinstancetypestrr   r}   rB   r   r   r   r   rx   )
r1   r   rW   inv_freq_expandedposition_ids_expandedr   freqsembr   r   s
             r3   r>   zGlm4RotaryEmbedding.forward?  sR    !MM$4-8>>@GGHZHZ[\H]_acdehhijiqiqr ,QaZ 8 > > @'1!((--'E!((--[`J`ahhmmfkUC 	5&,,.1F1L1L1NNYYZ[]^_E))UEN3C'')d444C'')d444C		5 vvAGGv$cff177f&;;;	5 	5s   BFF$r   )NNN)r?   r@   rA   rB   rb   __annotations__r   r&   staticmethodr   ra   re   r   r   no_gradr   r>   rD   rE   s   @r3   r   r     s    llVz V  $(+/"*T!*(* t* 
~u$	%	* *> U]]_<  <r4   r   RMSNormc                   ,     e Zd Zd fd	Zd Zd Z xZS )rO   c                     t         |           t        j                  t	        j
                  |            | _        || _        y)z:
        Glm4RMSNorm is equivalent to T5LayerNorm
        N)r%   r&   r(   	ParameterrB   onesweightvariance_epsilon)r1   r*   rK   r2   s      r3   r&   zGlm4RMSNorm.__init__Q  s1     	ll5::k#:; #r4   c                 "   |j                   }|j                  t        j                        }|j	                  d      j                  dd      }|t        j                  || j                  z         z  }| j                  |j                  |      z  S )Nr"   r8   T)keepdim)	rx   r   rB   r   powmeanrsqrtr   r   )r1   r5   input_dtypevariances       r3   r>   zGlm4RMSNorm.forwardY  sy    #))%((7 $$Q',,R,>%Ht?T?T4T(UU{{]--k:::r4   c                 ^    t        | j                  j                         d| j                   S )Nz, eps=)re   r   rh   r   )r1   s    r3   
extra_reprzGlm4RMSNorm.extra_repr`  s*    ))*+6$2G2G1HIIr4   )gư>)r?   r@   rA   r&   r>   r   rD   rE   s   @r3   rO   rO   O  s    $;Jr4   rO   c                   J    e Zd ZU eed<   dZdZdgZdgZdZ	dZ
dZdZdZeedZy)Glm4PreTrainedModelr'   modelTrG   rX   )r5   
attentionsN)r?   r@   rA   r   r   base_model_prefixsupports_gradient_checkpointing_no_split_modules_skip_keys_device_placement_supports_flash_attn_supports_sdpa_supports_flex_attn_can_compile_fullgraph_supports_attention_backendrG   rL   _can_record_outputsr^   r4   r3   r   r   d  sQ    &*#+,#4"5N!"&)#r4   r   c                       e Zd Zdef fdZee	 	 	 	 	 	 	 ddej                  dz  dej                  dz  dej                  dz  de
dz  dej                  dz  d	ej                  dz  d
edz  dee   defd              Z xZS )	Glm4Modelr'   c           	         t         |   |       |j                  | _        |j                  | _        t        j                  |j                  |j                  | j                        | _        t        j                  t        |j                        D cg c]  }t        ||       c}      | _        t        |j                  |j                        | _        t#        |      | _        d| _        | j)                          y c c}w )NrJ   r'   F)r%   r&   pad_token_idpadding_idx
vocab_sizer(   	Embeddingr*   embed_tokens
ModuleListrangenum_hidden_layersrG   layersrO   rP   normr   
rotary_embgradient_checkpointing	post_initrU   s      r3   r&   zGlm4Model.__init__y  s     !.. ++LL):):F<N<NPTP`P`ammBGH`H`BabYfi0b
   2 28K8KL	-V<&+# 	 cs   DN	input_idsrV   rW   rX   inputs_embedsrZ   rY   r\   r6   c                 D   |d u |d uz  rt        d      || j                  |      }|r|t        | j                        }|E||j	                         nd}	t        j                  |j                  d   |j                        |	z   }||j                  d      }t        | j                  |||||      }
|}| j                  ||      }| j                  d | j                  j                   D ]  } ||f|
|||||d|} | j                  |      }t        ||	      S )
Nz:You must specify exactly one of input_ids or inputs_embedsr  r   r   )r   )r'   input_embedsrV   rZ   rX   rW   )rW   )rV   r[   rW   rX   rY   rZ   )last_hidden_staterX   )
ValueErrorr  r   r'   get_seq_lengthrB   r   rh   r   r   r   r  r  r  r  r   )r1   r  rV   rW   rX   r  rZ   rY   r\   past_seen_tokensr   r5   r[   decoder_layers                 r3   r>   zGlm4Model.forward  s]    -t";<YZZ *.*;*;I*FM0*$++>O!CRC^==?de]003M<P<PQTdd  )33A6L(;;&))+%
 &"oom,oW![[)H4;;+H+HI 
	M)	*$7) /#-	 	M
	 		-0&++
 	
r4   )NNNNNNN)r?   r@   rA   r   r&   r   r   rB   rc   rb   r   rC   rd   r   r   r   r>   rD   rE   s   @r3   r  r  w  s    z    .2.204(,2626!%9
##d*9
 t+9
 &&-	9

 9
 ((4/9
 ((4/9
 $;9
 +,9
 
!9
  9
r4   r  c                   h    e Zd ZddiZddiZddgdgfiZ fdZee	 	 	 	 	 	 	 	 	 dd	e	j                  dz  d
e	j                  dz  de	j                  dz  dedz  de	j                  dz  de	j                  dz  dedz  de	j                  dz  dee	j                  z  dee   deez  fd              Z xZS )Glm4ForCausalLMzlm_head.weightzmodel.embed_tokens.weightlm_headcolwise_gather_outputr5   logitsc                     t         |   |       t        |      | _        |j                  | _        t        j                  |j                  |j                  d      | _        | j                          y )NFr#   )
r%   r&   r  r   r	  r(   r)   r*   r  r  r0   s     r3   r&   zGlm4ForCausalLM.__init__  sU     v&
 ++yy!3!3V5F5FUS 	r4   Nr  rV   rW   rX   r  labelsrY   rZ   logits_to_keepr\   r6   c
                 z    | j                   d|||||||d|
}|j                  }t        |	t              rt	        |	 d      n|	}| j                  |dd|ddf         }d}|* | j                  d||| j                  j                  d|
}t        |||j                  |j                  |j                        S )ah  
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.

        Example:

        ```python
        >>> from transformers import AutoTokenizer, Glm4ForCausalLM

        >>> model = Glm4ForCausalLM.from_pretrained("THUDM/GLM-4-9B-0414")
        >>> tokenizer = AutoTokenizer.from_pretrained("THUDM/GLM-4-9B-0414")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```)r  rV   rW   rX   r  rY   rZ   N)r!  r#  r	  )lossr!  rX   r5   r   r^   )r   r  r   ra   slicer  loss_functionr'   r	  r   rX   r5   r   )r1   r  rV   rW   rX   r  r#  rY   rZ   r$  r\   outputsr5   slice_indicesr!  r&  s                   r3   r>   zGlm4ForCausalLM.forward  s    J ,64:: 	,
)%+')	,
 	,
  118B>SV8W~ot4]kmA}a,?@A%4%%pVFt{{OeOepiopD%#33!//))
 	
r4   )	NNNNNNNNr   )r?   r@   rA   _tied_weights_keys_tp_plan_pp_planr&   r   r   rB   rc   rb   r   rC   rd   ra   r   r   re   r   r>   rD   rE   s   @r3   r  r    s5   *,GH23H_-z:;H  .2.204(,26*.!%26-.=
##d*=
 t+=
 &&-	=

 =
 ((4/=
   4'=
 $;=
 ((4/=
 ell*=
 +,=
 
'	'=
  =
r4   r  c                       e Zd Zy)Glm4ForSequenceClassificationNr?   r@   rA   r^   r4   r3   r/  r/        r4   r/  c                       e Zd Zy)Glm4ForTokenClassificationNr0  r^   r4   r3   r3  r3    r1  r4   r3  )r   r  r  r/  r3  )r   )r   )@collections.abcr   typingr   rB   torch.nnr(   activationsr   cache_utilsr   r   
generationr	   integrationsr
   r   masking_utilsr   modeling_flash_attention_utilsr   modeling_layersr   r   r   modeling_outputsr   r   modeling_rope_utilsr   r   modeling_utilsr   r   processing_utilsr   utilsr   r   r   utils.genericr   r   configuration_glm4r   Moduler    rG   rb   ra   ro   r   r   r   r   rL   r   rO   r   r  r  r/  r3  __all__r^   r4   r3   <module>rG     s  , %    ! . ) L / B 
 P K F & I I ? *)bii )$-1 -`	UU\\ 	U# 	U%,, 	U& %II%<<% 
% <<	%
 LL4'% % % '(%46%P )*A)BII A) +A)H@<")) @<F Y'J")) J (J( /  $ L
# L
 L
^ M
)? M
 M
`	$DFY 		!>@S 	r4   