from typing import Optional

import torch
import torch.nn as nn
import torch.utils.checkpoint

from ...utils import logging
from ..llama.modeling_llama import (
    LlamaAttention,
    LlamaForCausalLM,
    LlamaForSequenceClassification,
    LlamaForTokenClassification,
)
from ..phi3.modeling_phi3 import Phi3MLP
from .configuration_glm import GlmConfig


logger = logging.get_logger(__name__)

_CHECKPOINT_FOR_DOC = "THUDM/glm-4-9b"


class GlmMLP(Phi3MLP):
    pass


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., 0::2]
    x2 = x[..., 1::2]
    return torch.stack((-x2, x1), dim=-1).flatten(-2)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)

    # Interleave them instead of usual shape
    cos = cos[..., : cos.shape[-1] // 2].repeat_interleave(2, dim=-1)
    sin = sin[..., : sin.shape[-1] // 2].repeat_interleave(2, dim=-1)

    # Keep half or full tensor for later concatenation
    rotary_dim = cos.shape[-1]
    q_rot, q_pass = q[..., :rotary_dim], q[..., rotary_dim:]
    k_rot, k_pass = k[..., :rotary_dim], k[..., rotary_dim:]

    # Apply rotary embeddings on the first half or full tensor
    q_embed = (q_rot * cos) + (rotate_half(q_rot) * sin)
    k_embed = (k_rot * cos) + (rotate_half(k_rot) * sin)

    # Concatenate back to full shape
    q_embed = torch.cat([q_embed, q_pass], dim=-1)
    k_embed = torch.cat([k_embed, k_pass], dim=-1)
    return q_embed, k_embed


class GlmAttention(LlamaAttention):
    def __init__(self, config: GlmConfig, layer_idx: Optional[int] = None):
        super().__init__(config, layer_idx)
        self.o_proj = nn.Linear(config.num_attention_heads * self.head_dim, config.hidden_size, bias=False)


class GlmForCausalLM(LlamaForCausalLM):
    pass


class GlmForSequenceClassification(LlamaForSequenceClassification):
    pass


class GlmForTokenClassification(LlamaForTokenClassification):
    pass


__all__ = [
    "GlmPreTrainedModel",  # noqa: F822
    "GlmModel",  # noqa: F822
    "GlmForCausalLM",
    "GlmForSequenceClassification",
    "GlmForTokenClassification",
]
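# ---------------------------------------------------------------------------
# Minimal usage sketch for the rotary helpers above. This is an illustrative
# comment block, not part of the module's API; it stays commented out because
# the relative imports above prevent running this file standalone. All tensor
# sizes are arbitrary demo assumptions.
#
#     # rotate_half pairs even/odd channels (interleaved layout), unlike the
#     # Llama variant that splits the head dim into two contiguous halves:
#     # [x0, x1, x2, x3] -> [-x1, x0, -x3, x2]
#     t = torch.tensor([1.0, 2.0, 3.0, 4.0])
#     assert torch.equal(rotate_half(t), torch.tensor([-2.0, 1.0, -4.0, 3.0]))
#
#     # apply_rotary_pos_emb rotates only the dims covered by cos/sin; with
#     # GLM's partial_rotary_factor of 0.5, cos/sin carry head_dim // 2
#     # entries per position and the remaining dims pass through unchanged.
#     batch, heads, seq_len, head_dim = 2, 4, 8, 64
#     q = torch.randn(batch, heads, seq_len, head_dim)
#     k = torch.randn(batch, heads, seq_len, head_dim)
#     cos = torch.randn(batch, seq_len, head_dim // 2)
#     sin = torch.randn(batch, seq_len, head_dim // 2)
#     q_embed, k_embed = apply_rotary_pos_emb(q, k, cos, sin)
#     assert q_embed.shape == q.shape and k_embed.shape == k.shape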