"""PyTorch DINOv2 model."""

import collections.abc
from typing import Callable, Dict, List, Optional, Set, Tuple, Union

import torch
import torch.utils.checkpoint
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss

from ...activations import ACT2FN
from ...modeling_outputs import BackboneOutput, BaseModelOutput, BaseModelOutputWithPooling, ImageClassifierOutput
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...pytorch_utils import find_pruneable_heads_and_indices, prune_linear_layer
from ...utils import (
    add_code_sample_docstrings,
    add_start_docstrings,
    add_start_docstrings_to_model_forward,
    logging,
    replace_return_docstrings,
    torch_int,
)
from ...utils.backbone_utils import BackboneMixin
from .configuration_dinov2 import Dinov2Config


logger = logging.get_logger(__name__)

# General docstring
_CONFIG_FOR_DOC = "Dinov2Config"

# Base docstring
_CHECKPOINT_FOR_DOC = "facebook/dinov2-base"
_EXPECTED_OUTPUT_SHAPE = [1, 257, 768]

# Image classification docstring
_IMAGE_CLASS_CHECKPOINT = "facebook/dinov2-small-imagenet1k-1-layer"
_IMAGE_CLASS_EXPECTED_OUTPUT = "tabby, tabby cat"


class Dinov2Embeddings(nn.Module):
    """
    Construct the CLS token, mask token, position and patch embeddings.
    """

    def __init__(self, config: Dinov2Config) -> None:
        super().__init__()

        self.cls_token = nn.Parameter(torch.randn(1, 1, config.hidden_size))
        self.use_mask_token = config.use_mask_token
        if self.use_mask_token:
            self.mask_token = nn.Parameter(torch.zeros(1, config.hidden_size))
        self.patch_embeddings = Dinov2PatchEmbeddings(config)
        num_patches = self.patch_embeddings.num_patches
        self.position_embeddings = nn.Parameter(torch.randn(1, num_patches + 1, config.hidden_size))
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.patch_size = config.patch_size
        self.config = config

    def interpolate_pos_encoding(self, embeddings: torch.Tensor, height: int, width: int) -> torch.Tensor:
        """
        This method allows to interpolate the pre-trained position encodings, to be able to use the model on higher resolution
        images. This method is also adapted to support torch.jit tracing and interpolation at torch.float32 precision.

        Adapted from:
        - https://github.com/facebookresearch/dino/blob/de9ee3df6cf39fac952ab558447af1fa1365362a/vision_transformer.py#L174-L194, and
        - https://github.com/facebookresearch/dinov2/blob/e1277af2ba9496fbadf7aec6eba56e8d882d1e35/dinov2/models/vision_transformer.py#L179-L211
        """
        num_patches = embeddings.shape[1] - 1
        num_positions = self.position_embeddings.shape[1] - 1

        if not torch.jit.is_tracing() and num_patches == num_positions and height == width:
            return self.position_embeddings

        class_pos_embed = self.position_embeddings[:, :1]
        patch_pos_embed = self.position_embeddings[:, 1:]

        dim = embeddings.shape[-1]

        new_height = height // self.patch_size
        new_width = width // self.patch_size

        sqrt_num_positions = torch_int(num_positions**0.5)
        patch_pos_embed = patch_pos_embed.reshape(1, sqrt_num_positions, sqrt_num_positions, dim)
        patch_pos_embed = patch_pos_embed.permute(0, 3, 1, 2)
        target_dtype = patch_pos_embed.dtype
        patch_pos_embed = nn.functional.interpolate(
            patch_pos_embed.to(torch.float32),
            size=(new_height, new_width),
            mode="bicubic",
            align_corners=False,
        ).to(dtype=target_dtype)
        patch_pos_embed = patch_pos_embed.permute(0, 2, 3, 1).view(1, -1, dim)

        return torch.cat((class_pos_embed, patch_pos_embed), dim=1)

    def forward(self, pixel_values: torch.Tensor, bool_masked_pos: Optional[torch.Tensor] = None) -> torch.Tensor:
        batch_size, _, height, width = pixel_values.shape
        target_dtype = self.patch_embeddings.projection.weight.dtype
        embeddings = self.patch_embeddings(pixel_values.to(dtype=target_dtype))

        if bool_masked_pos is not None and self.use_mask_token:
            embeddings = torch.where(
                bool_masked_pos.unsqueeze(-1), self.mask_token.to(embeddings.dtype).unsqueeze(0), embeddings
            )

        # add the [CLS] token to the embedded patch tokens
        cls_tokens = self.cls_token.expand(batch_size, -1, -1)
        embeddings = torch.cat((cls_tokens, embeddings), dim=1)

        # add positional encoding to each token
        embeddings = embeddings + self.interpolate_pos_encoding(embeddings, height, width)

        embeddings = self.dropout(embeddings)

        return embeddings
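

# Illustrative sketch (not part of the original file): the resize that
# `interpolate_pos_encoding` performs, shown on a bare tensor. With a 14-pixel
# patch size, a model pretrained at 224x224 stores a 16x16 grid of patch
# position embeddings; running at 518x518 needs a 37x37 grid, so the stored
# grid is resized with bicubic interpolation exactly as in the method above.
def _interpolate_grid_example() -> torch.Tensor:
    patch_pos_embed = torch.randn(1, 16 * 16, 768)  # pretrained 16x16 grid, hidden size 768
    grid = patch_pos_embed.reshape(1, 16, 16, 768).permute(0, 3, 1, 2)
    grid = nn.functional.interpolate(grid, size=(37, 37), mode="bicubic", align_corners=False)
    return grid.permute(0, 2, 3, 1).view(1, -1, 768)  # -> (1, 1369, 768)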


class Dinov2PatchEmbeddings(nn.Module):
    """
    This class turns `pixel_values` of shape `(batch_size, num_channels, height, width)` into the initial
    `hidden_states` (patch embeddings) of shape `(batch_size, seq_length, hidden_size)` to be consumed by a
    Transformer.
    """

    def __init__(self, config):
        super().__init__()
        image_size, patch_size = config.image_size, config.patch_size
        num_channels, hidden_size = config.num_channels, config.hidden_size

        image_size = image_size if isinstance(image_size, collections.abc.Iterable) else (image_size, image_size)
        patch_size = patch_size if isinstance(patch_size, collections.abc.Iterable) else (patch_size, patch_size)
        num_patches = (image_size[1] // patch_size[1]) * (image_size[0] // patch_size[0])
        self.image_size = image_size
        self.patch_size = patch_size
        self.num_channels = num_channels
        self.num_patches = num_patches

        self.projection = nn.Conv2d(num_channels, hidden_size, kernel_size=patch_size, stride=patch_size)

    def forward(self, pixel_values: torch.Tensor) -> torch.Tensor:
        num_channels = pixel_values.shape[1]
        if num_channels != self.num_channels:
            raise ValueError(
                "Make sure that the channel dimension of the pixel values match with the one set in the configuration."
                f" Expected {self.num_channels} but got {num_channels}."
            )
        embeddings = self.projection(pixel_values).flatten(2).transpose(1, 2)
        return embeddings
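

# Illustrative sketch (not part of the original file): the projection above is a
# strided convolution, so with the base config (3 channels, patch size 14, hidden
# size 768) a 224x224 image becomes (224 / 14)^2 = 256 patch tokens; with the
# [CLS] token prepended this gives the (1, 257, 768) shape quoted in
# `_EXPECTED_OUTPUT_SHAPE` at the top of the file.
def _patch_embedding_shape_example() -> torch.Size:
    projection = nn.Conv2d(3, 768, kernel_size=14, stride=14)
    pixel_values = torch.randn(1, 3, 224, 224)
    return projection(pixel_values).flatten(2).transpose(1, 2).shape  # (1, 256, 768)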


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs,
):
    # Take the dot product between "query" and "key" to get the raw attention scores.
    attn_weights = torch.matmul(query, key.transpose(-1, -2)) * scaling

    # Normalize the attention scores to probabilities.
    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)

    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)

    # Mask heads if we want to
    if attention_mask is not None:
        attn_weights = attn_weights * attention_mask

    attn_output = torch.matmul(attn_weights, value)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


class Dinov2SelfAttention(nn.Module):
    def __init__(self, config: Dinov2Config) -> None:
        super().__init__()
        if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
            raise ValueError(
                f"The hidden size {config.hidden_size} is not a multiple of the number of attention "
                f"heads {config.num_attention_heads}."
            )

        self.config = config
        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.all_head_size = self.num_attention_heads * self.attention_head_size
        self.dropout_prob = config.attention_probs_dropout_prob
        self.scaling = self.attention_head_size**-0.5
        self.is_causal = False

        self.query = nn.Linear(config.hidden_size, self.all_head_size, bias=config.qkv_bias)
        self.key = nn.Linear(config.hidden_size, self.all_head_size, bias=config.qkv_bias)
        self.value = nn.Linear(config.hidden_size, self.all_head_size, bias=config.qkv_bias)

    def transpose_for_scores(self, x: torch.Tensor) -> torch.Tensor:
        new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
        x = x.view(new_x_shape)
        return x.permute(0, 2, 1, 3)

    def forward(
        self, hidden_states, head_mask: Optional[torch.Tensor] = None, output_attentions: bool = False
    ) -> Union[Tuple[torch.Tensor, torch.Tensor], Tuple[torch.Tensor]]:
        key_layer = self.transpose_for_scores(self.key(hidden_states))
        value_layer = self.transpose_for_scores(self.value(hidden_states))
        query_layer = self.transpose_for_scores(self.query(hidden_states))

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            if self.config._attn_implementation == "sdpa" and output_attentions:
                logger.warning_once(
                    "`torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`."
                    " Falling back to eager attention. This warning can be removed using the argument"
                    ' `attn_implementation="eager"` when loading the model.'
                )
            else:
                attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        context_layer, attention_probs = attention_interface(
            self,
            query_layer,
            key_layer,
            value_layer,
            head_mask,
            is_causal=self.is_causal,
            scaling=self.scaling,
            dropout=0.0 if not self.training else self.dropout_prob,
        )

        new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
        context_layer = context_layer.reshape(new_context_layer_shape)

        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)

        return outputs
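

# Illustrative sketch (not part of the original file): the reshape done by
# `transpose_for_scores` above. With the base config (12 heads, hidden size 768)
# a (batch, seq, 768) tensor is split into (batch, 12, seq, 64), so each head
# attends over its own 64-dimensional slice of the hidden state.
def _transpose_for_scores_example() -> torch.Size:
    hidden_states = torch.randn(2, 257, 768)  # (batch, seq, hidden)
    x = hidden_states.view(2, 257, 12, 64)  # split the hidden dimension into heads
    return x.permute(0, 2, 1, 3).shape  # (2, 12, 257, 64)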


class Dinov2SelfOutput(nn.Module):
    """
    The residual connection is defined in Dinov2Layer instead of here (as is the case with other models), due to the
    layernorm applied before each block.
    """

    def __init__(self, config: Dinov2Config) -> None:
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)

        return hidden_states


class Dinov2Attention(nn.Module):
    def __init__(self, config: Dinov2Config) -> None:
        super().__init__()
        self.attention = Dinov2SelfAttention(config)
        self.output = Dinov2SelfOutput(config)
        self.pruned_heads = set()

    def prune_heads(self, heads: Set[int]) -> None:
        if len(heads) == 0:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.attention.num_attention_heads, self.attention.attention_head_size, self.pruned_heads
        )

        # Prune linear layers
        self.attention.query = prune_linear_layer(self.attention.query, index)
        self.attention.key = prune_linear_layer(self.attention.key, index)
        self.attention.value = prune_linear_layer(self.attention.value, index)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)

        # Update hyper params and store pruned heads
        self.attention.num_attention_heads = self.attention.num_attention_heads - len(heads)
        self.attention.all_head_size = self.attention.attention_head_size * self.attention.num_attention_heads
        self.pruned_heads = self.pruned_heads.union(heads)

    def forward(
        self,
        hidden_states: torch.Tensor,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
    ) -> Union[Tuple[torch.Tensor, torch.Tensor], Tuple[torch.Tensor]]:
        self_outputs = self.attention(hidden_states, head_mask, output_attentions)

        attention_output = self.output(self_outputs[0], hidden_states)

        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them
        return outputs


class Dinov2LayerScale(nn.Module):
    def __init__(self, config) -> None:
        super().__init__()
        self.lambda1 = nn.Parameter(config.layerscale_value * torch.ones(config.hidden_size))

    def forward(self, hidden_state: torch.Tensor) -> torch.Tensor:
        return hidden_state * self.lambda1


def drop_path(input: torch.Tensor, drop_prob: float = 0.0, training: bool = False) -> torch.Tensor:
    """
    Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).

    Comment by Ross Wightman: This is the same as the DropConnect impl I created for EfficientNet, etc networks,
    however, the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper...
    See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for changing the
    layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use 'survival rate' as the
    argument.
    """
    if drop_prob == 0.0 or not training:
        return input
    keep_prob = 1 - drop_prob
    shape = (input.shape[0],) + (1,) * (input.ndim - 1)  # work with diff dim tensors, not just 2D ConvNets
    random_tensor = keep_prob + torch.rand(shape, dtype=input.dtype, device=input.device)
    random_tensor.floor_()  # binarize
    output = input.div(keep_prob) * random_tensor
    return output
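

# Illustrative sketch (not part of the original file): with drop_prob=0.5 each
# *sample* in the batch is either zeroed out entirely or kept and rescaled by
# 1 / keep_prob = 2, so the expected value of the residual branch is unchanged.
def _drop_path_example() -> torch.Tensor:
    branch_output = torch.ones(4, 3)  # four samples in a batch
    return drop_path(branch_output, drop_prob=0.5, training=True)  # rows are all 0.0 or all 2.0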


class Dinov2DropPath(nn.Module):
    """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks)."""

    def __init__(self, drop_prob: Optional[float] = None) -> None:
        super().__init__()
        self.drop_prob = drop_prob

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        return drop_path(hidden_states, self.drop_prob, self.training)

    def extra_repr(self) -> str:
        return "p={}".format(self.drop_prob)


class Dinov2MLP(nn.Module):
    def __init__(self, config) -> None:
        super().__init__()
        in_features = out_features = config.hidden_size
        hidden_features = int(config.hidden_size * config.mlp_ratio)
        self.fc1 = nn.Linear(in_features, hidden_features, bias=True)
        if isinstance(config.hidden_act, str):
            self.activation = ACT2FN[config.hidden_act]
        else:
            self.activation = config.hidden_act
        self.fc2 = nn.Linear(hidden_features, out_features, bias=True)

    def forward(self, hidden_state: torch.Tensor) -> torch.Tensor:
        hidden_state = self.fc1(hidden_state)
        hidden_state = self.activation(hidden_state)
        hidden_state = self.fc2(hidden_state)
        return hidden_state


class Dinov2SwiGLUFFN(nn.Module):
    def __init__(self, config) -> None:
        super().__init__()
        in_features = out_features = config.hidden_size
        hidden_features = int(config.hidden_size * config.mlp_ratio)
        hidden_features = (int(hidden_features * 2 / 3) + 7) // 8 * 8

        self.weights_in = nn.Linear(in_features, 2 * hidden_features, bias=True)
        self.weights_out = nn.Linear(hidden_features, out_features, bias=True)

    def forward(self, hidden_state: torch.Tensor) -> torch.Tensor:
        hidden_state = self.weights_in(hidden_state)
        x1, x2 = hidden_state.chunk(2, dim=-1)
        hidden = nn.functional.silu(x1) * x2
        return self.weights_out(hidden)


class Dinov2Layer(nn.Module):
    """This corresponds to the Block class in the original implementation."""

    def __init__(self, config: Dinov2Config) -> None:
        super().__init__()

        self.norm1 = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.attention = Dinov2Attention(config)
        self.layer_scale1 = Dinov2LayerScale(config)
        self.drop_path = Dinov2DropPath(config.drop_path_rate) if config.drop_path_rate > 0.0 else nn.Identity()

        self.norm2 = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        if config.use_swiglu_ffn:
            self.mlp = Dinov2SwiGLUFFN(config)
        else:
            self.mlp = Dinov2MLP(config)
        self.layer_scale2 = Dinov2LayerScale(config)

    def forward(
        self,
        hidden_states: torch.Tensor,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
    ) -> Union[Tuple[torch.Tensor, torch.Tensor], Tuple[torch.Tensor]]:
        self_attention_outputs = self.attention(
            self.norm1(hidden_states),  # in Dinov2, layernorm is applied before self-attention
            head_mask,
            output_attentions=output_attentions,
        )
        attention_output = self_attention_outputs[0]

        attention_output = self.layer_scale1(attention_output)
        outputs = self_attention_outputs[1:]  # add self attentions if we output attention weights

        # first residual connection
        hidden_states = self.drop_path(attention_output) + hidden_states

        # in Dinov2, layernorm is also applied after self-attention
        layer_output = self.norm2(hidden_states)
        layer_output = self.mlp(layer_output)
        layer_output = self.layer_scale2(layer_output)

        # second residual connection
        layer_output = self.drop_path(layer_output) + hidden_states

        outputs = (layer_output,) + outputs

        return outputs


class Dinov2Encoder(nn.Module):
    def __init__(self, config: Dinov2Config) -> None:
        super().__init__()
        self.config = config
        self.layer = nn.ModuleList([Dinov2Layer(config) for _ in range(config.num_hidden_layers)])
        self.gradient_checkpointing = False

    def forward(
        self,
        hidden_states: torch.Tensor,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
        output_hidden_states: bool = False,
        return_dict: bool = True,
    ) -> Union[tuple, BaseModelOutput]:
        all_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None

        for i, layer_module in enumerate(self.layer):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)

            layer_head_mask = head_mask[i] if head_mask is not None else None

            if self.gradient_checkpointing and self.training:
                layer_outputs = self._gradient_checkpointing_func(
                    layer_module.__call__,
                    hidden_states,
                    layer_head_mask,
                    output_attentions,
                )
            else:
                layer_outputs = layer_module(hidden_states, layer_head_mask, output_attentions)

            hidden_states = layer_outputs[0]

            if output_attentions:
                all_self_attentions = all_self_attentions + (layer_outputs[1],)

        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        if not return_dict:
            return tuple(v for v in [hidden_states, all_hidden_states, all_self_attentions] if v is not None)
        return BaseModelOutput(
            last_hidden_state=hidden_states,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
        )


class Dinov2PreTrainedModel(PreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = Dinov2Config
    base_model_prefix = "dinov2"
    main_input_name = "pixel_values"
    supports_gradient_checkpointing = True
    _no_split_modules = ["Dinov2SwiGLUFFN"]
    _supports_sdpa = True
    _supports_flash_attn_2 = True

    def _init_weights(self, module: Union[nn.Linear, nn.Conv2d, nn.LayerNorm]) -> None:
        """Initialize the weights"""
        if isinstance(module, (nn.Linear, nn.Conv2d)):
            # Upcast the input in `fp32` and cast it back to the desired `dtype` to avoid
            # `trunc_normal_cpu` not implemented in `half` issues
            module.weight.data = nn.init.trunc_normal_(
                module.weight.data.to(torch.float32), mean=0.0, std=self.config.initializer_range
            ).to(module.weight.dtype)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
        elif isinstance(module, Dinov2Embeddings):
            module.position_embeddings.data = nn.init.trunc_normal_(
                module.position_embeddings.data.to(torch.float32),
                mean=0.0,
                std=self.config.initializer_range,
            ).to(module.position_embeddings.dtype)

            module.cls_token.data = nn.init.trunc_normal_(
                module.cls_token.data.to(torch.float32),
                mean=0.0,
                std=self.config.initializer_range,
            ).to(module.cls_token.dtype)

            if self.config.use_mask_token:
                module.mask_token.data.zero_()
        elif isinstance(module, Dinov2LayerScale):
            module.lambda1.data.fill_(self.config.layerscale_value)


DINOV2_START_DOCSTRING = r"""
    This model is a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it
    as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and
    behavior.

    Parameters:
        config ([`Dinov2Config`]): Model configuration class with all the parameters of the model.
            Initializing with a config file does not load the weights associated with the model, only the
            configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""

DINOV2_BASE_INPUTS_DOCSTRING = r"""
    Args:
        pixel_values (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`):
            Pixel values. Pixel values can be obtained using [`AutoImageProcessor`]. See
            [`BitImageProcessor.preprocess`] for details.

        bool_masked_pos (`torch.BoolTensor` of shape `(batch_size, sequence_length)`):
            Boolean masked positions. Indicates which patches are masked (1) and which aren't (0). Only relevant for
            pre-training.

        head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.

        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""

DINOV2_INPUTS_DOCSTRING = r"""
    Args:
        pixel_values (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`):
            Pixel values. Pixel values can be obtained using [`AutoImageProcessor`]. See
            [`BitImageProcessor.preprocess`] for details.

        head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.

        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""


@add_start_docstrings(
    "The bare DINOv2 Model transformer outputting raw hidden-states without any specific head on top.",
    DINOV2_START_DOCSTRING,
)
class Dinov2Model(Dinov2PreTrainedModel):
    def __init__(self, config: Dinov2Config):
        super().__init__(config)
        self.config = config

        self.embeddings = Dinov2Embeddings(config)
        self.encoder = Dinov2Encoder(config)

        self.layernorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self) -> Dinov2PatchEmbeddings:
        return self.embeddings.patch_embeddings

    def _prune_heads(self, heads_to_prune: Dict[int, List[int]]) -> None:
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    @add_start_docstrings_to_model_forward(DINOV2_BASE_INPUTS_DOCSTRING)
    @add_code_sample_docstrings(
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=BaseModelOutputWithPooling,
        config_class=_CONFIG_FOR_DOC,
        modality="vision",
        expected_output=_EXPECTED_OUTPUT_SHAPE,
    )
    def forward(
        self,
        pixel_values: Optional[torch.Tensor] = None,
        bool_masked_pos: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, BaseModelOutputWithPooling]:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if pixel_values is None:
            raise ValueError("You have to specify pixel_values")

        # Prepare head mask if needed: 1.0 in head_mask indicates we keep the head
        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)

        embedding_output = self.embeddings(pixel_values, bool_masked_pos=bool_masked_pos)

        encoder_outputs = self.encoder(
            embedding_output,
            head_mask=head_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = encoder_outputs[0]
        sequence_output = self.layernorm(sequence_output)
        pooled_output = sequence_output[:, 0, :]

        if not return_dict:
            head_outputs = (sequence_output, pooled_output)
            return head_outputs + encoder_outputs[1:]

        return BaseModelOutputWithPooling(
            last_hidden_state=sequence_output,
            pooler_output=pooled_output,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
        )
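

# Illustrative usage sketch (not part of the original file): running the bare
# model on one image and reading off the sequence and pooled [CLS] features.
# Uses the `_CHECKPOINT_FOR_DOC` checkpoint named above; requires network access.
def _dinov2_model_usage_example(image) -> Tuple[torch.Size, torch.Size]:
    from transformers import AutoImageProcessor

    processor = AutoImageProcessor.from_pretrained(_CHECKPOINT_FOR_DOC)
    model = Dinov2Model.from_pretrained(_CHECKPOINT_FOR_DOC)
    inputs = processor(images=image, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    # (1, 257, 768) sequence output and (1, 768) pooled [CLS] output for dinov2-base
    return outputs.last_hidden_state.shape, outputs.pooler_output.shape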


@add_start_docstrings(
    """
    Dinov2 Model transformer with an image classification head on top (a linear layer on top of the final hidden state
    of the [CLS] token) e.g. for ImageNet.
    """,
    DINOV2_START_DOCSTRING,
)
class Dinov2ForImageClassification(Dinov2PreTrainedModel):
    def __init__(self, config: Dinov2Config) -> None:
        super().__init__(config)

        self.num_labels = config.num_labels
        self.dinov2 = Dinov2Model(config)

        # Classifier head
        self.classifier = (
            nn.Linear(config.hidden_size * 2, config.num_labels) if config.num_labels > 0 else nn.Identity()
        )

        # Initialize weights and apply final processing
        self.post_init()

    @add_start_docstrings_to_model_forward(DINOV2_INPUTS_DOCSTRING)
    @add_code_sample_docstrings(
        checkpoint=_IMAGE_CLASS_CHECKPOINT,
        output_type=ImageClassifierOutput,
        config_class=_CONFIG_FOR_DOC,
        expected_output=_IMAGE_CLASS_EXPECTED_OUTPUT,
    )
    def forward(
        self,
        pixel_values: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple, ImageClassifierOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the image classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.dinov2(
            pixel_values,
            head_mask=head_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output = outputs[0]  # batch_size, sequence_length, hidden_size

        cls_token = sequence_output[:, 0]
        patch_tokens = sequence_output[:, 1:]

        linear_input = torch.cat([cls_token, patch_tokens.mean(dim=1)], dim=1)

        logits = self.classifier(linear_input)

        loss = None
        if labels is not None:
            # move labels to the correct device to enable model parallelism
            labels = labels.to(logits.device)
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"

            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)

        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output

        return ImageClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
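

# Illustrative sketch (not part of the original file): the classifier input built
# in `forward` above concatenates the [CLS] token with the mean of the patch
# tokens, which is why the head is `nn.Linear(config.hidden_size * 2, num_labels)`.
def _classifier_input_example() -> torch.Size:
    sequence_output = torch.randn(1, 257, 768)  # [CLS] token followed by 256 patch tokens
    cls_token = sequence_output[:, 0]
    patch_tokens = sequence_output[:, 1:]
    return torch.cat([cls_token, patch_tokens.mean(dim=1)], dim=1).shape  # (1, 1536)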


@add_start_docstrings(
    """
    Dinov2 backbone, to be used with frameworks like DETR and MaskFormer.
    """,
    DINOV2_START_DOCSTRING,
)
class Dinov2Backbone(Dinov2PreTrainedModel, BackboneMixin):
    def __init__(self, config):
        super().__init__(config)
        super()._init_backbone(config)

        self.num_features = [config.hidden_size for _ in range(config.num_hidden_layers + 1)]
        self.embeddings = Dinov2Embeddings(config)
        self.encoder = Dinov2Encoder(config)

        self.layernorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self) -> Dinov2PatchEmbeddings:
        return self.embeddings.patch_embeddings

    @add_start_docstrings_to_model_forward(DINOV2_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=BackboneOutput, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        pixel_values: torch.Tensor,
        output_hidden_states: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> BackboneOutput:
        """
        Returns:

        Examples:

        ```python
        >>> from transformers import AutoImageProcessor, AutoBackbone
        >>> import torch
        >>> from PIL import Image
        >>> import requests

        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)

        >>> processor = AutoImageProcessor.from_pretrained("facebook/dinov2-base")
        >>> model = AutoBackbone.from_pretrained(
        ...     "facebook/dinov2-base", out_features=["stage2", "stage5", "stage8", "stage11"]
        ... )

        >>> inputs = processor(image, return_tensors="pt")

        >>> outputs = model(**inputs)
        >>> feature_maps = outputs.feature_maps
        >>> list(feature_maps[-1].shape)
        [1, 768, 16, 16]
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions

        embedding_output = self.embeddings(pixel_values)

        outputs = self.encoder(
            embedding_output, output_hidden_states=True, output_attentions=output_attentions, return_dict=return_dict
        )

        hidden_states = outputs.hidden_states if return_dict else outputs[1]

        feature_maps = ()
        for stage, hidden_state in zip(self.stage_names, hidden_states):
            if stage in self.out_features:
                if self.config.apply_layernorm:
                    hidden_state = self.layernorm(hidden_state)
                if self.config.reshape_hidden_states:
                    hidden_state = hidden_state[:, 1:]
                    batch_size, _, height, width = pixel_values.shape
                    patch_size = self.config.patch_size
                    hidden_state = hidden_state.reshape(batch_size, height // patch_size, width // patch_size, -1)
                    hidden_state = hidden_state.permute(0, 3, 1, 2).contiguous()
                feature_maps += (hidden_state,)

        if not return_dict:
            if output_hidden_states:
                output = (feature_maps,) + outputs[1:]
            else:
                output = (feature_maps,) + outputs[2:]
            return output

        return BackboneOutput(
            feature_maps=feature_maps,
            hidden_states=outputs.hidden_states if output_hidden_states else None,
            attentions=outputs.attentions if output_attentions else None,
        )


__all__ = ["Dinov2ForImageClassification", "Dinov2Model", "Dinov2PreTrainedModel", "Dinov2Backbone"]