
from typing import Any, Optional, Union

import flax
import flax.linen as nn
import jax
import jax.numpy as jnp
from flax.core.frozen_dict import FrozenDict, freeze, unfreeze
from flax.linen import combine_masks, make_causal_mask
from flax.linen.attention import dot_product_attention_weights
from flax.traverse_util import flatten_dict, unflatten_dict
from jax import lax

from ...modeling_flax_outputs import FlaxBaseModelOutput, FlaxBaseModelOutputWithPooling
from ...modeling_flax_utils import (
    ACT2FN,
    FlaxPreTrainedModel,
    append_replace_return_docstrings,
    overwrite_call_docstring,
)
from ...utils import ModelOutput, add_start_docstrings, logging
from .configuration_clip import CLIPConfig, CLIPTextConfig, CLIPVisionConfig


logger = logging.get_logger(__name__)

CLIP_START_DOCSTRING = r"""

    This model inherits from [`FlaxPreTrainedModel`]. Check the superclass documentation for the generic methods the
    library implements for all its models (such as downloading, saving and converting weights from PyTorch models).

    This model is also a
    [flax.linen.Module](https://flax.readthedocs.io/en/latest/api_reference/flax.linen/module.html) subclass. Use it as
    a regular Flax linen Module and refer to the Flax documentation for all matters related to general usage and
    behavior.

    Finally, this model supports inherent JAX features such as:

    - [Just-In-Time (JIT) compilation](https://jax.readthedocs.io/en/latest/jax.html#just-in-time-compilation-jit)
    - [Automatic Differentiation](https://jax.readthedocs.io/en/latest/jax.html#automatic-differentiation)
    - [Vectorization](https://jax.readthedocs.io/en/latest/jax.html#vectorization-vmap)
    - [Parallelization](https://jax.readthedocs.io/en/latest/jax.html#parallelization-pmap)

    Parameters:
        config ([`CLIPConfig`]): Model configuration class with all the parameters of the model.
            Initializing with a config file does not load the weights associated with the model, only the
            configuration. Check out the [`~FlaxPreTrainedModel.from_pretrained`] method to load the model weights.
        dtype (`jax.numpy.dtype`, *optional*, defaults to `jax.numpy.float32`):
            The data type of the computation. Can be one of `jax.numpy.float32`, `jax.numpy.float16` (on GPUs) and
            `jax.numpy.bfloat16` (on TPUs).

            This can be used to enable mixed-precision training or half-precision inference on GPUs or TPUs. If
            specified all the computation will be performed with the given `dtype`.

            **Note that this only specifies the dtype of the computation and does not influence the dtype of model
            parameters.**

            If you wish to change the dtype of the model parameters, see [`~FlaxPreTrainedModel.to_fp16`] and
            [`~FlaxPreTrainedModel.to_bf16`].
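
    Example -- running the computation in half precision (illustrative; any public CLIP checkpoint works the same
    way):

    ```python
    >>> import jax.numpy as jnp
    >>> from transformers import FlaxCLIPModel

    >>> # the forward pass runs in bfloat16; the parameter dtype is unchanged (see `to_bf16` above to cast weights)
    >>> model = FlaxCLIPModel.from_pretrained("openai/clip-vit-base-patch32", dtype=jnp.bfloat16)
    ```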
"""

CLIP_TEXT_INPUTS_DOCSTRING = r"""
    Args:
        input_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide
            it.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        attention_mask (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        position_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
            config.max_position_embeddings - 1]`.

            [What are position IDs?](../glossary#position-ids)
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""

CLIP_VISION_INPUTS_DOCSTRING = r"""
    Args:
        pixel_values (`numpy.ndarray` of shape `(batch_size, num_channels, height, width)`):
            Pixel values. Padding will be ignored by default should you provide it. Pixel values can be obtained using
            [`AutoImageProcessor`]. See [`CLIPImageProcessor.__call__`] for details.
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""

CLIP_INPUTS_DOCSTRING = r"""
    Args:
        input_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide
            it.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        attention_mask (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        position_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
            config.max_position_embeddings - 1]`.

            [What are position IDs?](../glossary#position-ids)
        pixel_values (`numpy.ndarray` of shape `(batch_size, num_channels, height, width)`):
            Pixel values. Padding will be ignored by default should you provide it. Pixel values can be obtained using
            [`AutoImageProcessor`]. See [`CLIPImageProcessor.__call__`] for details.
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""


@flax.struct.dataclass
class FlaxCLIPTextModelOutput(ModelOutput):
    """
    Base class for text model's outputs that also contains a pooling of the last hidden states.

    Args:
        text_embeds (`jnp.ndarray` of shape `(batch_size, output_dim)`):
            The text embeddings obtained by applying the projection layer to the pooled output of
            [`FlaxCLIPTextModel`].
        last_hidden_state (`jnp.ndarray` of shape `(batch_size, sequence_length, hidden_size)`):
            Sequence of hidden-states at the output of the last layer of the model.
        hidden_states (`tuple(jnp.ndarray)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
            Tuple of `jnp.ndarray` (one for the output of the embeddings + one for the output of each layer) of shape
            `(batch_size, sequence_length, hidden_size)`.

            Hidden-states of the model at the output of each layer plus the initial embedding outputs.
        attentions (`tuple(jnp.ndarray)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
            Tuple of `jnp.ndarray` (one for each layer) of shape `(batch_size, num_heads, sequence_length,
            sequence_length)`.

            Attentions weights after the attention softmax, used to compute the weighted average in the self-attention
            heads.
    Ntext_embedslast_hidden_state.hidden_states
attentions)__name__
__module____qualname____doc__r   jnpndarray__annotations__r    r!   r   tupler"        /home/jaya/work/projects/VOICE-AGENT/VIET/agent-env/lib/python3.11/site-packages/transformers/models/clip/modeling_flax_clip.pyr   r      s          ,  $K###%)s{)))7;M8E#+s"234;;;48Js{C/0188888r,   r   c                       e Zd ZU dZdZej        ed<   dZej        ed<   dZ	ej        ed<   dZ
ej        ed<   dZeed<   dZeed<   d	ee         fd
ZdS )FlaxCLIPOutputah  
    Args:
        logits_per_image (`jnp.ndarray` of shape `(image_batch_size, text_batch_size)`):
            The scaled dot product scores between `image_embeds` and `text_embeds`. This represents the image-text
            similarity scores.
        logits_per_text (`jnp.ndarray` of shape `(text_batch_size, image_batch_size)`):
            The scaled dot product scores between `text_embeds` and `image_embeds`. This represents the text-image
            similarity scores.
        text_embeds (`jnp.ndarray` of shape `(batch_size, output_dim)`):
            The text embeddings obtained by applying the projection layer to the pooled output of
            [`FlaxCLIPTextModel`].
        image_embeds (`jnp.ndarray` of shape `(batch_size, output_dim)`):
            The image embeddings obtained by applying the projection layer to the pooled output of
            [`FlaxCLIPVisionModel`].
        text_model_output (`FlaxBaseModelOutputWithPooling`):
            The output of the [`FlaxCLIPTextModel`].
        vision_model_output (`FlaxBaseModelOutputWithPooling`):
            The output of the [`FlaxCLIPVisionModel`].
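
    Schematically, the similarity logits are a temperature-scaled dot product between the L2-normalized embeddings
    (a sketch of the relationship only, not a substitute for the module code):

    ```python
    >>> import jax.numpy as jnp

    >>> # image_embeds: (image_batch, dim), text_embeds: (text_batch, dim), logit_scale: scalar model parameter
    >>> image_embeds = image_embeds / jnp.linalg.norm(image_embeds, axis=-1, keepdims=True)
    >>> text_embeds = text_embeds / jnp.linalg.norm(text_embeds, axis=-1, keepdims=True)
    >>> logits_per_text = jnp.matmul(text_embeds, image_embeds.T) * jnp.exp(logit_scale)
    >>> logits_per_image = logits_per_text.T
    ```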
    Nlogits_per_imagelogits_per_textr   image_embedstext_model_outputvision_model_outputreturnc                 ^     t           fd                                 D                       S )Nc              3   t   K   | ]2}|d vr|         n!t          |                                          V  3dS ))r3   r4   N)getattrto_tuple).0kselfs     r-   	<genexpr>z*FlaxCLIPOutput.to_tuple.<locals>.<genexpr>   sc       
 
  LLLDGGRYZ^`aRbRbRkRkRmRm
 
 
 
 
 
r,   )r*   keysr<   s   `r-   r9   zFlaxCLIPOutput.to_tuple   sC     
 
 
 
YY[[
 
 
 
 
 	
r,   )r#   r$   r%   r&   r0   r'   r(   r)   r1   r   r2   r3   r   r4   r*   r   r9   r+   r,   r-   r/   r/      s          ( %)ck(((#'OS['''#K### $L#+$$$8<5<<<:>7>>>
%* 
 
 
 
 
 
r,   r/   c                   H    e Zd ZU eed<   ej        Zej        ed<   d Zd Z	dS )FlaxCLIPVisionEmbeddingsconfigdtypec           
      j   | j         j        }| j         j        }| j         j        }|                     dt
          j        j                            d          |f          | _	        t          j
        |||f||fdd| j        t
          j        j                                                  | _        ||z  dz  | _        | j        dz   }t          j        ||t
          j        j                                        	          | _        t!          j        t!          j        d
|d          d
          | _        d S )Nclass_embedding{Gz?)stddevVALIDF)kernel_sizestridespaddinguse_biasrC   kernel_init   r   embedding_initr   i4rC   axis)rB   hidden_size
image_size
patch_sizeparamjaxnninitializersnormalrE   ConvrC   patch_embeddingnum_patchesEmbedposition_embeddingr'   expand_dimsarangeposition_ids)r<   	embed_dimrV   rW   num_positionss        r-   setupzFlaxCLIPVisionEmbeddings.setup   s   K+	[+
[+
#zz*;SV=P=W=W_c=W=d=dgpfrss!w#Z0,*+2244 
  
  
 '*4:(1,"$(=)TWTZTgTnTnTpTp"q"q"qOCJq-t,T,T,T[\]]]r,   c                 N   |                      |          }|j        \  }}}}t          j        ||||z  |f          }t          j        | j        d          }t          j        ||ddf          }t          j        ||gd          }||                     | j	                  z   }|S )Nr   r   rS   r   )
r^   shaper'   reshaperb   rE   tileconcatenatera   rd   )	r<   pixel_valuespatch_embeds
batch_sizeheightwidthchannelsclass_embeds
embeddingss	            r-   __call__z!FlaxCLIPVisionEmbeddings.__call__   s    ++L99.:.@+
FE8{<*funh1WXXt';&IIIxz1a.@AA_lL%AJJJ
$"9"9$:K"L"LL
r,   N)
r#   r$   r%   r   r)   r'   float32rC   rg   rv   r+   r,   r-   rA   rA      sW         {E39"""^ ^ ^,	 	 	 	 	r,   rA   c                   H    e Zd ZU eed<   ej        Zej        ed<   d Zd Z	dS )FlaxCLIPTextEmbeddingsrB   rC   c                    | j         j        }t          j        | j         j        |t
          j        j                                                  | _        t          j        | j         j	        |t
          j        j                                                  | _
        t          j        t          j        d| j         j	        d          d          | _        d S )NrO   r   rQ   rR   ri   rS   )rB   rU   rZ   r`   
vocab_sizerY   r[   r\   token_embeddingmax_position_embeddingsra   r'   rb   rc   rd   )r<   re   s     r-   rg   zFlaxCLIPTextEmbeddings.setup  s    K+	!x(>	Z]Z`ZmZtZtZvZvwww"$(K/36K^KeKeKgKg#
 #
 #
  OJq$+=TJJJQW
 
 
r,   c                     |                      |                    d                    }|                     |                    d                    }||z   }|S )NrQ   )r|   astypera   )r<   	input_idsrd   input_embedsposition_embedsru   s         r-   rv   zFlaxCLIPTextEmbeddings.__call__  sT    ++I,<,<T,B,BCC11,2E2Ed2K2KLL!O3
r,   N)
r#   r$   r%   r   r)   r'   rw   rC   rg   rv   r+   r,   r-   ry   ry     sT         {E39"""	
 	
 	
    r,   ry   c                   v    e Zd ZU eeef         ed<   ej        Z	ej	        ed<   d Z
d Zd Z	 	 	 dd	ed
efdZdS )FlaxCLIPAttentionrB   rC   c                    | j         j        | _        | j         j        | _        | j        | j        z  | _        | j        | j        z  | j        k    r t          d| j         d| j         d          | j        dz  | _        | j         j        | _	        t          j        | j        | j        t          j
        j                            d                    | _        t          j        | j        | j        t          j
        j                            d                    | _        t          j        | j        | j        t          j
        j                            d                    | _        t          j        | j        | j        t          j
        j                            d                    | _        t)          | j         t*                    | _        | j        r6t/          t1          j        d| j         j        fd	                    | _        d S d S )
Nz;embed_dim must be divisible by num_heads (got `embed_dim`: z and `num_heads`: z).g      {Gz?rC   rM   r   rQ   rR   )rB   rU   re   num_attention_heads	num_headshead_dim
ValueErrorscaleattention_dropoutdropoutrZ   DenserC   rY   r[   r\   k_projv_projq_projout_proj
isinstancer   causalr
   r'   onesr}   causal_maskr?   s    r-   rg   zFlaxCLIPAttention.setup"  s   08$.8=4>)T^;;'dn ' 'N' ' '   ]D(
{4ht~TZSVM`MgMghlMmMmnnnht~TZSVM`MgMghlMmMmnnnht~TZSVM`MgMghlMmMmnnntzsvObOiOijnOoOoppp n==; 	p/!T[=`9aim0n0n0nooD	p 	pr,   c                 d    |                     |j        d d         | j        | j        fz             S NrN   )rk   rj   r   r   r<   r!   s     r-   _split_headszFlaxCLIPAttention._split_heads7  s2    $$]%8!%<PTP]?^%^___r,   c                 X    |                     |j        d d         | j        fz             S r   )rk   rj   re   r   s     r-   _merge_headszFlaxCLIPAttention._merge_heads:  s,    $$]%8!%<?P%PQQQr,   NTFdeterministicoutput_attentionsc           
      f   |                      |          }|                     |          }|                     |          }|                     |          }|                     |          }|                     |          }d }| j        r6|j        d         |j        d         }
}	| j        d d d d |
|	z
  |
d |
f         }|+|)t          j        |d          }t          ||d          }n||}n|t          j        |d          }|t          j        |dk    t          j        |j        d                              | j                  t          j        |j        t          j        | j                  j                                      | j                            }nd }d }|s | j        dk    r|                     d          }t'          ||||| j        || j        d 	          }t          j        d
||          }|                     |          }|                     |          }|r||fn|f}|S )Nr   )rS   rQ   rR   r   g        r   )biasdropout_rngdropout_rater   rC   	precisionz...hqk,...khd->...qhd)r   r   r   r   r   rj   r   r'   rb   r	   r   selectfullr   rC   finfominr   make_rngr   einsumr   r   )r<   r!   attention_maskr   r   querykeyvaluecausal_attention_maskquery_length
key_lengthattention_biasr   attn_weightsattn_outputoutputss                   r-   rv   zFlaxCLIPAttention.__call__=  sR    M**kk-((M**!!%(($$!!%(( $; 	p',{1~sy|*L$($4QQQ:;TWa;acndncn5n$o!%*?*K _^(KKKN*>;PX\]]]NN".2NN' _^(KKKN% Z"-s33::4:FF-sy/D/D/HIIPPQUQ[\\ NN "N 	3!3!3--	22K4#'*	
 	
 	
 j!8,NN''44mmK001BV;--r,   )NTF)r#   r$   r%   r   r   r   r)   r'   rw   rC   rg   r   r   boolrv   r+   r,   r-   r   r     s         ."223333{E39"""p p p*` ` `R R R ""'9 9 	9
  9 9 9 9 9 9r,   r   c                   X    e Zd ZU eeef         ed<   ej        Z	ej	        ed<   d Z
d ZdS )FlaxCLIPMLPrB   rC   c                 r   t           | j        j                 | _        t	          j        | j        j        | j        t          j        j	        
                    d                    | _        t	          j        | j        j        | j        t          j        j	        
                    d                    | _        d S )Nr   r   )r   rB   
hidden_actactivation_fnrZ   r   intermediate_sizerC   rY   r[   r\   fc1rU   fc2r?   s    r-   rg   zFlaxCLIPMLP.setup}  s    #DK$:;8K)*+22488
 
 

 8DK34:SVSYSfSmSmnrSsSstttr,   c                     |                      |          }|                     |          }|                     |          }|S N)r   r   r   r   s     r-   rv   zFlaxCLIPMLP.__call__  s=    //**=99//r,   N)r#   r$   r%   r   r   r   r)   r'   rw   rC   rg   rv   r+   r,   r-   r   r   y  sb         ."223333{E39"""u u u    r,   r   c                   h    e Zd ZU eeef         ed<   ej        Z	ej	        ed<   d Z
	 	 d
dedefdZd	S )FlaxCLIPEncoderLayerrB   rC   c                 .   t          | j        | j                  | _        t	          j        | j        j        | j                  | _        t          | j        | j                  | _	        t	          j        | j        j        | j                  | _
        d S NrR   )epsilonrC   )r   rB   rC   	self_attnrZ   	LayerNormlayer_norm_epslayer_norm1r   mlplayer_norm2r?   s    r-   rg   zFlaxCLIPEncoderLayer.setup  sx    *4;djIII<0JRVR\]]]t{$*===<0JRVR\]]]r,   TFr   r   c                    |}|                      |          }|                     ||||          }|d         }||z   }|}|                     |          }|                     |          }||z   }|f}|r||dd          z  }|S )N)r!   r   r   r   r   r   )r   r   r   r   )r<   r!   r   r   r   residualattn_outputsr   s           r-   rv   zFlaxCLIPEncoderLayer.__call__  s     !((77~~')'/	 & 
 
 %Q =0 ((77// =0 " 	(|ABB''Gr,   N)TFr#   r$   r%   r   r   r   r)   r'   rw   rC   rg   r   rv   r+   r,   r-   r   r     s         ."223333{E39"""^ ^ ^ #"'  	
       r,   r   c            	       v    e Zd ZU eeef         ed<   ej        Z	ej	        ed<   d Z
	 	 	 	 	 ddeded	ed
efdZdS )FlaxCLIPLayerCollectionrB   rC   c                 \      fdt           j        j                  D              _        d S )Nc                 b    g | ]+}t          j        t          |          j                   ,S ))namerC   )r   rB   strrC   )r:   ir<   s     r-   
<listcomp>z1FlaxCLIPLayerCollection.setup.<locals>.<listcomp>  sB     
 
 
 !3q66LLL
 
 
r,   )rangerB   num_hidden_layerslayersr?   s   `r-   rg   zFlaxCLIPLayerCollection.setup  s>    
 
 
 
4;899
 
 
r,   NTFr   r   output_hidden_statesreturn_dictc                     |rdnd }|rdnd }| j         D ]/}	|r||fz  } |	||||          }
|
d         }|r||
d         fz  }0|r||fz  }|f}|st          d |D                       S t          |||          S )Nr+   )r   r   r   r   c              3      K   | ]}||V  	d S r   r+   )r:   vs     r-   r=   z3FlaxCLIPLayerCollection.__call__.<locals>.<genexpr>  s"      ==qq}}}}}==r,   )r    r!   r"   )r   r*   r   )r<   r!   r   r   r   r   r   all_attentionsall_hidden_stateslayerlayer_outputsr   s               r-   rv   z FlaxCLIPLayerCollection.__call__  s      1:d"6@BBD[ 
	6 
	6E# 6!m%55!!E~]^o  M *!,M  6=#3"55 	2-!11 " 	>==G======"+;LYg
 
 
 	
r,   NTFFTr   r+   r,   r-   r   r     s         ."223333{E39"""
 
 
 ""'%* "
 "
 	"

  "
 #"
 "
 "
 "
 "
 "
 "
r,   r   c            	       v    e Zd ZU eeef         ed<   ej        Z	ej	        ed<   d Z
	 	 	 	 	 ddeded	ed
efdZdS )FlaxCLIPEncoderrB   rC   c                 F    t          | j        | j                  | _        d S NrR   )r   rB   rC   r   r?   s    r-   rg   zFlaxCLIPEncoder.setup  s    -dkLLLr,   NTFr   r   r   r   c                 8    |                      ||||||          S )N)r!   r   r   r   r   r   )r   )r<   inputs_embedsr   r   r   r   r   s          r-   rv   zFlaxCLIPEncoder.__call__  s1     {{')'/!5#  
 
 	
r,   r   r   r+   r,   r-   r   r     s         ."223333{E39"""M M M ""'%* 
 
 	

  
 #
 
 
 
 
 
 
r,   r   c            	       d    e Zd ZU eed<   ej        Zej        ed<   d Z	 	 	 	 dde	de	de	d	e	fd
Z
dS )FlaxCLIPTextTransformerrB   rC   c                     t          | j        | j                  | _        t	          | j        | j                  | _        t          j        | j        j        | j                  | _	        | j        j
        | _
        d S r   )ry   rB   rC   ru   r   encoderrZ   r   r   final_layer_normeos_token_idr?   s    r-   rg   zFlaxCLIPTextTransformer.setup  sg    0DJOOO&t{$*EEE "T[5OW[Wa b b b !K4r,   TFr   r   r   r   c                 r   ||n| j         j        }||n| j         j        }||n| j         j        }|                     ||          }|                     ||||||          }	|	d         }
|                     |
          }
| j        dk    r<|
t          j	        |
j
        d                   |                    d          f         }nD|
t          j	        |
j
        d                   || j        k                        d          f         }|s|
|f|	dd          z   S t          |
||	j        |	j                  S )	N)r   rd   )r   r   r   r   r   r   r   rN   rS   r   r    pooler_outputr!   r"   )rB   r   r   use_return_dictru   r   r   r   r'   rc   rj   argmaxr   r!   r"   )r<   r   r   rd   r   r   r   r   r!   encoder_outputsr    pooled_outputs               r-   rv   z FlaxCLIPTextTransformer.__call__  s    2C1N--TXT_Tq$8$D  $+Jj 	 &1%<kk$+B]),WW,,')'/!5# ' 
 
 ,A. 112CDD!! .cj9J9PQR9S.T.TV_VfVflnVfVoVo.opMM .
,21566dFW9W8_8_eg8_8h8hhM  	L%}58KKK-/')7&1	
 
 
 	
r,   NTFFTr#   r$   r%   r   r)   r'   rw   rC   rg   r   rv   r+   r,   r-   r   r      s         {E39"""5 5 5 #"'%* 3
 3

 3
  3
 #3
 3
 3
 3
 3
 3
 3
r,   r   c                   ^    e Zd ZU eed<   ej        Zej        ed<   d Z	 	 	 	 	 d	de	de	fdZ
dS )
FlaxCLIPVisionTransformerrB   rC   c                 .   t          | j        | j                  | _        t	          j        | j        j        | j                  | _        t          | j        | j                  | _	        t	          j        | j        j        | j                  | _
        d S r   )rA   rB   rC   ru   rZ   r   r   pre_layrnormr   r   post_layernormr?   s    r-   rg   zFlaxCLIPVisionTransformer.setupF  sx    24;djQQQL1KSWS]^^^&t{$*EEE l4;3MUYU_```r,   NTr   r   c                    ||n| j         j        }||n| j         j        }||n| j         j        }|                     |          }|                     |          }|                     |||||          }|d         }|d d dd d f         }	|                     |	          }	|s||	f|dd          z   S t          ||	|j	        |j
                  S )N)r   r   r   r   r   r   r   r   )rB   r   r   r   ru   r   r   r  r   r!   r"   )
r<   rn   r   r   r   r   r!   r   r    r   s
             r-   rv   z"FlaxCLIPVisionTransformer.__call__L  s    2C1N--TXT_Tq$8$D  $+Jj 	 &1%<kk$+B]55))-88,,''/!5# ' 
 
 ,A.)!!!Q'2++M:: 	L%}58KKK-/')7&1	
 
 
 	
r,   )NTNNTr#   r$   r%   r   r)   r'   rw   rC   rg   r   rv   r+   r,   r-   r   r   B  s         {E39"""a a a "! %
 %
 %
 %
 %
 %
 %
 %
 %
r,   r   c                       e Zd ZU eZdZej        ed<   dde	j
        dfdedede	j        d	ef fd
Zddej        j        dededefdZ	 	 	 	 	 	 	 	 ddee         dej        j        dedee         dee         dee         fdZ xZS )FlaxCLIPTextPreTrainedModelNmodule_classr   r   r   TrB   seedrC   _do_initc                 v     | j         d||d|}t                                          ||||||           d S )NrB   rC   input_shaper  rC   r	  r+   )r  super__init__	r<   rB   r  r  rC   r	  kwargsmodule	__class__s	           r-   r  z$FlaxCLIPTextPreTrainedModel.__init__x  sQ     #"H&HHHH[tSXcklllllr,   rngr  paramsr5   c                 r   t          j        |d          }t          j        t          j        t          j        |          j        d                   |          }t          j        |          }t          j        	                    |          \  }}||d}	| j
                            |	|||          d         }
||t          t          |
                    }
t          t          |                    }| j        D ]}|
|         ||<   t                      | _        t!          t#          |                    S |
S )NrQ   rR   r   r  r   r  )r'   zerosbroadcast_torc   
atleast_2drj   	ones_likerY   randomsplitr  initr   r   _missing_keyssetr   r   )r<   r  r  r  r   rd   r   
params_rngr   rngsrandom_paramsmissing_keys               r-   init_weightsz(FlaxCLIPTextPreTrainedModel.init_weights  s   Ik666	'
3>)3L3L3RSU3V(W(WYdeey11"%*"2"23"7"7
K$==((y.,WWX`a(-)@)@AAM!(6"2"233F#1 A A&3K&@{##!$D.00111  r,   Fr   trainr   r   r   c
                    ||n| j         j        }||n| j         j        }|	|	n| j         j        }	|It	          j        t	          j        t	          j        |          j        d                   |j                  }|t	          j	        |          }i }
|||
d<   | j
                            d|p| j        it	          j        |d          t	          j        |d          t	          j        |d          | |||	|
	  	        S )Nr   r   r  rQ   rR   r"  )rB   r   r   r   r'   r  rc   r  rj   r  r  applyr  array)r<   r   r   rd   r  r   r&  r   r   r   r"  s              r-   rv   z$FlaxCLIPTextPreTrainedModel.__call__  s#    2C1N--TXT_Tq$8$D  $+Jj 	 &1%<kk$+BY+CJs~i7P7P7VWY7Z,[,[]f]lmmL! ]955N ")DO{  v,-Iit,,,InD111Il$///I  ! 

 

 
	
r,   r   NNNNFNNN)r#   r$   r%   r   config_classr  rZ   Moduler)   r'   rw   intrC   r   r  rY   r  PRNGKeyr*   r   r%  r   dictrv   __classcell__r  s   @r-   r  r  t  s^        !L"L")"""
 ;
m 
m
m 	
m
 y
m 
m 
m 
m 
m 
m 
m! !
 2 ! !PZ !fp ! ! ! !0 !%*.,0/3&*'
 '

 '
 Z''
 '
 $D>'
 'tn'
 d^'
 '
 '
 '
 '
 '
 '
 '
r,   r  c                        e Zd ZU eZdZdZej        e	d<   dde
j        dfdedee         ded	e
j        d
ef
 fdZddej        j        dededefdZ	 	 	 	 	 	 ddee         dej        j        dedee         dee         dee         fdZ xZS )FlaxCLIPVisionPreTrainedModelrn   Nr  r   TrB   r  r  rC   r	  c                     |d|j         |j         df} | j        d||d|}t                                          ||||||           d S )Nr   r   r  r  r+   )rV   r  r  r  r  s	           r-   r  z&FlaxCLIPVisionPreTrainedModel.__init__  sk     f/1BAFK""H&HHHH[tSXcklllllr,   r  r  r5   c                    t           j                            ||          }t           j                            |          \  }}||d}| j                            ||          d         }||t          t          |                    }t          t          |                    }| j        D ]}	||	         ||	<   t                      | _        t          t          |                    S |S )Nr  r  )rY   r  r\   r  r  r  r   r   r  r   r   r   )
r<   r  r  r  rn   r!  r   r"  r#  r$  s
             r-   r%  z*FlaxCLIPVisionPreTrainedModel.init_weights  s    z((k::"%*"2"23"7"7
K$==((|<<XF(-)@)@AAM!(6"2"233F#1 A A&3K&@{##!$D.00111  r,   Fr   r&  r   r   r   c           	      0   ||n| j         j        }||n| j         j        }||n| j         j        }t	          j        |d          }i }|||d<   | j                            d|p| j        it	          j	        |t          j
                  | ||||          S )Nr   rN   r   r   r   r  rR   r(  )rB   r   r   r   r'   	transposer  r)  r  r*  rw   )	r<   rn   r  r   r&  r   r   r   r"  s	            r-   rv   z&FlaxCLIPVisionPreTrainedModel.__call__  s     2C1N--TXT_Tq$8$D  $+Jj 	 &1%<kk$+BY}\<@@ ")DO{  v,-Il#+666I  ! 
 
 	
r,   r   )NNFNNN)r#   r$   r%   r   r,  main_input_namer  rZ   r-  r)   r'   rw   r   r*   r.  rC   r   r  rY   r  r/  r   r%  r0  rv   r1  r2  s   @r-   r4  r4    sl        #L$O"L")"""
 (,;m m m e_m 	m
 ym m m m m m m! !
 2 ! !PZ !fp ! ! ! !, "&*.,0/3&*
 
 
 Z'	

 
 $D>
 'tn
 d^
 
 
 
 
 
 
 
r,   r4  c                       e Zd ZU eZdZej        ed<   dde	j
        dfdedee         dede	j        d	ef
 fd
Zddej        j        dededefdZ	 	 	 	 	 	 	 	 ddee         dej        j        dedee         dee         dee         fdZ	 	 	 	 	 ddee         dej        j        fdZ	 ddee         dej        j        fdZ xZS )FlaxCLIPPreTrainedModelNr  r   TrB   r  r  rC   r	  c                     |dd|j         j        |j         j        dff} | j        d||d|}t                                          ||||||           d S )Nr  r   r   r  r  r+   )vision_configrV   r  r  r  r  s	           r-   r  z FlaxCLIPPreTrainedModel.__init__  sw     !Av';'FH\Hgij#klK""H&HHHH[tSXcklllllr,   r  r  r5   c                    t          j        |d         d          }t          j        t          j        t          j        |          j        d                   |d                   }t          j        |          }t          j        	                    ||d                   }t          j        
                    |          \  }}	||	d}
| j                            |
||||          d         }||t          t          |                    }t          t          |                    }| j        D ]}||         ||<   t!                      | _        t#          t%          |                    S |S )Nr   rQ   rR   r   r   r  r  )r'   r  r  rc   r  rj   r  rY   r  r\   r  r  r  r   r   r  r   r   r   )r<   r  r  r  r   rd   r   rn   r!  r   r"  r#  r$  s                r-   r%  z$FlaxCLIPPreTrainedModel.init_weights  sC   Ik!nD999	'
3>)3L3L3RSU3V(W(WYdefYghhy11z((k!n=="%*"2"23"7"7
K$==((y,Xdeefno(-)@)@AAM!(6"2"233F#1 A A&3K&@{##!$D.00111  r,   Fr   r&  r   r   r   c                 p   ||n| j         j        }|	|	n| j         j        }	|
|
n| j         j        }
|It	          j        t	          j        t	          j        |          j        d                   |j                  }|t	          j	        |          }t	          j
        |d          }i }|||d<   | j                            d|p| j        it	          j        |d          t	          j        |t          j                  t	          j        |d          t	          j        |d          | ||	|
|
  
        S )Nr   r8  r   r  rQ   rR   r(  )rB   r   r   r   r'   r  rc   r  rj   r  r9  r  r)  r  r*  rw   )r<   r   rn   r   rd   r  r   r&  r   r   r   r"  s               r-   rv   z FlaxCLIPPreTrainedModel.__call__4  sH    2C1N--TXT_Tq$8$D  $+Jj 	 &1%<kk$+BY+CJs~i7P7P7VWY7Z,[,[]f]lmmL! ]955N}\<@@ ")DO{  v,-Iit,,,Il#+666InD111Il$///I  ! 
 
 	
r,   c           	         |It          j        t          j        t          j        |          j        d                   |j                  }|t          j        |          }i }|||d<   d }| j                            d|p| j        it          j	        |d          t          j	        |d          t          j	        |d          | ||          S )	at  
        Args:
            input_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`):
                Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
                provide it.

                Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
                [`PreTrainedTokenizer.__call__`] for details.

                [What are input IDs?](../glossary#input-ids)

        Returns:
            text_features (`jnp.ndarray` of shape `(batch_size, output_dim`): The text embeddings obtained by applying
            the projection layer to the pooled output of [`FlaxCLIPTextModel`].

        Examples:

        ```python
        >>> from transformers import AutoTokenizer, FlaxCLIPModel

        >>> model = FlaxCLIPModel.from_pretrained("openai/clip-vit-base-patch32")
        >>> tokenizer = AutoTokenizer.from_pretrained("openai/clip-vit-base-patch32")

        >>> inputs = tokenizer(["a photo of a cat", "a photo of a dog"], padding=True, return_tensors="np")
        >>> text_features = model.get_text_features(**inputs)
        ```Nr   r   c                 r    |                      ||||          }|d         }|                     |          }|S )N)r   r   rd   r   r   )
text_modeltext_projection)r  r   r   rd   r   text_outputsr   text_featuress           r-   _get_featuresz@FlaxCLIPPreTrainedModel.get_text_features.<locals>._get_features  sL    !,,#-)+	 -  L )OM"22=AAM  r,   r  rQ   rR   methodr"  )
r'   r  rc   r  rj   r  r  r)  r  r*  )	r<   r   r   rd   r  r   r&  r"  rG  s	            r-   get_text_featuresz)FlaxCLIPPreTrainedModel.get_text_featuresa  s    F +CJs~i7P7P7VWY7Z,[,[]f]lmmL! ]955N ")DO		! 		! 		! {  v,-Iit,,,InD111Il$///I  ! 
 
 	
r,   c                     t          j        |d          }i }|||d<   d }| j                            d|p| j        it          j        |t           j                  | ||          S )a  
        Args:
            pixel_values (`numpy.ndarray` of shape `(batch_size, num_channels, height, width)`):
                Pixel values. Padding will be ignored by default should you provide it. Pixel values can be obtained
                using [`AutoImageProcessor`]. See [`CLIPImageProcessor.__call__`] for details.

        Returns:
            image_features (`jnp.ndarray` of shape `(batch_size, output_dim`): The image embeddings obtained by
            applying the projection layer to the pooled output of [`FlaxCLIPVisionModel`]

        Examples:

        ```python
        >>> from PIL import Image
        >>> import requests
        >>> from transformers import AutoProcessor, FlaxCLIPModel

        >>> model = FlaxCLIPModel.from_pretrained("openai/clip-vit-base-patch32")
        >>> processor = AutoProcessor.from_pretrained("openai/clip-vit-base-patch32")

        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)

        >>> inputs = processor(images=image, return_tensors="np")

        >>> image_features = model.get_image_features(**inputs)
        ```r8  Nr   c                 n    |                      ||          }|d         }|                     |          }|S )N)rn   r   r   )vision_modelvisual_projection)r  rn   r   vision_outputsr   image_featuress         r-   rG  zAFlaxCLIPPreTrainedModel.get_image_features.<locals>._get_features  s>    #00lZg0hhN*1-M#55mDDN!!r,   r  rR   rH  )r'   r9  r  r)  r  r*  rw   )r<   rn   r  r   r&  r"  rG  s          r-   get_image_featuresz*FlaxCLIPPreTrainedModel.get_image_features  s    < }\<@@ ")DO	" 	" 	" {  v,-Il#+666I  ! 
 
 	
r,   r   r+  )NNNNF)NNF)r#   r$   r%   r   r,  r  rZ   r-  r)   r'   rw   r   r*   r.  rC   r   r  rY   r  r/  r   r%  r0  rv   rJ  rQ  r1  r2  s   @r-   r<  r<    s        L"L")"""
 (,;m mm e_m 	m
 ym m m m m m m! !
 2 ! !PZ !fp ! ! ! !6 !%*.,0/3&*+
 +
 +
 Z'+
 +
 $D>+
 'tn+
 d^+
 +
 +
 +
` !%*.A
 A

 A
 Z'A
 A
 A
 A
H jo1
 1
$,TN1
HK
HZ1
 1
 1
 1
 1
 1
 1
 1
r,   r<  c            	       d    e Zd ZU eed<   ej        Zej        ed<   d Z	 	 	 	 dde	de	de	d	e	fd
Z
dS )FlaxCLIPTextModulerB   rC   c                 F    t          | j        | j                  | _        d S r   )r   rB   rC   rC  r?   s    r-   rg   zFlaxCLIPTextModule.setup  s    1$+TZPPPr,   TFr   r   r   r   c           	      :    |                      |||||||          S )Nr   r   rd   r   r   r   r   )rC  )r<   r   r   rd   r   r   r   r   s           r-   rv   zFlaxCLIPTextModule.__call__  s4     )%'/!5#  
 
 	
r,   Nr   r   r+   r,   r-   rS  rS    s         {E39"""Q Q Q #"'%* 
 

 
  
 #
 
 
 
 
 
 
r,   rS  c                       e Zd ZeZdS )FlaxCLIPTextModelN)r#   r$   r%   rS  r  r+   r,   r-   rX  rX    s        %LLLr,   rX  a'  
    Returns:

    Example:

    ```python
    >>> from transformers import AutoTokenizer, FlaxCLIPTextModel

    >>> model = FlaxCLIPTextModel.from_pretrained("openai/clip-vit-base-patch32")
    >>> tokenizer = AutoTokenizer.from_pretrained("openai/clip-vit-base-patch32")

    >>> inputs = tokenizer(["a photo of a cat", "a photo of a dog"], padding=True, return_tensors="np")

    >>> outputs = model(**inputs)
    >>> last_hidden_state = outputs.last_hidden_state
    >>> pooler_output = outputs.pooler_output  # pooled (EOS token) states
    ```
)output_typer,  c            	       d    e Zd ZU eed<   ej        Zej        ed<   d Z	 	 	 	 dde	de	de	d	e	fd
Z
dS )%FlaxCLIPTextModelWithProjectionModulerB   rC   c                     t          | j        | j                  | _        t	          j        | j        j        d| j                  | _        d S )NrR   F)rL   rC   )r   rB   rC   rC  rZ   r   projection_dimrD  r?   s    r-   rg   z+FlaxCLIPTextModelWithProjectionModule.setup  sB    1$+TZPPP!x(BUZ^Zdeeer,   TFr   r   r   r   c           	          |                      |||||||          }|d         }	|                     |	          }
|s|
|d         f|dd          z   S t          |
|j        |j        |j                  S )NrV  r   r   rN   )r   r    r!   r"   )rC  rD  r   r    r!   r"   )r<   r   r   rd   r   r   r   r   rE  r   r   s              r-   rv   z.FlaxCLIPTextModelWithProjectionModule.__call__  s     )%'/!5# ' 
 
 %Q**=99 	Ea1L4DDD&#*<&4#.	
 
 
 	
r,   Nr   r   r+   r,   r-   r[  r[    s         {E39"""f f f #"'%* 
 

 
  
 #
 
 
 
 
 
 
r,   r[  c                       e Zd ZeZdS )FlaxCLIPTextModelWithProjectionN)r#   r$   r%   r[  r  r+   r,   r-   r`  r`  ;  s        8LLLr,   r`  a  
    Returns:

    Example:

    ```python
    >>> from transformers import AutoTokenizer, FlaxCLIPTextModelWithProjection

    >>> model = FlaxCLIPTextModelWithProjection.from_pretrained("openai/clip-vit-base-patch32")
    >>> tokenizer = AutoTokenizer.from_pretrained("openai/clip-vit-base-patch32")

    >>> inputs = tokenizer(["a photo of a cat", "a photo of a dog"], padding=True, return_tensors="np")

    >>> outputs = model(**inputs)
    >>> text_embeds = outputs.text_embeds
    ```
c            	       d    e Zd ZU eed<   ej        Zej        ed<   d Z	 	 	 	 dde	de	de	d	e	fd
Z
dS )FlaxCLIPVisionModulerB   rC   c                 F    t          | j        | j                  | _        d S r   )r   rB   rC   rM  r?   s    r-   rg   zFlaxCLIPVisionModule.setup]  s!    5dkTTTr,   TFr   r   r   r   c                 6    |                      |||||          S )Nrn   r   r   r   r   )rM  )r<   rn   r   r   r   r   s         r-   rv   zFlaxCLIPVisionModule.__call__`  s0       %'/!5# ! 
 
 	
r,   Nr   r  r+   r,   r-   rb  rb  Y  s         {E39"""U U U #"'%* 
 
 
  	

 #
 
 
 
 
 
 
r,   rb  c                       e Zd ZeZdS )FlaxCLIPVisionModelN)r#   r$   r%   rb  r  r+   r,   r-   rg  rg  q  s        'LLLr,   rg  a  
    Returns:

    Example:

    ```python
    >>> from PIL import Image
    >>> import requests
    >>> from transformers import AutoProcessor, FlaxCLIPVisionModel

    >>> model = FlaxCLIPVisionModel.from_pretrained("openai/clip-vit-base-patch32")
    >>> processor = AutoProcessor.from_pretrained("openai/clip-vit-base-patch32")

    >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
    >>> image = Image.open(requests.get(url, stream=True).raw)

    >>> inputs = processor(images=image, return_tensors="np")

    >>> outputs = model(**inputs)
    >>> last_hidden_state = outputs.last_hidden_state
    >>> pooler_output = outputs.pooler_output  # pooled CLS states
    ```
c                   `    e Zd ZU eed<   ej        Zej        ed<   d Z	 	 	 	 	 	 	 	 dde	fdZ
dS )	FlaxCLIPModulerB   rC   c                 X     j         j        } j         j        } j         j         _        |j         _        |j         _        t          | j                   _	        t          | j                   _        t          j         j         j        t          j        j                            d          d           _        t          j         j         j        t          j        j                            d          d           _                             d fdg            _        d S )NrR   rF   F)rC   rM   rL   logit_scalec                 F    t          j        |          j        j        z  S r   )r'   r   rB   logit_scale_init_value)_rj   r<   s     r-   <lambda>z&FlaxCLIPModule.setup.<locals>.<lambda>  s    CHUOOdk>`,` r,   )rB   text_configr>  r]  rU   text_embed_dimvision_embed_dimr   rC   rC  r   rM  rZ   r   rY   r[   r\   rN  rD  rX   rk  )r<   rp  r>  s   `  r-   rg   zFlaxCLIPModule.setup  s   k-1"k8)5 - 91+TZPPP5m4:VVV!#*+22488	"
 "
 "
  "x*+22488	 
  
  
  ::````bd
 
r,   NTr   c	           	      N   ||n| j         j        }|                     |||||          }	|                     |||||||          }
|	d         }|                     |          }|
d         }|                     |          }|t          j                            |dd          z  }|t          j                            |dd          z  }t          j	        | j
                  }t          j        ||j                  |z  }|j        }|s|||||
|	fS t          |||||
|	          S )Nre  rV  r   r   T)rT   keepdims)r0   r1   r   r2   r3   r4   )rB   r   rM  rC  rN  rD  r'   linalgnormexprk  matmulTr/   )r<   r   rn   r   rd   r   r   r   r   rO  rE  r2   r   rk  r1   r0   s                   r-   rv   zFlaxCLIPModule.__call__  sf    &1%<kk$+BY**%'/!5# + 
 
 )%'/!5# ' 
 
 &a(--l;;"1o**;77 $cjoolVZo&[&[[!CJOOKbSWO$X$XX gd.//*[,.AAKO*, 	p$o{LR^`noo-+#%* .
 
 
 	
r,   )NNNNTNNN)r#   r$   r%   r   r)   r'   rw   rC   rg   r   rv   r+   r,   r-   ri  ri    s         {E39"""
 
 
< "!8
 8
 8
 8
 8
 8
 8
 8
r,   ri  c                       e Zd ZeZdS )FlaxCLIPModelN)r#   r$   r%   ri  r  r+   r,   r-   r{  r{    s        !LLLr,   r{  ai  
    Returns:

    Example:

    ```python
    >>> import jax
    >>> from PIL import Image
    >>> import requests
    >>> from transformers import AutoProcessor, FlaxCLIPModel

    >>> model = FlaxCLIPModel.from_pretrained("openai/clip-vit-base-patch32")
    >>> processor = AutoProcessor.from_pretrained("openai/clip-vit-base-patch32")

    >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
    >>> image = Image.open(requests.get(url, stream=True).raw)

    >>> inputs = processor(
    ...     text=["a photo of a cat", "a photo of a dog"], images=image, return_tensors="np", padding=True
    ... )

    >>> outputs = model(**inputs)
    >>> logits_per_image = outputs.logits_per_image  # this is the image-text similarity score
    >>> probs = jax.nn.softmax(logits_per_image, axis=1)  # we can take the softmax to get the label probabilities
    ```
)r{  r<  rX  r  r`  rg  r4  )Mtypingr   r   r   flax
flax.linenlinenrZ   rY   	jax.numpynumpyr'   flax.core.frozen_dictr   r   r   r	   r
   flax.linen.attentionr   flax.traverse_utilr   r   r   modeling_flax_outputsr   r   modeling_flax_utilsr   r   r   r   utilsr   r   r   configuration_clipr   r   r   
get_loggerr#   loggerCLIP_START_DOCSTRINGCLIP_TEXT_INPUTS_DOCSTRINGCLIP_VISION_INPUTS_DOCSTRINGCLIP_INPUTS_DOCSTRINGstruct	dataclassr   r/   r-  rA   ry   r   r   r   r   r   r   r   r  r4  r<  rS  rX  FLAX_CLIP_TEXT_MODEL_DOCSTRINGr[  r`  .FLAX_CLIP_TEXT_MODEL_WITH_PROJECTION_DOCSTRINGrb  rg   FLAX_CLIP_VISION_MODEL_DOCSTRINGri  r{  FLAX_CLIP_MODEL_DOCSTRING__all__r+   r,   r-   <module>r     s    ( ' ' ' ' ' ' ' ' '        



       > > > > > > > > > > 6 6 6 6 6 6 6 6 > > > > > > ; ; ; ; ; ; ; ;       X X X X X X X X            @ ? ? ? ? ? ? ? ? ? L L L L L L L L L L 
	H	%	%! F @  ! H 9 9 9 9 9k 9 9 9:  
  
  
  
  
[  
  
  
F# # # # #ry # # #L    RY   .X X X X X	 X X Xv    ")   (' ' ' ' '29 ' ' 'T,
 ,
 ,
 ,
 ,
bi ,
 ,
 ,
^
 
 
 
 
bi 
 
 
4?
 ?
 ?
 ?
 ?
bi ?
 ?
 ?
D/
 /
 /
 /
 /
	 /
 /
 /
dL
 L
 L
 L
 L
"5 L
 L
 L
^E
 E
 E
 E
 E
$7 E
 E
 E
PJ
 J
 J
 J
 J
1 J
 J
 J
Z
 
 
 
 
 
 
 
8& & & & &3 & & &" &  *,FIg,g h h h    #AP^   
'
 '
 '
 '
 '
BI '
 '
 '
T9 9 9 9 9&A 9 9 92 .$  #%?Bp%p   !  #1HWe   

 
 
 
 
29 
 
 
0( ( ( ( (7 ( ( ($  0  ,.JMm.m n n n    %CRb   
X
 X
 X
 X
 X
RY X
 X
 X
v *++" " " " "+ " " ,+" 6  (=@Y(Y Z Z Z    NYc d d d d  r,   