"""Embedding layer."""

import tensorflow.compat.v2 as tf

from keras import backend
from keras import constraints
from keras import initializers
from keras import regularizers
from keras.dtensor import utils
from keras.engine import base_layer_utils
from keras.engine.base_layer import Layer
from keras.utils import tf_utils

from tensorflow.python.util.tf_export import keras_export


@keras_export("keras.layers.Embedding")
class Embedding(Layer):
    """Turns positive integers (indexes) into dense vectors of fixed size.

    e.g. `[[4], [20]] -> [[0.25, 0.1], [0.6, -0.2]]`

    This layer can only be used on positive integer inputs of a fixed range. The
    `tf.keras.layers.TextVectorization`, `tf.keras.layers.StringLookup`,
    and `tf.keras.layers.IntegerLookup` preprocessing layers can help prepare
    inputs for an `Embedding` layer.
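
    For instance, an `IntegerLookup` layer can map an arbitrary integer
    vocabulary onto the contiguous index range this layer expects (a minimal
    sketch; the vocabulary values are illustrative):

    >>> lookup = tf.keras.layers.IntegerLookup(vocabulary=[10, 20, 30])
    >>> ids = lookup(tf.constant([[10, 30]]))
    >>> tf.keras.layers.Embedding(lookup.vocabulary_size(), 8)(ids).shape
    TensorShape([1, 2, 8])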

    This layer accepts `tf.Tensor` and `tf.RaggedTensor` inputs. It cannot be
    called with `tf.SparseTensor` input.

    Example:

    >>> model = tf.keras.Sequential()
    >>> model.add(tf.keras.layers.Embedding(1000, 64, input_length=10))
    >>> # The model will take as input an integer matrix of size (batch,
    >>> # input_length), and the largest integer (i.e. word index) in the input
    >>> # should be no larger than 999 (vocabulary size).
    >>> # Now model.output_shape is (None, 10, 64), where `None` is the batch
    >>> # dimension.
    >>> input_array = np.random.randint(1000, size=(32, 10))
    >>> model.compile('rmsprop', 'mse')
    >>> output_array = model.predict(input_array)
    >>> print(output_array.shape)
    (32, 10, 64)

    Args:
      input_dim: Integer. Size of the vocabulary,
        i.e. maximum integer index + 1.
      output_dim: Integer. Dimension of the dense embedding.
      embeddings_initializer: Initializer for the `embeddings`
        matrix (see `keras.initializers`).
      embeddings_regularizer: Regularizer function applied to
        the `embeddings` matrix (see `keras.regularizers`).
      embeddings_constraint: Constraint function applied to
        the `embeddings` matrix (see `keras.constraints`).
      mask_zero: Boolean, whether or not the input value 0 is a special
        "padding" value that should be masked out. This is useful when using
        recurrent layers, which may take variable-length input. If this is
        `True`, then all subsequent layers in the model need to support masking
        or an exception will be raised. If `mask_zero` is set to `True`, index 0
        cannot be used in the vocabulary (`input_dim` should equal the
        vocabulary size + 1). See the masking example after this argument list.
      input_length: Length of input sequences, when it is constant.
        This argument is required if you are going to connect
        `Flatten` then `Dense` layers downstream
        (without it, the shape of the dense outputs cannot be computed).
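
    With `mask_zero=True`, zero entries in the input are reported as padding by
    the layer's mask. A minimal illustration (input values are arbitrary):

    >>> layer = tf.keras.layers.Embedding(1000, 64, mask_zero=True)
    >>> mask = layer.compute_mask(tf.constant([[1, 2, 0]]))
    >>> print(mask.numpy())
    [[ True  True False]]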

    Input shape:
      2D tensor with shape: `(batch_size, input_length)`.

    Output shape:
      3D tensor with shape: `(batch_size, input_length, output_dim)`.

    **Note on variable placement:**
    By default, if a GPU is available, the embedding matrix will be placed on
    the GPU. This achieves the best performance, but it might cause issues:

    - You may be using an optimizer that does not support sparse GPU kernels.
    In this case you will see an error upon training your model.
    - Your embedding matrix may be too large to fit on your GPU. In this case
    you will see an Out Of Memory (OOM) error.

    In such cases, you should place the embedding matrix on the CPU memory.
    You can do so with a device scope, as such:

    ```python
    with tf.device('cpu:0'):
      embedding_layer = Embedding(...)
      embedding_layer.build()
    ```

    The pre-built `embedding_layer` instance can then be added to a `Sequential`
    model (e.g. `model.add(embedding_layer)`), called in a Functional model
    (e.g. `x = embedding_layer(x)`), or used in a subclassed model.
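
    For example (a minimal sketch; the layer arguments and the downstream
    layers are illustrative):

    ```python
    with tf.device('cpu:0'):
      embedding_layer = Embedding(1000, 64)
      embedding_layer.build()

    model = tf.keras.Sequential()
    model.add(embedding_layer)
    model.add(tf.keras.layers.GlobalAveragePooling1D())
    model.add(tf.keras.layers.Dense(1))
    ```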
    ÚuniformNFc	       
         sÈ   d|	kr |r|f|	d< nd|	d< |dks0|dkrDt d|› d|› ƒ‚t ¡ s`d|	kr`t ¡ |	d< d|	d< tƒ jf |	Ž || _|| _t	 
|¡| _t 
|¡| _t 
|¡| _t 
|¡| _|| _|| _|| _d S )	NÚinput_shape)Nr   zKBoth `input_dim` and `output_dim` should be positive, Received input_dim = z and output_dim = ÚdtypeFZautocast)Ú
ValueErrorr   Zv2_dtype_behavior_enabledr   ZfloatxÚsuperÚ__init__Ú	input_dimÚ
output_dimr   ÚgetÚembeddings_initializerr   Úembeddings_regularizerÚactivity_regularizerr   Úembeddings_constraintÚ	mask_zeroZsupports_maskingÚinput_length)
Úselfr   r   r   r   r   r   r   r   Úkwargs)Ú	__class__© úM/var/www/html/venv/lib/python3.7/site-packages/keras/layers/core/embedding.pyr   q   s*    zEmbedding.__init__c             C   s0   | j | j| jf| jd| j| jdd| _d| _d S )NÚ
embeddingsF)ÚshapeZinitializerÚnameZregularizerÚ
constraintZexperimental_autocastT)Z
add_weightr   r   r   r   r   r    Zbuilt)r   r   r   r   r   Úbuild¡   s    

zEmbedding.buildc             C   s   | j s
d S t |d¡S )Nr   )r   ÚtfÚ	not_equal)r   ÚinputsÚmaskr   r   r   Úcompute_mask­   s    zEmbedding.compute_maskc             C   sì   | j d kr|| jf S t| j ttfƒr2t| j ƒ}n| j g}t|ƒt|ƒd krftd| j › d|› ƒ‚nhxftt||dd … ƒƒD ]L\}\}}|d k	rº|d k	rº||krºtd| j › d|› ƒ‚q~|d kr~|||< q~W |d ft|ƒ | jf S d S )Né   z"input_length" is z, but received input has shape r   )	r   r   Ú
isinstanceÚlistÚtupleÚlenr   Ú	enumerateÚzip)r   r   Zin_lensÚiÚs1Ús2r   r   r   Úcompute_output_shape²   s    
$zEmbedding.compute_output_shapec             C   sZ   t  |¡}|dkr&|dkr&t |d¡}tj | j|¡}| jj| jj	krVt || jj¡}|S )NZint32Zint64)
r   r   r%   ÚcastÚnnZembedding_lookupr    Z_dtype_policyZcompute_dtypeZvariable_dtype)r   r'   r   Úoutr   r   r   ÚcallÌ   s    

zEmbedding.callc          	      sd   | j | jt | j¡t | j¡t | j¡t | j	¡| j
| jdœ}tƒ  ¡ }tt| ¡ ƒt| ¡ ƒ ƒS )N)r   r   r   r   r   r   r   r   )r   r   r   Ú	serializer   r   r   r   r   r   r   r   r   Ú
get_configÚdictr,   Úitems)r   ÚconfigZbase_config)r   r   r   r:   Ú   s    

zEmbedding.get_config)r   NNNFN)N)N)Ú__name__Ú
__module__Ú__qualname__Ú__doc__r   Zallow_initializer_layoutr   r	   Zshape_type_conversionr$   r)   r4   r8   r:   Ú__classcell__r   r   )r   r   r   !   s   N     &
r   )rA   Ztensorflow.compat.v2ÚcompatZv2r%   Zkerasr   r   r   r   Zkeras.dtensorr   Zkeras.enginer   Zkeras.engine.base_layerr   Zkeras.utilsr	   Z tensorflow.python.util.tf_exportr
   r   r   r   r   r   Ú<module>   s   