"""Contains the Dense layer."""

import tensorflow.compat.v2 as tf

from keras import activations
from keras import backend
from keras import constraints
from keras import initializers
from keras import regularizers
from keras.dtensor import utils
from keras.engine.base_layer import Layer
from keras.engine.input_spec import InputSpec
from tensorflow.python.util.tf_export import keras_export


@keras_export("keras.layers.Dense")
class Dense(Layer):
    """Just your regular densely-connected NN layer.

    `Dense` implements the operation:
    `output = activation(dot(input, kernel) + bias)`
    where `activation` is the element-wise activation function
    passed as the `activation` argument, `kernel` is a weights matrix
    created by the layer, and `bias` is a bias vector created by the layer
    (only applicable if `use_bias` is `True`). These are all attributes of
    `Dense`.

    Note: If the input to the layer has a rank greater than 2, then `Dense`
    computes the dot product between the `inputs` and the `kernel` along the
    last axis of the `inputs` and axis 0 of the `kernel` (using `tf.tensordot`).
    For example, if input has dimensions `(batch_size, d0, d1)`, then we create
    a `kernel` with shape `(d1, units)`, and the `kernel` operates along axis 2
    of the `input`, on every sub-tensor of shape `(1, 1, d1)` (there are
    `batch_size * d0` such sub-tensors).  The output in this case will have
    shape `(batch_size, d0, units)`.
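
    For example, with an arbitrary rank-3 input (illustrative shapes only):

    >>> x = tf.zeros((8, 10, 5))
    >>> y = tf.keras.layers.Dense(32)(x)
    >>> y.shape
    TensorShape([8, 10, 32])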

    Also note that layer attributes cannot be modified after the layer has
    been called once (except the `trainable` attribute).
    When the commonly used kwarg `input_shape` is passed, Keras will create an
    input layer to insert before the current layer. This is equivalent to
    explicitly defining an `InputLayer`, as shown below.
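
    For instance, passing `input_shape` below is equivalent to adding
    `tf.keras.Input(shape=(16,))` as a first layer (an illustrative sketch):

    >>> model = tf.keras.models.Sequential()
    >>> model.add(tf.keras.layers.Dense(32, input_shape=(16,)))
    >>> model.output_shape
    (None, 32)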

    Example:

    >>> # Create a `Sequential` model and add a Dense layer as the first layer.
    >>> model = tf.keras.models.Sequential()
    >>> model.add(tf.keras.Input(shape=(16,)))
    >>> model.add(tf.keras.layers.Dense(32, activation='relu'))
    >>> # Now the model will take as input arrays of shape (None, 16)
    >>> # and output arrays of shape (None, 32).
    >>> # Note that after the first layer, you don't need to specify
    >>> # the size of the input anymore:
    >>> model.add(tf.keras.layers.Dense(32))
    >>> model.output_shape
    (None, 32)

    Args:
      units: Positive integer, dimensionality of the output space.
      activation: Activation function to use.
        If you don't specify anything, no activation is applied
        (i.e. "linear" activation: `a(x) = x`).
      use_bias: Boolean, whether the layer uses a bias vector.
      kernel_initializer: Initializer for the `kernel` weights matrix.
      bias_initializer: Initializer for the bias vector.
      kernel_regularizer: Regularizer function applied to
        the `kernel` weights matrix (see the example below).
      bias_regularizer: Regularizer function applied to the bias vector.
      activity_regularizer: Regularizer function applied to
        the output of the layer (its "activation").
      kernel_constraint: Constraint function applied to
        the `kernel` weights matrix.
      bias_constraint: Constraint function applied to the bias vector.
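
    For example, the weight-related arguments above can be combined as follows
    (illustrative, arbitrary values):

    >>> layer = tf.keras.layers.Dense(
    ...     4, activation="relu", kernel_initializer="he_normal",
    ...     kernel_regularizer=tf.keras.regularizers.L2(1e-4))
    >>> layer(tf.ones((2, 16))).shape
    TensorShape([2, 4])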

    Input shape:
      N-D tensor with shape: `(batch_size, ..., input_dim)`.
      The most common situation would be
      a 2D input with shape `(batch_size, input_dim)`.

    Output shape:
      N-D tensor with shape: `(batch_size, ..., units)`.
      For instance, for a 2D input with shape `(batch_size, input_dim)`,
      the output would have shape `(batch_size, units)`.
    """

    @utils.allow_initializer_layout
    def __init__(self, units, activation=None, use_bias=True,
                 kernel_initializer="glorot_uniform", bias_initializer="zeros",
                 kernel_regularizer=None, bias_regularizer=None,
                 activity_regularizer=None, kernel_constraint=None,
                 bias_constraint=None, **kwargs):
        super().__init__(activity_regularizer=activity_regularizer, **kwargs)

        self.units = int(units) if not isinstance(units, int) else units
        if self.units < 0:
            raise ValueError(
                "Received an invalid value for `units`, expected a positive "
                f"integer. Received: units={units}")
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)

        self.input_spec = InputSpec(min_ndim=2)
        self.supports_masking = True
zDense.__init__c          	   C   sÖ   t  | jpt ¡ ¡}|js.|js.td|› ƒ‚t  |¡}t j	 
|d ¡}|d kr^td|› ƒ‚tdd|id| _| jd|| jg| j| j| j| jdd| _| jrÆ| jd	| jg| j| j| j| jdd| _nd | _d| _d S )
NzMA Dense layer can only be built with a floating-point dtype. Received: dtype=éÿÿÿÿzlThe last dimension of the inputs to a Dense layer should be defined. Found None. Full input shape received: r   )r   ZaxesÚkernelT)ÚshapeZinitializerZregularizerÚ
constraintÚdtypeZ	trainableÚbias)ÚtfZas_dtyper*   r   ZfloatxZis_floatingZ
is_complexÚ	TypeErrorÚTensorShapeÚcompatÚdimension_valuer   r	   r    Z
add_weightr   r   r   r   r'   r   r   r   r   r+   Zbuilt)r!   Úinput_shaper*   Zlast_dimr$   r$   r%   Úbuild‰   s:    

zDense.buildc             C   sê  |j j| jjkr tj|| jd}t|tjƒ}|rÊtj |j	d ¡d krXt
d|j	› dƒ‚|}|jj	jdkrr|j}nX|j	jdkrŒ| ¡ }d}n>xt|jd ƒD ]
}|j}qœW | ¡ }tj ||jd d… ¡}|j	j}|dksä|d krTt|tjƒrBtj |d¡\}}tj|j|jd d …df |jd	}|}tjj| j||d
d}ntj|| jd}nTt || j|d gdgg¡}t ¡ s¨|j	 ¡ }	|	d d… | jj	d g }
| |
¡ | j rÀtj !|| j"¡}| j#d k	rÖ|  #|¡}|ræ| $|¡}|S )N)r*   r&   zkDense layer only supports RaggedTensors when the innermost dimension is non-ragged. Received: inputs.shape=Ú.é   r   Fr   )ÚindicesÚvaluesÚdense_shapeÚsum)Zcombiner)ÚaÚb)%r*   Z
base_dtypeZ_compute_dtype_objectr,   Úcastr   ZRaggedTensorr/   r0   r(   r   Zflat_valuesÚrankZ	to_tensorÚrangeZragged_rankr6   Zfrom_nested_row_splitsZnested_row_splitsZSparseTensorÚsparseZfill_empty_rowsr5   r7   ÚnnZembedding_lookup_sparser'   ÚmatmulZ	tensordotZexecuting_eagerlyÚas_listÚ	set_shaper   Zbias_addr+   r   Zwith_flat_values)r!   ÚinputsZ	is_raggedZoriginal_inputsÚ_r<   ZidsÚweightsÚoutputsr(   Zoutput_shaper$   r$   r%   Úcall³   sT    







z

    def compute_output_shape(self, input_shape):
        input_shape = tf.TensorShape(input_shape)
        input_shape = input_shape.with_rank_at_least(2)
        if tf.compat.dimension_value(input_shape[-1]) is None:
            raise ValueError(
                "The last dimension of the input shape of a Dense layer "
                f"should be defined. Found None. Received: input_shape={input_shape}")
        return input_shape[:-1].concatenate(self.units)
zDense.compute_output_shapec                sr   t ƒ  ¡ }| | jt | j¡| jt | j	¡t | j
¡t | j¡t | j¡t | j¡t | j¡t | j¡dœ
¡ |S )N)
r   r   r   r   r   r   r   r   r   r   )r   Ú
get_configÚupdater   r   Ú	serializer   r   r   r   r   r   r   r   r   r   r   r   )r!   Úconfig)r#   r$   r%   rI     s&    

zDense.get_config)	NTr   r   NNNNN)Ú__name__Ú