"""Exponential Linear Unit activation layer."""

from keras import backend
from keras.engine.base_layer import Layer
from keras.utils import tf_utils

from tensorflow.python.util.tf_export import keras_export


@keras_export("keras.layers.ELU")
class ELU(Layer):
    """Exponential Linear Unit.

    It follows:

    ```
      f(x) =  alpha * (exp(x) - 1.) for x < 0
      f(x) = x for x >= 0
    ```

    Input shape:
      Arbitrary. Use the keyword argument `input_shape`
      (tuple of integers, does not include the samples axis)
      when using this layer as the first layer in a model.

    Output shape:
      Same shape as the input.

    Args:
      alpha: Scale for the negative factor.
    """

    def __init__(self, alpha=1.0, **kwargs):
        super().__init__(**kwargs)
        if alpha is None:
            raise ValueError(
                "Alpha of an ELU layer cannot be None, expecting a float. "
                f"Received: {alpha}"
            )

        self.supports_masking = True
        self.alpha = backend.cast_to_floatx(alpha)

    def call(self, inputs):
        return backend.elu(inputs, self.alpha)

    def get_config(self):
        config = {"alpha": float(self.alpha)}
        base_config = super().get_config()
        return dict(list(base_config.items()) + list(config.items()))

    @tf_utils.shape_type_conversion
    def compute_output_shape(self, input_shape):
        return input_shape