"""Utilities for unit-testing Keras."""

import collections
import contextlib
import functools
import itertools
import threading

import numpy as np

from tensorflow.python import tf2
from tensorflow.python.eager import context
from tensorflow.python.framework import config
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import test_util
from tensorflow.python.keras import backend
from tensorflow.python.keras import layers
from tensorflow.python.keras import models
from tensorflow.python.keras.engine import base_layer_utils
from tensorflow.python.keras.optimizer_v2 import adadelta as adadelta_v2
from tensorflow.python.keras.optimizer_v2 import adagrad as adagrad_v2
from tensorflow.python.keras.optimizer_v2 import adam as adam_v2
from tensorflow.python.keras.optimizer_v2 import adamax as adamax_v2
from tensorflow.python.keras.optimizer_v2 import gradient_descent as gradient_descent_v2
from tensorflow.python.keras.optimizer_v2 import nadam as nadam_v2
from tensorflow.python.keras.optimizer_v2 import rmsprop as rmsprop_v2
from tensorflow.python.keras.utils import tf_contextlib
from tensorflow.python.keras.utils import tf_inspect
from tensorflow.python.util import tf_decorator


def string_test(actual, expected):
  np.testing.assert_array_equal(actual, expected)


def numeric_test(actual, expected):
  np.testing.assert_allclose(actual, expected, rtol=1e-3, atol=1e-6)


def get_test_data(train_samples,
                  test_samples,
                  input_shape,
                  num_classes,
                  random_seed=None):
  """Generates test data to train a model on.

  Args:
    train_samples: Integer, how many training samples to generate.
    test_samples: Integer, how many test samples to generate.
    input_shape: Tuple of integers, shape of the inputs.
    num_classes: Integer, number of classes for the data and targets.
    random_seed: Integer, random seed used by numpy to generate data.

  Returns:
    A tuple of Numpy arrays: `(x_train, y_train), (x_test, y_test)`.
  """
  if random_seed is not None:
    np.random.seed(random_seed)
  num_sample = train_samples + test_samples
  templates = 2 * num_classes * np.random.random((num_classes,) + input_shape)
  y = np.random.randint(0, num_classes, size=(num_sample,))
  x = np.zeros((num_sample,) + input_shape, dtype=np.float32)
  for i in range(num_sample):
    x[i] = templates[y[i]] + np.random.normal(loc=0, scale=1., size=input_shape)
  return ((x[:train_samples], y[:train_samples]),
          (x[train_samples:], y[train_samples:]))


@test_util.disable_cudnn_autotune
def layer_test(layer_cls,
               kwargs=None,
               input_shape=None,
               input_dtype=None,
               input_data=None,
               expected_output=None,
               expected_output_dtype=None,
               expected_output_shape=None,
               validate_training=True,
               adapt_data=None,
               custom_objects=None,
               test_harness=None,
               supports_masking=None):
  """Test routine for a layer with a single input and single output.

  Args:
    layer_cls: Layer class object.
    kwargs: Optional dictionary of keyword arguments for instantiating the
      layer.
    input_shape: Input shape tuple.
    input_dtype: Data type of the input data.
    input_data: Numpy array of input data.
    expected_output: Numpy array of the expected output.
    expected_output_dtype: Data type expected for the output.
    expected_output_shape: Shape tuple for the expected shape of the output.
    validate_training: Whether to attempt to validate training on this layer.
      This might be set to False for non-differentiable layers that output
      string or integer values.
    adapt_data: Optional data for an 'adapt' call. If None, adapt() will not
      be tested for this layer. This is only relevant for PreprocessingLayers.
    custom_objects: Optional dictionary mapping name strings to custom objects
      in the layer class. This is helpful for testing custom layers.
    test_harness: The Tensorflow test, if any, that this function is being
      called in.
    supports_masking: Optional boolean to check the `supports_masking` property
      of the layer. If None, the check will not be performed.

  Returns:
    The output data (Numpy array) returned by the layer, for additional
    checks to be done by the calling code.

  Raises:
    ValueError: if `input_shape is None`.
  """
  # Generate input data if none was provided.
  if input_data is None:
    if input_shape is None:
      raise ValueError('input_shape is None')
    if not input_dtype:
      input_dtype = 'float32'
    input_data_shape = list(input_shape)
    for i, e in enumerate(input_data_shape):
      if e is None:
        input_data_shape[i] = np.random.randint(1, 4)
    input_data = 10 * np.random.random(input_data_shape)
    if input_dtype[:5] == 'float':
      input_data -= 0.5
    input_data = input_data.astype(input_dtype)
  elif input_shape is None:
    input_shape = input_data.shape
  if input_dtype is None:
    input_dtype = input_data.dtype
  if expected_output_dtype is None:
    expected_output_dtype = input_dtype

  # Pick the comparison helper: exact comparison for string outputs,
  # approximate comparison for numeric outputs.
  if dtypes.as_dtype(expected_output_dtype) == dtypes.string:
    if test_harness:
      assert_equal = test_harness.assertAllEqual
    else:
      assert_equal = string_test
  else:
    if test_harness:
      assert_equal = test_harness.assertAllClose
    else:
      assert_equal = numeric_test

  # Instantiation.
  kwargs = kwargs or {}
  layer = layer_cls(**kwargs)

  if (supports_masking is not None and
      layer.supports_masking != supports_masking):
    raise AssertionError(
        'When testing layer %s, the `supports_masking` property is %r '
        'but expected to be %r.\nFull kwargs: %s' %
        (layer_cls.__name__, layer.supports_masking, supports_masking, kwargs))

  # Test adapt, if data was passed.
  if adapt_data is not None:
    layer.adapt(adapt_data)

  # Test get_weights/set_weights at the layer level.
  weights = layer.get_weights()
  layer.set_weights(weights)

  # Test instantiation from weights, when supported by the constructor.
  if 'weights' in tf_inspect.getargspec(layer_cls.__init__):
    kwargs['weights'] = weights
    layer = layer_cls(**kwargs)

  # Test in the functional API.
  x = layers.Input(shape=input_shape[1:], dtype=input_dtype)
  y = layer(x)
  if backend.dtype(y) != expected_output_dtype:
    raise AssertionError(
        'When testing layer %s, for input %s, found output dtype=%s but '
        'expected to find %s.\nFull kwargs: %s' %
        (layer_cls.__name__, x, backend.dtype(y), expected_output_dtype,
         kwargs))

  def assert_shapes_equal(expected, actual):
    """Asserts that the output shape from the layer matches the actual shape."""
    if len(expected) != len(actual):
      raise AssertionError(
          'When testing layer %s, for input %s, found output_shape=%s but '
          'expected to find %s.\nFull kwargs: %s' %
          (layer_cls.__name__, x, actual, expected, kwargs))
    for expected_dim, actual_dim in zip(expected, actual):
      if isinstance(expected_dim, tensor_shape.Dimension):
        expected_dim = expected_dim.value
      if isinstance(actual_dim, tensor_shape.Dimension):
        actual_dim = actual_dim.value
      if expected_dim is not None and expected_dim != actual_dim:
        raise AssertionError(
            'When testing layer %s, for input %s, found output_shape=%s but '
            'expected to find %s.\nFull kwargs: %s' %
            (layer_cls.__name__, x, actual, expected, kwargs))

  if expected_output_shape is not None:
    assert_shapes_equal(
        tensor_shape.TensorShape(expected_output_shape), y.shape)

  # Check shape inference.
  model = models.Model(x, y)
  computed_output_shape = tuple(
      layer.compute_output_shape(
          tensor_shape.TensorShape(input_shape)).as_list())
  computed_output_signature = layer.compute_output_signature(
      tensor_spec.TensorSpec(shape=input_shape, dtype=input_dtype))
  actual_output = model.predict(input_data)
  actual_output_shape = actual_output.shape
  assert_shapes_equal(computed_output_shape, actual_output_shape)
  assert_shapes_equal(computed_output_signature.shape, actual_output_shape)
  if computed_output_signature.dtype != actual_output.dtype:
    raise AssertionError(
        'When testing layer %s, for input %s, found output_dtype=%s but '
        'expected to find %s.\nFull kwargs: %s' %
        (layer_cls.__name__, x, actual_output.dtype,
         computed_output_signature.dtype, kwargs))
  if expected_output is not None:
    assert_equal(actual_output, expected_output)

  # Test serialization and weight setting at the model level.
  model_config = model.get_config()
  recovered_model = models.Model.from_config(model_config, custom_objects)
  if model.weights:
    weights = model.get_weights()
    recovered_model.set_weights(weights)
    output = recovered_model.predict(input_data)
    assert_equal(output, actual_output)

  # Test training mode (e.g. useful for dropout tests). Rebuild the model to
  # avoid the graph being reused between predict() and train_on_batch().
  layer_weights = layer.get_weights()  # Get the layer weights BEFORE training.
  if validate_training:
    model = models.Model(x, layer(x))
    if _thread_local_data.run_eagerly is not None:
      model.compile(
          'rmsprop',
          'mse',
          weighted_metrics=['acc'],
          run_eagerly=should_run_eagerly())
    else:
      model.compile('rmsprop', 'mse', weighted_metrics=['acc'])
    model.train_on_batch(input_data, actual_output)

  # Test as first layer in the Sequential API.
  layer_config = layer.get_config()
  layer_config['batch_input_shape'] = input_shape
  layer = layer.__class__.from_config(layer_config)

  # Test adapt again on the deserialized layer, if data was passed.
  if adapt_data is not None:
    layer.adapt(adapt_data)

  model = models.Sequential()
  model.add(layers.Input(shape=input_shape[1:], dtype=input_dtype))
  model.add(layer)

  layer.set_weights(layer_weights)
  actual_output = model.predict(input_data)
  actual_output_shape = actual_output.shape
  for expected_dim, actual_dim in zip(computed_output_shape,
                                      actual_output_shape):
    if expected_dim is not None and expected_dim != actual_dim:
      raise AssertionError(
          'When testing layer %s **after deserialization**, for input %s, '
          'found output_shape=%s but expected to find inferred shape %s.\n'
          'Full kwargs: %s' %
          (layer_cls.__name__, x, actual_output_shape, computed_output_shape,
           kwargs))
  if expected_output is not None:
    assert_equal(actual_output, expected_output)

  # Test serialization and weight setting at the model level.
  model_config = model.get_config()
  recovered_model = models.Sequential.from_config(model_config, custom_objects)
  if model.weights:
    weights = model.get_weights()
    recovered_model.set_weights(weights)
    output = recovered_model.predict(input_data)
    assert_equal(output, actual_output)

  # For further checks in the calling code.
  return actual_output


# Thread-local state consulted by the scope helpers defined below.
_thread_local_data = threading.local()
_thread_local_data.model_type = None
_thread_local_data.run_eagerly = None
_thread_local_data.saved_model_format = None
_thread_local_data.save_kwargs = None
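

# Illustrative sketch (not part of the original module): exercising a stock
# Keras layer through `layer_test`. The chosen layer, kwargs and shapes are
# assumptions for demonstration only.
def _example_layer_test_usage():
  return layer_test(
      layers.Dense,
      kwargs={'units': 3},
      input_shape=(2, 4),
      expected_output_shape=(None, 3))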


@tf_contextlib.contextmanager
def model_type_scope(value):
  """Provides a scope within which the model type to test is equal to `value`.

  The model type gets restored to its original value upon exiting the scope.

  Args:
     value: model type value

  Yields:
    The provided value.
  """
  previous_value = _thread_local_data.model_type
  try:
    _thread_local_data.model_type = value
    yield value
  finally:
    # Restore the model type to its initial value.
    _thread_local_data.model_type = previous_value


@tf_contextlib.contextmanager
def run_eagerly_scope(value):
  """Provides a scope within which we compile models to run eagerly or not.

  The boolean gets restored to its original value upon exiting the scope.

  Args:
     value: Bool specifying if we should run models eagerly in the active test.
     Should be True or False.

  Yields:
    The provided value.
  """
  previous_value = _thread_local_data.run_eagerly
  try:
    _thread_local_data.run_eagerly = value
    yield value
  finally:
    # Restore the run_eagerly flag to its initial value.
    _thread_local_data.run_eagerly = previous_value


def should_run_eagerly():
  """Returns whether the models we are testing should be run eagerly."""
  if _thread_local_data.run_eagerly is None:
    raise ValueError('Cannot call `should_run_eagerly()` outside of a '
                     '`run_eagerly_scope()` or `run_all_keras_modes` '
                     'decorator.')
  return _thread_local_data.run_eagerly and context.executing_eagerly()


@tf_contextlib.contextmanager
def saved_model_format_scope(value, **kwargs):
  """Provides a scope within which the saved model format to test is `value`.

  The saved model format gets restored to its original value upon exiting the
  scope.

  Args:
     value: saved model format value
     **kwargs: optional kwargs to pass to the save function.

  Yields:
    The provided value.
  """
  previous_format = _thread_local_data.saved_model_format
  previous_kwargs = _thread_local_data.save_kwargs
  try:
    _thread_local_data.saved_model_format = value
    _thread_local_data.save_kwargs = kwargs
    yield
  finally:
    # Restore the saved model format and kwargs to their initial values.
    _thread_local_data.saved_model_format = previous_format
    _thread_local_data.save_kwargs = previous_kwargs


def get_save_format():
  if _thread_local_data.saved_model_format is None:
    raise ValueError(
        'Cannot call `get_save_format()` outside of a '
        '`saved_model_format_scope()` or `run_with_all_saved_model_formats` '
        'decorator.')
  return _thread_local_data.saved_model_format


def get_save_kwargs():
  if _thread_local_data.save_kwargs is None:
    raise ValueError(
        'Cannot call `get_save_kwargs()` outside of a '
        '`saved_model_format_scope()` or `run_with_all_saved_model_formats` '
        'decorator.')
  return _thread_local_data.save_kwargs or {}


def get_model_type():
  """Gets the model type that should be tested."""
  if _thread_local_data.model_type is None:
    raise ValueError('Cannot call `get_model_type()` outside of a '
                     '`model_type_scope()` or `run_with_all_model_types` '
                     'decorator.')
  return _thread_local_data.model_type
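

# Illustrative sketch (not part of the original module): typical use of the
# thread-local scopes above inside a test body. The helper name
# `_example_scopes_usage` is hypothetical.
def _example_scopes_usage():
  with model_type_scope('functional'):
    # Within the scope, helpers such as `get_small_mlp` below consult
    # `get_model_type()` to decide which flavour of model to build.
    assert get_model_type() == 'functional'
  with run_eagerly_scope(True):
    # Compiled models created inside the scope should pass
    # `run_eagerly=should_run_eagerly()` to `Model.compile`.
    assert should_run_eagerly() == context.executing_eagerly()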


def get_small_sequential_mlp(num_hidden, num_classes, input_dim=None):
  model = models.Sequential()
  if input_dim:
    model.add(layers.Dense(num_hidden, activation='relu', input_dim=input_dim))
  else:
    model.add(layers.Dense(num_hidden, activation='relu'))
  activation = 'sigmoid' if num_classes == 1 else 'softmax'
  model.add(layers.Dense(num_classes, activation=activation))
  return model


def get_small_functional_mlp(num_hidden, num_classes, input_dim):
  inputs = layers.Input(shape=(input_dim,))
  outputs = layers.Dense(num_hidden, activation='relu')(inputs)
  activation = 'sigmoid' if num_classes == 1 else 'softmax'
  outputs = layers.Dense(num_classes, activation=activation)(outputs)
  return models.Model(inputs, outputs)


class SmallSubclassMLP(models.Model):
  """A subclass model based small MLP."""

  def __init__(self, num_hidden, num_classes, use_bn=False, use_dp=False,
               **kwargs):
    super(SmallSubclassMLP, self).__init__(name='test_model', **kwargs)
    self.use_bn = use_bn
    self.use_dp = use_dp

    self.layer_a = layers.Dense(num_hidden, activation='relu')
    activation = 'sigmoid' if num_classes == 1 else 'softmax'
    self.layer_b = layers.Dense(num_classes, activation=activation)
    if self.use_dp:
      self.dp = layers.Dropout(0.5)
    if self.use_bn:
      self.bn = layers.BatchNormalization(axis=-1)

  def call(self, inputs, **kwargs):
    x = self.layer_a(inputs)
    if self.use_dp:
      x = self.dp(x)
    if self.use_bn:
      x = self.bn(x)
    return self.layer_b(x)


class _SmallSubclassMLPCustomBuild(models.Model):
  """A subclass model small MLP that uses a custom build method."""

  def __init__(self, num_hidden, num_classes):
    super(_SmallSubclassMLPCustomBuild, self).__init__()
    self.layer_a = None
    self.layer_b = None
    self.num_hidden = num_hidden
    self.num_classes = num_classes

  def build(self, input_shape):
    self.layer_a = layers.Dense(self.num_hidden, activation='relu')
    activation = 'sigmoid' if self.num_classes == 1 else 'softmax'
    self.layer_b = layers.Dense(self.num_classes, activation=activation)

  def call(self, inputs, **kwargs):
    x = self.layer_a(inputs)
    return self.layer_b(x)


def get_small_subclass_mlp(num_hidden, num_classes):
  return SmallSubclassMLP(num_hidden, num_classes)


def get_small_subclass_mlp_with_custom_build(num_hidden, num_classes):
  return _SmallSubclassMLPCustomBuild(num_hidden, num_classes)


def get_small_mlp(num_hidden, num_classes, input_dim):
  """Get a small mlp of the model type specified by `get_model_type`."""
  model_type = get_model_type()
  if model_type == 'subclass':
    return get_small_subclass_mlp(num_hidden, num_classes)
  if model_type == 'subclass_custom_build':
    return get_small_subclass_mlp_with_custom_build(num_hidden, num_classes)
  if model_type == 'sequential':
    return get_small_sequential_mlp(num_hidden, num_classes, input_dim)
  if model_type == 'functional':
    return get_small_functional_mlp(num_hidden, num_classes, input_dim)
  raise ValueError('Unknown model type {}'.format(model_type))
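

# Illustrative sketch (not part of the original module): building and
# compiling a small MLP for the model type selected by the surrounding scope.
# The helper name `_example_get_small_mlp_usage` is hypothetical.
def _example_get_small_mlp_usage():
  with model_type_scope('sequential'):
    model = get_small_mlp(num_hidden=8, num_classes=3, input_dim=10)
  model.compile(optimizer='rmsprop', loss='categorical_crossentropy')
  return model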

r   c                   s0   e Zd ZdZ fddZdd Zdd Z  ZS )_SubclassModelzA Keras subclass model.c                sf   | dd}tt| j|| x&t|D ]\}}t| | || q(W t|| _|dk	rb| 	| dS )zInstantiate a model.

    Args:
      model_layers: a list of layers to be added to the model.
      *args: Model's args
      **kwargs: Model's keyword args, at most one of input_tensor -> the input
        tensor required for ragged/sparse input.
    input_tensorN)
poprx   r   rM   rI   setattr_layer_name_for_ir9   
num_layersZ_set_inputs)r   model_layersargsrA   rr   r0   r[   )rV   r   r   rM     s    

z_SubclassModel.__init__c             C   s
   d |S )Nzlayer{})r   )r   r0   r   r   r   r     s    z _SubclassModel._layer_name_for_ic             K   s4   |}x*t | jD ]}t| | |}||}qW |S )N)r*   r   getattrr   )r   rr   rA   r/   r0   r[   r   r   r   r     s
    z_SubclassModel.call)r;   r   r   r   rM   r   r   r   r   r   )rV   r   r     s   r   c                   s0   e Zd ZdZ fddZdd Zdd Z  ZS )_SubclassModelCustomBuildz7A Keras subclass model that uses a custom build method.c                s"   t t| j|| d | _|| _d S )N)rx   r   rM   
all_layers_layer_generating_func)r   layer_generating_funcr   rA   )rV   r   r   rM   (  s    z"_SubclassModelCustomBuild.__init__c             C   s*   g }x|   D ]}|| qW || _d S )N)r   appendr   )r   r,   r   r[   r   r   r   r   -  s    z_SubclassModelCustomBuild.buildc             K   s    |}x| j D ]}||}qW |S )N)r   )r   rr   rA   r/   r[   r   r   r   r   3  s    z_SubclassModelCustomBuild.call)r;   r   r   r   rM   r   r   r   r   r   )rV   r   r   %  s   r   c                s  |dkrt  }|dkrBd}|s"|r4tj||||d}t ||dS |dkrb fdd}t||dS |d	krtj|d}	|r|	tj||||d
 x D ]}
|	|
 qW |	S |dkr|st	dtj||||d}|}x D ]}
|
|}qW tj
|||dS t	d|dS )a-  Builds a model from a sequence of layers.

  Args:
    model_layers: The layers used to build the network.
    input_shape: Shape tuple of the input or 'TensorShape' instance.
    input_dtype: Datatype of the input.
    name: Name for the model.
    input_ragged: Boolean, whether the input data is a ragged tensor.
    input_sparse: Boolean, whether the input data is a sparse tensor.
    model_type: One of "subclass", "subclass_custom_build", "sequential", or
      "functional". When None, defaults to `get_model_type`.

  Returns:
    A Keras model.
  Nr   )r8   r"   raggedsparse)rv   r   r   c                  s    S )Nr   r   )r   r   r   <lambda>]      z'get_model_from_layers.<locals>.<lambda>)rv   r   )r,   r"   r   r   r   zACannot create a functional model from layers with no input shape.zUnknown model type {})ri   r   rN   r   r   r   rW   rX   Z
InputLayerrG   rO   r   )r   r,   rY   rv   Zinput_raggedZinput_sparser_   rr   r   r\   r[   rs   r   )r   r   get_model_from_layers:  sN    


r   c               @   s   e Zd Zdd Zdd ZdS )Biasc             C   s   | j dddd| _d S )Nbias)r2   r(   )Zinitializer)Zadd_variabler   )r   r,   r   r   r   r     s    z
Bias.buildc             C   s
   || j  S )N)r   )r   rr   r   r   r   r     s    z	Bias.callN)r;   r   r   r   r   r   r   r   r   r   ~  s   r   c                   s*   e Zd ZdZd fdd	Zdd Z  ZS )_MultiIOSubclassModelzMulti IO Keras subclass model.Nc                s.   t t| j|d || _|| _|| _|| _d S )N)rv   )rx   r   rM   _shared_input_branch	_branch_a	_branch_b_shared_output_branch)r   branch_abranch_bshared_input_branchshared_output_branchrv   )rV   r   r   rM     s
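

# Illustrative sketch (not part of the original module): assembling a model
# from plain layer instances with `get_model_from_layers`, independent of the
# model type being exercised. The helper name
# `_example_get_model_from_layers_usage` is hypothetical.
def _example_get_model_from_layers_usage():
  model_layers = [layers.Dense(4, activation='relu'), Bias()]
  with model_type_scope('functional'):
    model = get_model_from_layers(model_layers, input_shape=(3,))
  model.compile(optimizer='rmsprop', loss='mse')
  return model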
    z_MultiIOSubclassModel.__init__c             K   s   | j r(x| j D ]}||}qW |}|}n$t|trD|d }|d }n|\}}x| jD ]}||}qTW x| jD ]}||}qlW ||g}| jrx| jD ]}||}qW |S )NZinput_1Zinput_2)r   r=   dictr   r   r   )r   rr   rA   r[   aboutsr   r   r   r     s$    

z_MultiIOSubclassModel.call)NNN)r;   r   r   r   rM   r   r   r   r   )rV   r   r     s    r   c                   s2   e Zd ZdZd	 fdd	Zdd Zdd Z  ZS )
 _MultiIOSubclassModelCustomBuildz>Multi IO Keras subclass model that uses a custom build method.Nc                sB   t t|   || _|| _|| _|| _d | _d | _d | _	d | _
d S )N)rx   r   rM   _shared_input_branch_func_branch_a_func_branch_b_func_shared_output_branch_funcr   r   r   r   )r   Zbranch_a_funcZbranch_b_funcZshared_input_branch_funcZshared_output_branch_func)rV   r   r   rM     s    z)_MultiIOSubclassModelCustomBuild.__init__c             C   s<   |   r|   | _|  | _|  | _|  r8|  | _d S )N)r   r   r   r   r   r   r   r   )r   r,   r   r   r   r     s    


z&_MultiIOSubclassModelCustomBuild.buildc             K   s   | j r(x| j D ]}||}qW |}|}n|\}}x| jD ]}||}q8W x| jD ]}||}qPW ||f}| jrx| jD ]}||}qvW |S )N)r   r   r   r   )r   rr   rA   r[   r   r   r   r   r   r   r     s    z%_MultiIOSubclassModelCustomBuild.call)NN)r;   r   r   r   rM   r   r   r   r   r   )rV   r   r     s
    	r   c                sN  rd }dd n( d d f} dd  dd t  }|dkr^t S |dkrt fddfddfd	dfd
dS |dkrtd|dkr<r|}xD ]}||}qW |}|}	n|\}}	x D ]}||}qW xD ]}||	}	qW ||	f}
r0xD ]}||
}
qW t||
S td|dS )a  Builds a multi-io model that contains two branches.

  The produced model will be of the type specified by `get_model_type`.

  To build a two-input, two-output model:
    Specify a list of layers for branch a and branch b, but do not specify any
    shared input branch or shared output branch. The resulting model will apply
    each branch to a different input, to produce two outputs.

    The first value in branch_a must be the Keras 'Input' layer for branch a,
    and the first value in branch_b must be the Keras 'Input' layer for
    branch b.

    example usage:
    ```
    branch_a = [Input(shape=(2,), name='a'), Dense(), Dense()]
    branch_b = [Input(shape=(3,), name='b'), Dense(), Dense()]

    model = get_multi_io_model(branch_a, branch_b)
    ```

  To build a two-input, one-output model:
    Specify a list of layers for branch a and branch b, and specify a
    shared output branch. The resulting model will apply
    each branch to a different input. It will then apply the shared output
    branch to a tuple containing the intermediate outputs of each branch,
    to produce a single output. The first layer in the shared_output_branch
    must be able to merge a tuple of two tensors.

    The first value in branch_a must be the Keras 'Input' layer for branch a,
    and the first value in branch_b must be the Keras 'Input' layer for
    branch b.

    example usage:
    ```
    input_branch_a = [Input(shape=(2,), name='a'), Dense(), Dense()]
    input_branch_b = [Input(shape=(3,), name='b'), Dense(), Dense()]
    shared_output_branch = [Concatenate(), Dense(), Dense()]

    model = get_multi_io_model(input_branch_a, input_branch_b,
                               shared_output_branch=shared_output_branch)
    ```
  To build a one-input, two-output model:
    Specify a list of layers for branch a and branch b, and specify a
    shared input branch. The resulting model will take one input, and apply
    the shared input branch to it. It will then respectively apply each branch
    to that intermediate result in parallel, to produce two outputs.

    The first value in the shared_input_branch must be the Keras 'Input' layer
    for the whole model. Branch a and branch b should not contain any Input
    layers.

    example usage:
    ```
    shared_input_branch = [Input(shape=(2,), name='in'), Dense(), Dense()]
    output_branch_a = [Dense(), Dense()]
    output_branch_b = [Dense(), Dense()]


    model = get_multi_io_model(output__branch_a, output_branch_b,
                               shared_input_branch=shared_input_branch)
    ```

  Args:
    branch_a: A sequence of layers for branch a of the model.
    branch_b: A sequence of layers for branch b of the model.
    shared_input_branch: An optional sequence of layers to apply to a single
      input, before applying both branches to that intermediate result. If set,
      the model will take only one input instead of two. Defaults to None.
    shared_output_branch: An optional sequence of layers to merge the
      intermediate results produced by branch a and branch b. If set,
      the model will produce only one output instead of two. Defaults to None.

  Returns:
    A multi-io model of the type specified by `get_model_type`, specified
    by the different branches.
  r   r2   Nr   r   c                  s    S )Nr   r   )r   r   r   r   <  r   z$get_multi_io_model.<locals>.<lambda>c                  s    S )Nr   r   )r   r   r   r   =  r   c                  s    S )Nr   r   )r   r   r   r   >  r   c                  s    S )Nr   r   )r   r   r   r   ?  r   r   z>Cannot use `get_multi_io_model` to construct sequential modelsr   zUnknown model type {})ri   r   r   rG   r   rO   r   )r   r   r   r   rr   r_   Za_and_br[   r   r   rs   r   )r   r   r   r   r   get_multi_io_model  sF    S






r   )r   r   r   r   r   r   Zsgdc          
   K   s@   yt |  f |S  tk
r:   td| tt  Y nX dS )a  Get the v2 optimizer requested.

def get_v2_optimizer(name, **kwargs):
  """Get the v2 optimizer requested.

  This is only necessary until v2 are the default, as we are testing in Eager,
  and Eager + v1 optimizers fail tests. When we are in v2, the strings alone
  should be sufficient, and this mapping can theoretically be removed.

  Args:
    name: string name of Keras v2 optimizer.
    **kwargs: any kwargs to pass to the optimizer constructor.

  Returns:
    Initialized Keras v2 optimizer.

  Raises:
    ValueError: if an unknown name was passed.
  """
  try:
    return _V2_OPTIMIZER_MAP[name](**kwargs)
  except KeyError:
    raise ValueError(
        'Could not find requested v2 optimizer: {}\nValid choices: {}'.format(
            name, list(_V2_OPTIMIZER_MAP.keys())))


def get_expected_metric_variable_names(var_names, name_suffix=''):
  """Returns expected metric variable names given names and prefix/suffix."""
  if tf2.enabled() or context.executing_eagerly():
    # In V2 or eager mode, the suffix is not expected in the variable name.
    return [n + ':0' for n in var_names]
  return [n + name_suffix + ':0' for n in var_names]
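

# Illustrative sketch (not part of the original module): requesting a v2
# optimizer by name and handing it to `Model.compile`. The helper name
# `_example_get_v2_optimizer_usage` and the hyperparameters are assumptions.
def _example_get_v2_optimizer_usage():
  optimizer = get_v2_optimizer('adam', learning_rate=0.001)
  model = get_small_sequential_mlp(num_hidden=8, num_classes=2, input_dim=4)
  model.compile(optimizer=optimizer, loss='sparse_categorical_crossentropy')
  return model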
   t | dS )z=Decorator for enabling the layer V2 dtype behavior on a test.T)_set_v2_dtype_behavior)fnr   r   r   enable_v2_dtype_behavior  s    r   c             C   s
   t | dS )z>Decorator for disabling the layer V2 dtype behavior on a test.F)r   )r   r   r   r   disable_v2_dtype_behavior  s    r   c                s$   t  fdd}t|S )zCReturns version of 'fn' that runs with v2 dtype behavior on or off.c                 s$   t j} t _z
| |S |t _X d S )N)r   ZV2_DTYPE_BEHAVIOR)r   rA   Zv2_dtype_behavior)r   r   r   r   wrapper  s
    
z'_set_v2_dtype_behavior.<locals>.wrapper)	functoolswrapsr   Zmake_decorator)r   r   r   r   )r   r   r   r     s    	r   c          	   c   s6   | rt  rd}nd}t| dV  W dQ R X dS )z&Uses gpu when requested and available.z/device:GPU:0z/device:CPU:0N)r	   Zis_gpu_availabler   device)should_use_gpudevr   r   r   r     s
    r   c            	   c   s    t dd dV  W dQ R X dS )z&Uses gpu when requested and available.T)r   N)r   r   r   r   r   use_gpu  s    r   c                s    fdd}|S )a  Generate class-level decorator from given method-level decorator.
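

# Illustrative sketch (not part of the original module): running a small
# computation on the GPU when one is available, falling back to CPU otherwise.
# The helper name `_example_use_gpu_usage` is hypothetical.
def _example_use_gpu_usage():
  with use_gpu():
    model = get_small_sequential_mlp(num_hidden=4, num_classes=2, input_dim=3)
    return model.predict(np.zeros((1, 3), dtype=np.float32))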

def for_all_test_methods(decorator, *args, **kwargs):
  """Generate class-level decorator from given method-level decorator.

  It is expected for the given decorator to take some arguments and return
  a method that is then called on the test method to produce a decorated
  method.

  Args:
    decorator: The decorator to apply.
    *args: Positional arguments
    **kwargs: Keyword arguments
  Returns: Function that will decorate a given classes test methods with the
    decorator.
  """

  def all_test_methods_impl(cls):
    """Apply decorator to all test methods in class."""
    for name in dir(cls):
      value = getattr(cls, name)
      if callable(value) and name.startswith(
          'test') and name != 'test_session':
        setattr(cls, name, decorator(*args, **kwargs)(value))
    return cls

  return all_test_methods_impl

def run_without_tensor_float_32(description):
  """Execute test with TensorFloat-32 disabled.

  While almost every real-world deep learning model runs fine with
  TensorFloat-32, many tests use assertAllClose or similar methods.
  TensorFloat-32 matmuls typically will cause such methods to fail with the
  default tolerances.

  Args:
    description: A description used for documentation purposes, describing why
      the test requires TensorFloat-32 to be disabled.

  Returns:
    Decorator which runs a test with TensorFloat-32 disabled.
  """

  def decorator(f):

    @functools.wraps(f)
    def decorated(self, *args, **kwargs):
      allowed = config.tensor_float_32_execution_enabled()
      try:
        config.enable_tensor_float_32_execution(False)
        f(self, *args, **kwargs)
      finally:
        config.enable_tensor_float_32_execution(allowed)

    return decorated

  return decorator


def run_all_without_tensor_float_32(description):
  """Execute all tests in a class with TensorFloat-32 disabled."""
  return for_all_test_methods(run_without_tensor_float_32, description)

def run_v2_only(func=None):
  """Execute the decorated test only if running in v2 mode.

  This function is intended to be applied to tests that exercise v2 only
  functionality. If the test is run in v1 mode it will simply be skipped.

  See go/tf-test-decorator-cheatsheet for the decorators to use in different
  v1/v2/eager/graph combinations.

  Args:
    func: function to be annotated. If `func` is None, this method returns a
      decorator that can be applied to a function. If `func` is not None this
      returns the decorator applied to `func`.

  Returns:
    Returns a decorator that will conditionally skip the decorated test method.
  """

  def decorator(f):
    if tf_inspect.isclass(f):
      raise ValueError('`run_v2_only` only supports test methods.')

    def decorated(self, *args, **kwargs):
      if not tf2.enabled():
        self.skipTest('Test is only compatible with v2')

      return f(self, *args, **kwargs)

    return decorated

  if func is not None:
    return decorator(func)

  return decorator
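

# Illustrative sketch (not part of the original module): marking a single test
# method as v2-only. The test class and method names are placeholders; a real
# test would subclass `tf.test.TestCase`, which already provides `skipTest`.
def _example_run_v2_only_usage():

  class _ExampleV2Test(object):

    def skipTest(self, reason):
      raise RuntimeError('skipped: %s' % reason)

    @run_v2_only
    def test_needs_v2(self):
      return tf2.enabled()

  return _ExampleV2Test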
zrun_v2_only.<locals>.decoratorNr   )funcr   r   r   r   run_v2_only  s    r   c           
      s   dd }g }xDt |  |dD ]0\ }t|ts6|g}| fdd|D  qW dd tj| D }g }x\|D ]T}t|tjst	d
dd | D }|tt| d	d
|fg  qpW |S )a5  Generate combinations based on its keyword arguments using combine().

  This function calls combine() and appends a testcase name to the list of
  dictionaries returned. The 'testcase_name' key is a required for named
  parameterized tests.

  Args:
    **kwargs: keyword arguments of form `option=[possibilities, ...]` or
      `option=the_only_possibility`.

  Returns:
    a list of dictionaries for each combination. Keys in the dictionaries are
    the keyword argument names.  Each key has one value - one of the
    corresponding keyword argument values.
  c             S   s   | d S )Nr   r   )kr   r   r   r   +  r   z:generate_combinations_with_testcase_name.<locals>.<lambda>)keyc                s   g | ]} |fqS r   r   )r   r>   )r   r   r   r   0  s    z<generate_combinations_with_testcase_name.<locals>.<listcomp>c             S   s   g | ]}t |qS r   )collectionsOrderedDict)r   resultr   r   r   r   2  s   r   c             S   s<   g | ]4\}}d  dttj|dttjt|qS )z_{}_{}r   )r   joinfilterstrisalnum)r   r   r>   r   r   r   r   8  s   Ztestcase_namez_test{})sorteditemsr=   rH   r   	itertoolsproductr   r   r:   r   r   )rA   Zsort_by_keycombinationsvaluesZnamed_combinationsZcombinationrv   r   )r   r   (generate_combinations_with_testcase_name  s&    


r   )N)NNNNNNNTNNNN)N)NNNNNN)NN)r   )N)dr   r   
contextlibr   r   	threadingnumpyr   Ztensorflow.pythonr   Ztensorflow.python.eagerr   Ztensorflow.python.frameworkr   r   r   r   r   r	   Ztensorflow.python.kerasr
   r   r   Ztensorflow.python.keras.enginer   Z$tensorflow.python.keras.optimizer_v2r   Zadadelta_v2r   Z
adagrad_v2r   Zadam_v2r   Z	adamax_v2r   Zgradient_descent_v2r   Znadam_v2r   Z
rmsprop_v2Ztensorflow.python.keras.utilsr   r   Ztensorflow.python.utilr   r   r   r1   Zdisable_cudnn_autotuner^   localrS   r_   rF   rd   re   contextmanagerra   rb   rU   rf   rg   rh   ri   rq   rt   rO   ru   r   r   r   r   r   r   r   ZLayerr   r   r   r   ZAdadeltaZAdagradZAdamZAdamaxZNadamZRMSpropZSGDr   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   <module>   s   
            R
		

$     
>	$3 
 

	!
$
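

# Illustrative sketch (not part of the original module): what the generated
# combinations look like for two options. The expected dictionaries noted in
# the comments follow directly from the implementation above.
def _example_generate_combinations_usage():
  combos = generate_combinations_with_testcase_name(
      mode=['eager', 'graph'], use_bias=True)
  # Two combinations are produced, each carrying a 'testcase_name' entry, e.g.
  # {'mode': 'eager', 'use_bias': True,
  #  'testcase_name': '_test_mode_eager_usebias_True'}.
  assert len(combos) == 2
  assert all('testcase_name' in combo for combo in combos)
  return combos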