"""Utilities for file download and caching."""
from abc import abstractmethod
from contextlib import closing
import functools
import hashlib
import multiprocessing.dummy
import os
import queue
import random
import shutil
import sys
import tarfile
import threading
import time
import typing
import urllib
import weakref
import zipfile

import numpy as np

from six.moves.urllib.request import urlopen
from tensorflow.python.framework import ops
from tensorflow.python.keras.utils import tf_inspect
from tensorflow.python.keras.utils.generic_utils import Progbar
from tensorflow.python.keras.utils.io_utils import path_to_string
from tensorflow.python.util.tf_export import keras_export


if sys.version_info[0] == 2:

  def urlretrieve(url, filename, reporthook=None, data=None):
    """Replacement for `urlretrieve` for Python 2.

    Under Python 2, `urlretrieve` relies on `FancyURLopener` from legacy
    `urllib` module, known to have issues with proxy management.

    Args:
        url: url to retrieve.
        filename: where to store the retrieved data locally.
        reporthook: a hook function that will be called once on establishment of
          the network connection and once after each block read thereafter. The
          hook will be passed three arguments; a count of blocks transferred so
          far, a block size in bytes, and the total size of the file.
        data: `data` argument passed to `urlopen`.
    """

    def chunk_read(response, chunk_size=8192, reporthook=None):
      content_type = response.info().get('Content-Length')
      total_size = -1
      if content_type is not None:
        total_size = int(content_type.strip())
      count = 0
      while True:
        chunk = response.read(chunk_size)
        count += 1
        if reporthook is not None:
          reporthook(count, chunk_size, total_size)
        if chunk:
          yield chunk
        else:
          break

    response = urlopen(url, data)
    with open(filename, 'wb') as fd:
      for chunk in chunk_read(response, reporthook=reporthook):
        fd.write(chunk)
else:
  from urllib.request import urlretrieve


def is_generator_or_sequence(x):
  """Check if `x` is a Keras generator type."""
  builtin_iterators = (str, list, tuple, dict, set, frozenset)
  if isinstance(x, (ops.Tensor, np.ndarray) + builtin_iterators):
    return False
  return (tf_inspect.isgenerator(x) or
          isinstance(x, Sequence) or
          isinstance(x, typing.Iterator))


def _extract_archive(file_path, path='.', archive_format='auto'):
  """Extracts an archive if it matches tar, tar.gz, tar.bz, or zip formats.

  Args:
      file_path: path to the archive file
      path: path to extract the archive file
      archive_format: Archive format to try for extracting the file.
          Options are 'auto', 'tar', 'zip', and None.
          'tar' includes tar, tar.gz, and tar.bz files.
          The default 'auto' is ['tar', 'zip'].
          None or an empty list will return no matches found.

  Returns:
      True if a match was found and an archive extraction was completed,
      False otherwise.
  """
  if archive_format is None:
    return False
  if archive_format == 'auto':
    archive_format = ['tar', 'zip']
  if isinstance(archive_format, str):
    archive_format = [archive_format]

  file_path = path_to_string(file_path)
  path = path_to_string(path)

  for archive_type in archive_format:
    if archive_type == 'tar':
      open_fn = tarfile.open
      is_match_fn = tarfile.is_tarfile
    if archive_type == 'zip':
      open_fn = zipfile.ZipFile
      is_match_fn = zipfile.is_zipfile

    if is_match_fn(file_path):
      with open_fn(file_path) as archive:
        try:
          archive.extractall(path)
        except (tarfile.TarError, RuntimeError, KeyboardInterrupt):
          # Clean up a partially extracted target before re-raising.
          if os.path.exists(path):
            if os.path.isfile(path):
              os.remove(path)
            else:
              shutil.rmtree(path)
          raise
      return True
  return False


@keras_export('keras.utils.get_file')
def get_file(fname,
             origin,
             untar=False,
             md5_hash=None,
             file_hash=None,
             cache_subdir='datasets',
             hash_algorithm='auto',
             extract=False,
             archive_format='auto',
             cache_dir=None):
  """Downloads a file from a URL if it is not already in the cache.

  By default the file at the url `origin` is downloaded to the
  cache_dir `~/.keras`, placed in the cache_subdir `datasets`,
  and given the filename `fname`. The final location of a file
  `example.txt` would therefore be `~/.keras/datasets/example.txt`.

  Files in tar, tar.gz, tar.bz, and zip formats can also be extracted.
  Passing a hash will verify the file after download. The command line
  programs `shasum` and `sha256sum` can compute the hash.

  Example:

  ```python
  path_to_downloaded_file = tf.keras.utils.get_file(
      "flower_photos",
      "https://storage.googleapis.com/download.tensorflow.org/example_images/flower_photos.tgz",
      untar=True)
  ```

  Args:
      fname: Name of the file. If an absolute path `/path/to/file.txt` is
          specified the file will be saved at that location.
      origin: Original URL of the file.
      untar: Deprecated in favor of the `extract` argument.
          Boolean, whether the file should be decompressed.
      md5_hash: Deprecated in favor of the `file_hash` argument.
          md5 hash of the file for verification.
      file_hash: The expected hash string of the file after download.
          The sha256 and md5 hash algorithms are both supported.
      cache_subdir: Subdirectory under the Keras cache dir where the file is
          saved. If an absolute path `/path/to/folder` is
          specified the file will be saved at that location.
      hash_algorithm: Select the hash algorithm to verify the file.
          Options are `'md5'`, `'sha256'`, and `'auto'`.
          The default 'auto' detects the hash algorithm in use.
      extract: True tries extracting the file as an archive, like tar or zip.
      archive_format: Archive format to try for extracting the file.
          Options are `'auto'`, `'tar'`, `'zip'`, and `None`.
          `'tar'` includes tar, tar.gz, and tar.bz files.
          The default `'auto'` corresponds to `['tar', 'zip']`.
          None or an empty list will return no matches found.
      cache_dir: Location to store cached files, when None it
          defaults to the default directory `~/.keras/`.

  Returns:
      Path to the downloaded file
  """
  if cache_dir is None:
    cache_dir = os.path.join(os.path.expanduser('~'), '.keras')
  if md5_hash is not None and file_hash is None:
    file_hash = md5_hash
    hash_algorithm = 'md5'
  datadir_base = os.path.expanduser(cache_dir)
  if not os.access(datadir_base, os.W_OK):
    datadir_base = os.path.join('/tmp', '.keras')
  datadir = os.path.join(datadir_base, cache_subdir)
  _makedirs_exist_ok(datadir)

  fname = path_to_string(fname)

  if untar:
    untar_fpath = os.path.join(datadir, fname)
    fpath = untar_fpath + '.tar.gz'
  else:
    fpath = os.path.join(datadir, fname)

  download = False
  if os.path.exists(fpath):
    # File found; verify integrity if a hash was provided.
    if file_hash is not None:
      if not validate_file(fpath, file_hash, algorithm=hash_algorithm):
        print('A local file was found, but it seems to be '
              'incomplete or outdated because the ' + hash_algorithm +
              ' file hash does not match the original value of ' + file_hash +
              ' so we will re-download the data.')
        download = True
  else:
    download = True

  if download:
    print('Downloading data from', origin)

    class ProgressTracker(object):
      # Maintain progbar for the lifetime of download.
      progbar = None

    def dl_progress(count, block_size, total_size):
      if ProgressTracker.progbar is None:
        if total_size == -1:
          total_size = None
        ProgressTracker.progbar = Progbar(total_size)
      else:
        ProgressTracker.progbar.update(count * block_size)

    error_msg = 'URL fetch failure on {}: {} -- {}'
    try:
      try:
        urlretrieve(origin, fpath, dl_progress)
      except urllib.error.HTTPError as e:
        raise Exception(error_msg.format(origin, e.code, e.msg))
      except urllib.error.URLError as e:
        raise Exception(error_msg.format(origin, e.errno, e.reason))
    except (Exception, KeyboardInterrupt) as e:
      # Remove a partially downloaded file before propagating the error.
      if os.path.exists(fpath):
        os.remove(fpath)
      raise
    ProgressTracker.progbar = None

  if untar:
    if not os.path.exists(untar_fpath):
      _extract_archive(fpath, datadir, archive_format='tar')
    return untar_fpath

  if extract:
    _extract_archive(fpath, datadir, archive_format)

  return fpath


def _makedirs_exist_ok(datadir):
  os.makedirs(datadir, exist_ok=True)


def _resolve_hasher(algorithm, file_hash=None):
  """Returns hash algorithm as hashlib function."""
  if algorithm == 'sha256':
    return hashlib.sha256()

  if algorithm == 'auto' and file_hash and len(file_hash) == 64:
    return hashlib.sha256()

  # This is used only for legacy purposes.
  return hashlib.md5()


def _hash_file(fpath, algorithm='sha256', chunk_size=65535):
  """Calculates a file sha256 or md5 hash.

  Example:

  ```python
  _hash_file('/path/to/file.zip')
  'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
  ```

  Args:
      fpath: path to the file being validated
      algorithm: hash algorithm, one of `'auto'`, `'sha256'`, or `'md5'`.
          The default `'auto'` detects the hash algorithm in use.
      chunk_size: Bytes to read at a time, important for large files.

  Returns:
      The file hash
  """
  if isinstance(algorithm, str):
    hasher = _resolve_hasher(algorithm)
  else:
    hasher = algorithm

  with open(fpath, 'rb') as fpath_file:
    for chunk in iter(lambda: fpath_file.read(chunk_size), b''):
      hasher.update(chunk)

  return hasher.hexdigest()


def validate_file(fpath, file_hash, algorithm='auto', chunk_size=65535):
  """Validates a file against a sha256 or md5 hash.

  Args:
      fpath: path to the file being validated
      file_hash: The expected hash string of the file.
          The sha256 and md5 hash algorithms are both supported.
      algorithm: Hash algorithm, one of 'auto', 'sha256', or 'md5'.
          The default 'auto' detects the hash algorithm in use.
      chunk_size: Bytes to read at a time, important for large files.

  Returns:
      Whether the file is valid
  """
  hasher = _resolve_hasher(algorithm, file_hash)

  if str(_hash_file(fpath, hasher, chunk_size)) == str(file_hash):
    return True
  else:
    return False


class ThreadsafeIter(object):
  """Wrap an iterator with a lock and propagate exceptions to all threads."""

  def __init__(self, it):
    self.it = it
    self.lock = threading.Lock()
    # Once the wrapped iterator has raised, keep raising the same exception
    # for every thread instead of a misleading StopIteration.
    self._exception = None

  def __iter__(self):
    return self

  def next(self):
    return self.__next__()

  def __next__(self):
    with self.lock:
      if self._exception:
        raise self._exception  # pylint: disable=raising-bad-type

      try:
        return next(self.it)
      except Exception as e:
        self._exception = e
        raise


def threadsafe_generator(f):

  @functools.wraps(f)
  def g(*a, **kw):
    return ThreadsafeIter(f(*a, **kw))

  return g


@keras_export('keras.utils.Sequence')
class Sequence(object):
  """Base object for fitting to a sequence of data, such as a dataset.

  Every `Sequence` must implement the `__getitem__` and the `__len__` methods.
  If you want to modify your dataset between epochs you may implement
  `on_epoch_end`.
  The method `__getitem__` should return a complete batch.

  Notes:

  `Sequence` is a safer way to do multiprocessing. This structure guarantees
  that the network will only train once on each sample per epoch, which is not
  the case with generators.

  Examples:

  ```python
  from skimage.io import imread
  from skimage.transform import resize
  import numpy as np
  import math

  # Here, `x_set` is a list of paths to the images
  # and `y_set` are the associated classes.

  class CIFAR10Sequence(Sequence):

      def __init__(self, x_set, y_set, batch_size):
          self.x, self.y = x_set, y_set
          self.batch_size = batch_size

      def __len__(self):
          return math.ceil(len(self.x) / self.batch_size)

      def __getitem__(self, idx):
          batch_x = self.x[idx * self.batch_size:(idx + 1) * self.batch_size]
          batch_y = self.y[idx * self.batch_size:(idx + 1) * self.batch_size]

          return np.array([
              resize(imread(file_name), (200, 200))
              for file_name in batch_x]), np.array(batch_y)
  ```
  """

  @abstractmethod
  def __getitem__(self, index):
    """Gets batch at position `index`.

    Args:
        index: position of the batch in the Sequence.

    Returns:
        A batch
    """
    raise NotImplementedError

  @abstractmethod
  def __len__(self):
    """Number of batches in the Sequence.

    Returns:
        The number of batches in the Sequence.
    """
    raise NotImplementedError

  def on_epoch_end(self):
    """Method called at the end of every epoch."""
    pass

  def __iter__(self):
    """Create a generator that iterates over the Sequence."""
    for item in (self[i] for i in range(len(self))):
      yield item


def iter_sequence_infinite(seq):
  """Iterates indefinitely over a Sequence.

  Args:
    seq: `Sequence` instance.

  Yields:
    Batches of data from the `Sequence`.
  """
  while True:
    for item in seq:
      yield item


# Global state shared with worker processes and threads.
_SHARED_SEQUENCES = {}
# A multiprocessing.Value used to hand out unique enqueuer ids.
_SEQUENCE_COUNTER = None

# Track the pools we create so that they can be inspected and cleaned up.
_DATA_POOLS = weakref.WeakSet()
_WORKER_ID_QUEUE = None  # Only created if needed.
_WORKER_IDS = set()
_FORCE_THREADPOOL = False
_FORCE_THREADPOOL_LOCK = threading.RLock()


def dont_use_multiprocessing_pool(f):

  @functools.wraps(f)
  def wrapped(*args, **kwargs):
    with _FORCE_THREADPOOL_LOCK:
      global _FORCE_THREADPOOL
      old_force_threadpool, _FORCE_THREADPOOL = _FORCE_THREADPOOL, True
      out = f(*args, **kwargs)
      _FORCE_THREADPOOL = old_force_threadpool
      return out

  return wrapped


def get_pool_class(use_multiprocessing):
  if not use_multiprocessing or _FORCE_THREADPOOL:
    return multiprocessing.dummy.Pool  # ThreadPool
  return multiprocessing.Pool


def get_worker_id_queue():
  """Lazily create the queue to track worker ids."""
  global _WORKER_ID_QUEUE
  if _WORKER_ID_QUEUE is None:
    _WORKER_ID_QUEUE = multiprocessing.Queue()
  return _WORKER_ID_QUEUE


def init_pool(seqs):
  global _SHARED_SEQUENCES
  _SHARED_SEQUENCES = seqs


def get_index(uid, i):
  """Get the value from the Sequence `uid` at index `i`.

  To allow multiple Sequences to be used at the same time, we use `uid` to
  get a specific one. A single Sequence would cause the validation to
  overwrite the training Sequence.

  Args:
      uid: int, Sequence identifier
      i: index

  Returns:
      The value at index `i`.
  """
  return _SHARED_SEQUENCES[uid][i]


@keras_export('keras.utils.SequenceEnqueuer')
class SequenceEnqueuer(object):
  """Base class to enqueue inputs.

  The task of an Enqueuer is to use parallelism to speed up preprocessing.
  This is done with processes or threads.

  Example:

  ```python
      enqueuer = SequenceEnqueuer(...)
      enqueuer.start()
      data_stream = enqueuer.get()
      for data in data_stream:
          # Use the inputs; training, evaluating, predicting.
          # ... stop sometime.
      enqueuer.stop()
  ```

  The `enqueuer.get()` call should return an infinite stream of data.
  """

  def __init__(self, sequence, use_multiprocessing=False):
    self.sequence = sequence
    self.use_multiprocessing = use_multiprocessing

    global _SEQUENCE_COUNTER
    if _SEQUENCE_COUNTER is None:
      try:
        _SEQUENCE_COUNTER = multiprocessing.Value('i', 0)
      except OSError:
        # The OS does not allow us to use multiprocessing; fall back to a
        # plain int for enqueuer indexing.
        _SEQUENCE_COUNTER = 0

    if isinstance(_SEQUENCE_COUNTER, int):
      self.uid = _SEQUENCE_COUNTER
      _SEQUENCE_COUNTER += 1
    else:
      # Doing `multiprocessing.Value += x` is not process-safe.
      with _SEQUENCE_COUNTER.get_lock():
        self.uid = _SEQUENCE_COUNTER.value
        _SEQUENCE_COUNTER.value += 1

    self.workers = 0
    self.executor_fn = None
    self.queue = None
    self.run_thread = None
    self.stop_signal = None

  def is_running(self):
    return self.stop_signal is not None and not self.stop_signal.is_set()

  def start(self, workers=1, max_queue_size=10):
    """Starts the handler's workers.

    Args:
        workers: Number of workers.
        max_queue_size: queue size
            (when full, workers could block on `put()`)
    """
    if self.use_multiprocessing:
      self.executor_fn = self._get_executor_init(workers)
    else:
      # We do not need the init since it's threads.
      self.executor_fn = lambda _: get_pool_class(False)(workers)
    self.workers = workers
    self.queue = queue.Queue(max_queue_size)
    self.stop_signal = threading.Event()
    self.run_thread = threading.Thread(target=self._run)
    self.run_thread.daemon = True
    self.run_thread.start()

  def _send_sequence(self):
    """Sends current Iterable to all workers."""
    # For new processes that may spawn.
    _SHARED_SEQUENCES[self.uid] = self.sequence

  def stop(self, timeout=None):
    """Stops running threads and waits for them to exit, if necessary.

    Should be called by the same thread which called `start()`.

    Args:
        timeout: maximum time to wait on `thread.join()`
    """
    self.stop_signal.set()
    with self.queue.mutex:
      self.queue.queue.clear()
      self.queue.unfinished_tasks = 0
      self.queue.not_full.notify()
    self.run_thread.join(timeout)
    _SHARED_SEQUENCES[self.uid] = None

  def __del__(self):
    if self.is_running():
      self.stop()

  @abstractmethod
  def _run(self):
    """Submits request to the executor and queue the `Future` objects."""
    raise NotImplementedError

  @abstractmethod
  def _get_executor_init(self, workers):
    """Gets the Pool initializer for multiprocessing.

    Args:
        workers: Number of workers.

    Returns:
        Function, a Function to initialize the pool
    """
    raise NotImplementedError

  @abstractmethod
  def get(self):
    """Creates a generator to extract data from the queue.

    Skip the data if it is `None`.

    Returns:
        Generator yielding tuples `(inputs, targets)`
            or `(inputs, targets, sample_weights)`.
    """
    raise NotImplementedError


@keras_export('keras.utils.OrderedEnqueuer')
class OrderedEnqueuer(SequenceEnqueuer):
  """Builds an Enqueuer from a Sequence.

  Args:
      sequence: A `tf.keras.utils.data_utils.Sequence` object.
      use_multiprocessing: use multiprocessing if True, otherwise threading
      shuffle: whether to shuffle the data at the beginning of each epoch
  """

  def __init__(self, sequence, use_multiprocessing=False, shuffle=False):
    super(OrderedEnqueuer, self).__init__(sequence, use_multiprocessing)
    self.shuffle = shuffle

  def _get_executor_init(self, workers):
    """Gets the Pool initializer for multiprocessing.

    Args:
        workers: Number of workers.

    Returns:
        Function, a Function to initialize the pool
    """

    def pool_fn(seqs):
      pool = get_pool_class(True)(
          workers,
          initializer=init_pool_generator,
          initargs=(seqs, None, get_worker_id_queue()))
      _DATA_POOLS.add(pool)
      return pool

    return pool_fn

  def _wait_queue(self):
    """Wait for the queue to be empty."""
    while True:
      time.sleep(0.1)
      if self.queue.unfinished_tasks == 0 or self.stop_signal.is_set():
        return

  def _run(self):
    """Submits request to the executor and queue the `Future` objects."""
    sequence = list(range(len(self.sequence)))
    self._send_sequence()  # Share the initial sequence.
    while True:
      if self.shuffle:
        random.shuffle(sequence)

      with closing(self.executor_fn(_SHARED_SEQUENCES)) as executor:
        for i in sequence:
          if self.stop_signal.is_set():
            return

          self.queue.put(
              executor.apply_async(get_index, (self.uid, i)), block=True)

        # Done with the current epoch, waiting for the final batches.
        self._wait_queue()

        if self.stop_signal.is_set():
          # We're done.
          return

      # Call the internal on epoch end.
      self.sequence.on_epoch_end()
      self._send_sequence()  # Update the pool.

  def get(self):
    """Creates a generator to extract data from the queue.

    Skip the data if it is `None`.

    Yields:
        The next element in the queue, i.e. a tuple
        `(inputs, targets)` or
        `(inputs, targets, sample_weights)`.
    """
    while self.is_running():
      try:
        inputs = self.queue.get(block=True, timeout=5).get()
        if self.is_running():
          self.queue.task_done()
        if inputs is not None:
          yield inputs
      except queue.Empty:
        pass
      except Exception as e:  # pylint: disable=broad-except
        self.stop()
        raise e


def init_pool_generator(gens, random_seed=None, id_queue=None):
  """Initializer function for pool workers.

  Args:
    gens: State which should be made available to worker processes.
    random_seed: An optional value with which to seed child processes.
    id_queue: A multiprocessing Queue of worker ids. This is used to indicate
      that a worker process was created by Keras and can be terminated using
      the cleanup_all_keras_forkpools utility.
  """
  global _SHARED_SEQUENCES
  _SHARED_SEQUENCES = gens

  worker_proc = multiprocessing.current_process()

  # A more descriptive process name helps when diagnosing orphaned workers.
  worker_proc.name = 'Keras_worker_{}'.format(worker_proc.name)

  if random_seed is not None:
    np.random.seed(random_seed + worker_proc.ident)

  if id_queue is not None:
    # If a worker dies during init, the pool will just create a replacement.
    id_queue.put(worker_proc.ident, block=True, timeout=0.1)


def next_sample(uid):
  """Gets the next value from the generator `uid`.

  To allow multiple generators to be used at the same time, we use `uid` to
  get a specific one. A single generator would cause the validation to
  overwrite the training generator.

  Args:
      uid: int, generator identifier

  Returns:
      The next value of generator `uid`.
  """
  return next(_SHARED_SEQUENCES[uid])


@keras_export('keras.utils.GeneratorEnqueuer')
class GeneratorEnqueuer(SequenceEnqueuer):
  """Builds a queue out of a data generator.

  The provided generator can be finite, in which case the class will throw
  a `StopIteration` exception.

  Args:
      generator: a generator function which yields data
      use_multiprocessing: use multiprocessing if True, otherwise threading
      random_seed: Initial seed for workers,
          will be incremented by one for each worker.
  """

  def __init__(self, generator, use_multiprocessing=False, random_seed=None):
    super(GeneratorEnqueuer, self).__init__(generator, use_multiprocessing)
    self.random_seed = random_seed

  def _get_executor_init(self, workers):
    """Gets the Pool initializer for multiprocessing.

    Args:
      workers: Number of workers.

    Returns:
        A Function to initialize the pool
    """

    def pool_fn(seqs):
      pool = get_pool_class(True)(
          workers,
          initializer=init_pool_generator,
          initargs=(seqs, self.random_seed, get_worker_id_queue()))
      _DATA_POOLS.add(pool)
      return pool

    return pool_fn

  def _run(self):
    """Submits request to the executor and queue the `Future` objects."""
    self._send_sequence()  # Share the initial generator.
    with closing(self.executor_fn(_SHARED_SEQUENCES)) as executor:
      while True:
        if self.stop_signal.is_set():
          return

        self.queue.put(
            executor.apply_async(next_sample, (self.uid,)), block=True)

  def get(self):
    """Creates a generator to extract data from the queue.

    Skip the data if it is `None`.

    Yields:
        The next element in the queue, i.e. a tuple
        `(inputs, targets)` or
        `(inputs, targets, sample_weights)`.
    """
    try:
      while self.is_running():
        inputs = self.queue.get(block=True).get()
        self.queue.task_done()
        if inputs is not None:
          yield inputs
    except StopIteration:
      # Special case for finite generators: drain what is already queued.
      last_ones = []
      while self.queue.qsize() > 0:
        last_ones.append(self.queue.get(block=True))
      # Wait for the pending futures to complete.
      for f in last_ones:
        f.wait()
      # Keep the good ones.
      last_ones = [future.get() for future in last_ones if future.successful()]
      for inputs in last_ones:
        if inputs is not None:
          yield inputs
    except Exception as e:  # pylint: disable=broad-except
      self.stop()
      if 'generator already executing' in str(e):
        raise RuntimeError(
            'Your generator is NOT thread-safe. '
            'Keras requires a thread-safe generator when '
            '`use_multiprocessing=False, workers > 1`. ')
      raise e
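

# Illustrative usage sketch, not part of the recovered module API: it shows
# how `get_file` and `validate_file` fit together. The `origin`, hash, and
# default file name below are placeholders supplied by the caller.
def _example_download_and_verify(origin, sha256_hash, fname='example.tar.gz'):
  """Downloads `origin` into the Keras cache and re-checks its sha256 hash."""
  fpath = get_file(fname, origin, file_hash=sha256_hash, extract=True)
  # `get_file` already validates when `file_hash` is passed; re-validating
  # here simply demonstrates the standalone helper.
  assert validate_file(fpath, sha256_hash, algorithm='sha256')
  return fpath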
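

# Illustrative usage sketch, not part of the recovered module API: a minimal
# in-memory `Sequence` consumed through an `OrderedEnqueuer`, mirroring how
# batches are drained by worker threads. Both helpers below are hypothetical.
class _ExampleArraySequence(Sequence):
  """Yields `(x, y)` batches from two NumPy arrays."""

  def __init__(self, x, y, batch_size):
    self.x, self.y, self.batch_size = x, y, batch_size

  def __len__(self):
    return int(np.ceil(len(self.x) / self.batch_size))

  def __getitem__(self, idx):
    start, stop = idx * self.batch_size, (idx + 1) * self.batch_size
    return self.x[start:stop], self.y[start:stop]


def _example_ordered_enqueuer(x, y, batch_size=32, workers=2):
  """Runs one epoch through an `OrderedEnqueuer` and returns the batches."""
  seq = _ExampleArraySequence(x, y, batch_size)
  enqueuer = OrderedEnqueuer(seq, use_multiprocessing=False, shuffle=False)
  enqueuer.start(workers=workers, max_queue_size=10)
  try:
    stream = enqueuer.get()  # infinite, in-order stream over `seq`
    return [next(stream) for _ in range(len(seq))]
  finally:
    enqueuer.stop()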
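

# Illustrative usage sketch, not part of the recovered module API: wrapping a
# plain Python generator with `threadsafe_generator` so several worker threads
# can pull from it through a `GeneratorEnqueuer`. Both helpers are hypothetical.
@threadsafe_generator
def _example_batch_generator(x, y, batch_size):
  """Cycles forever over `(x, y)` in fixed-size batches."""
  while True:
    for start in range(0, len(x), batch_size):
      yield x[start:start + batch_size], y[start:start + batch_size]


def _example_generator_enqueuer(x, y, batch_size=32, steps=10, workers=2):
  """Pulls `steps` batches from the wrapped generator via worker threads."""
  enqueuer = GeneratorEnqueuer(
      _example_batch_generator(x, y, batch_size), use_multiprocessing=False)
  enqueuer.start(workers=workers, max_queue_size=10)
  try:
    stream = enqueuer.get()
    return [next(stream) for _ in range(steps)]
  finally:
    enqueuer.stop()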
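

# Illustrative usage sketch, not part of the recovered module API: inside a
# function decorated with `dont_use_multiprocessing_pool`, `get_pool_class`
# falls back to the thread-based pool even when multiprocessing is requested.
@dont_use_multiprocessing_pool
def _example_forced_threadpool():
  """Returns the pool class selected while the threadpool override is active."""
  return get_pool_class(use_multiprocessing=True)  # multiprocessing.dummy.Pool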