1- from __future__ import absolute_import
2- from __future__ import print_function
1+ from .. utils . data_utils import *
2+ import warnings
4- import tarfile
5- import os
6- import sys
7- import shutil
8- from six .moves .urllib .request import urlopen
9- from six .moves .urllib .error import URLError , HTTPError
10-
11- from ..utils .generic_utils import Progbar
12-
13-
# Under Python 2, 'urlretrieve' relies on FancyURLopener from the legacy
# urllib module, known to have issues with proxy management.
if sys.version_info[0] == 2:
    def urlretrieve(url, filename, reporthook=None, data=None):
        """Download `url` to `filename`, streaming in chunks.

        Drop-in replacement for the stdlib `urlretrieve` that avoids
        FancyURLopener's proxy problems on Python 2.

        # Arguments
            url: URL to fetch.
            filename: local path the payload is written to.
            reporthook: optional callable `(count, chunk_size, total_size)`
                invoked once per downloaded chunk.
            data: optional POST body forwarded to `urlopen`.
        """
        def chunk_read(response, chunk_size=8192, reporthook=None):
            # Servers are not required to send Content-Length; fall back
            # to -1 ("unknown", the urllib convention) instead of
            # crashing on None.strip().
            content_length = response.info().get('Content-Length')
            total_size = int(content_length.strip()) if content_length else -1
            count = 0
            while 1:
                chunk = response.read(chunk_size)
                if not chunk:
                    break
                count += 1
                if reporthook:
                    reporthook(count, chunk_size, total_size)
                yield chunk

        response = urlopen(url, data)
        with open(filename, 'wb') as fd:
            for chunk in chunk_read(response, reporthook=reporthook):
                fd.write(chunk)
else:
    from six.moves.urllib.request import urlretrieve
37-
38-
def get_file(fname, origin, untar=False):
    """Download a file from `origin` unless it is already cached.

    Files are cached under `~/.keras/datasets` (falling back to
    `/tmp/.keras/datasets` when the home directory is not writable).

    # Arguments
        fname: name of the cached file (and of the extracted directory
            when `untar=True`; the archive is then saved as
            `fname + '.tar.gz'`).
        origin: URL to download the file from.
        untar: whether the downloaded file is a gzipped tarball that
            should be extracted into the cache directory.

    # Returns
        Path to the cached file, or to the extracted directory when
        `untar=True`.
    """
    datadir_base = os.path.expanduser(os.path.join('~', '.keras'))
    if not os.access(datadir_base, os.W_OK):
        # Home directory is read-only; fall back to /tmp.
        datadir_base = os.path.join('/tmp', '.keras')
    datadir = os.path.join(datadir_base, 'datasets')
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    if untar:
        untar_fpath = os.path.join(datadir, fname)
        fpath = untar_fpath + '.tar.gz'
    else:
        fpath = os.path.join(datadir, fname)

    if not os.path.exists(fpath):
        print('Downloading data from', origin)

        # One-element cell so the download hook can lazily create the
        # progress bar without relying on module-level global state.
        progbar_cell = [None]

        def dl_progress(count, block_size, total_size):
            if progbar_cell[0] is None:
                progbar_cell[0] = Progbar(total_size)
            else:
                progbar_cell[0].update(count * block_size)

        error_msg = 'URL fetch failure on {}: {} -- {}'
        try:
            try:
                urlretrieve(origin, fpath, dl_progress)
            except URLError as e:
                raise Exception(error_msg.format(origin, e.errno, e.reason))
            except HTTPError as e:
                raise Exception(error_msg.format(origin, e.code, e.msg))
        except (Exception, KeyboardInterrupt):
            # Remove the partial download so a retry starts clean;
            # bare `raise` preserves the original traceback.
            if os.path.exists(fpath):
                os.remove(fpath)
            raise

    if untar:
        if not os.path.exists(untar_fpath):
            print('Untaring file...')
            tfile = tarfile.open(fpath, 'r:gz')
            try:
                tfile.extractall(path=datadir)
            except (Exception, KeyboardInterrupt):
                # Remove the partial extraction before re-raising.
                if os.path.exists(untar_fpath):
                    if os.path.isfile(untar_fpath):
                        os.remove(untar_fpath)
                    else:
                        shutil.rmtree(untar_fpath)
                raise
            finally:
                # Always release the tarfile handle (the original
                # leaked it on the error path).
                tfile.close()
        return untar_fpath

    return fpath
# Emit a deprecation notice on import: this module has become a thin
# alias for keras.utils.data_utils (re-exported via the star-import above).
warnings.warn('data_utils has been moved to keras.utils.data_utils.')