@@ -103,7 +103,7 @@ def stream_url(url, start_byte=None, block_size=32 * 1024, progress_bar=True):
103103 pbar .update (len (chunk ))
104104
105105
106- def download_single_url (
106+ def download_url (
107107 url ,
108108 download_folder ,
109109 filename = None ,
@@ -163,38 +163,6 @@ def download_single_url(
163163 )
164164
165165
def download_url(urls, *args, max_workers=5, **kwargs):
    """Download one or more urls to disk.

    Extra positional and keyword arguments are forwarded to
    ``download_single_url``. Each extra argument may be either a single
    value (broadcast to every url) or a list with one entry per url.

    Args:
        urls (str or List[str]): A single url or a list of urls.
        max_workers (int): Maximum number of concurrent download threads.

    Returns:
        The return value of ``download_single_url`` when ``urls`` is a
        single string; otherwise an iterator over the submitted futures
        in completion order (``concurrent.futures.as_completed``).
    """
    # Single url: download directly, no thread pool needed.
    if isinstance(urls, str):
        return download_single_url(urls, *args, **kwargs)

    n = len(urls)

    # Broadcast scalar positional arguments to one value per url, then
    # regroup into one argument tuple per url.
    args = list(args)
    for i, item in enumerate(args):
        if not isinstance(item, list):
            args[i] = [item] * n
    # BUG FIX: zip(*[]) yields nothing, so with no extra positional args
    # the final zip(urls, args, kwargs) was empty and no downloads were
    # submitted. Use one empty tuple per url instead.
    args = list(zip(*args)) if args else [()] * n

    # Same broadcasting for keyword arguments: build one kwargs dict per
    # url (empty kwargs likewise becomes one empty dict per url).
    for key, value in kwargs.items():
        if not isinstance(value, list):
            kwargs[key] = [value] * n
    if kwargs:
        kwargs = [dict(zip(kwargs.keys(), values)) for values in zip(*kwargs.values())]
    else:
        kwargs = [{}] * n

    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [
            executor.submit(download_single_url, url, *arg, **kwarg)
            for url, arg, kwarg in zip(urls, args, kwargs)
        ]
        # NOTE: the executor's __exit__ waits for all futures, so they are
        # finished by the time the as_completed iterator is consumed.
        return concurrent.futures.as_completed(futures)
196-
197-
198166def validate_file (filepath , hash_value , hash_type = "sha256" ):
199167 """Validate a given file with its hash.
200168
0 commit comments