Merge pull request #792 from fp7/master
diff --git a/test/test_download.py b/test/test_download.py
index f1bccf58c5a2eaab0fbb8d60bb1185007eae0ee8..cf8028718fc86f31da16a0910817e4f06e56f8e3 100644
--- a/test/test_download.py
+++ b/test/test_download.py
@@ -20,6 +20,8 @@ from youtube_dl.utils import *
 DEF_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tests.json')
 PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "parameters.json")
 
+RETRIES = 3  # number of attempts before a network-related failure aborts the test
+
 # General configuration (from __init__, not very elegant...)
 jar = compat_cookiejar.CookieJar()
 cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar)
@@ -56,6 +58,7 @@ with io.open(PARAMETERS_FILE, encoding='utf-8') as pf:
 
 
 class TestDownload(unittest.TestCase):
+    maxDiff = None  # show full diffs on assertion failures
     def setUp(self):
         self.parameters = parameters
         self.defs = defs
@@ -64,7 +67,7 @@ class TestDownload(unittest.TestCase):
 def generator(test_case):
 
     def test_template(self):
-        ie = getattr(youtube_dl.InfoExtractors, test_case['name'] + 'IE')
+        ie = youtube_dl.InfoExtractors.get_info_extractor(test_case['name'])
         if not ie._WORKING:
             print('Skipping: IE marked as not _WORKING')
             return
@@ -79,9 +82,8 @@ def generator(test_case):
         params.update(test_case.get('params', {}))
 
         fd = FileDownloader(params)
-        fd.add_info_extractor(ie())
-        for ien in test_case.get('add_ie', []):
-            fd.add_info_extractor(getattr(youtube_dl.InfoExtractors, ien + 'IE')())
+        for ie in youtube_dl.InfoExtractors.gen_extractors():
+            fd.add_info_extractor(ie)
         finished_hook_called = set()
         def _hook(status):
             if status['status'] == 'finished':
@@ -94,7 +96,19 @@ def generator(test_case):
             _try_rm(tc['file'] + '.part')
             _try_rm(tc['file'] + '.info.json')
         try:
-            fd.download([test_case['url']])
+            for retry in range(1, RETRIES + 1):
+                try:
+                    fd.download([test_case['url']])
+                except (DownloadError, ExtractorError) as err:
+                    if retry == RETRIES:
+                        raise
+                    # Only retry network-related errors; anything else is fatal
+                    if err.exc_info[0] not in (compat_urllib_error.URLError, socket.timeout, UnavailableVideoError):
+                        raise
+
+                    print('Retrying: {0} failed tries\n\n##########\n\n'.format(retry))
+                else:
+                    break
 
             for tc in test_cases:
                 if not test_case.get('params', {}).get('skip_download', False):
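The retry logic added above boils down to the following standalone pattern, shown as a minimal sketch against the standard library (hypothetical names; the real test inspects err.exc_info because DownloadError wraps the underlying exception, whereas this sketch catches the network errors directly):

    import socket
    import urllib.error

    RETRIES = 3

    def download_with_retries(download, url):
        # 'download' stands in for FileDownloader.download from the test above;
        # it is any callable that raises on failure.
        for retry in range(1, RETRIES + 1):
            try:
                download(url)
            except (urllib.error.URLError, socket.timeout):
                # Give up once the last attempt has failed, otherwise retry.
                if retry == RETRIES:
                    raise
                print('Retrying: {0} failed tries'.format(retry))
            else:
                break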