[youtube] Fix extraction.
[youtube-dl] / youtube_dl / downloader / dash.py
index 5f14658ba731cd5479caec1eb65f340714de0a4a..c6d674bc6c0690e00dee3c70879a97574c3911f3 100644 (file)
@@ -1,51 +1,89 @@
 from __future__ import unicode_literals
 
-import re
+from .fragment import FragmentFD
+from ..compat import compat_urllib_error
+from ..utils import (
+    DownloadError,
+    urljoin,
+)
 
-from .common import FileDownloader
-from ..compat import compat_urllib_request
 
-
-class DashSegmentsFD(FileDownloader):
+class DashSegmentsFD(FragmentFD):
     """
     Download segments in a DASH manifest
     """
+
+    FD_NAME = 'dashsegments'
+
     def real_download(self, filename, info_dict):
+        """Download every fragment listed in info_dict and assemble filename.
+
+        Returns True on success, False when a fatal fragment could not be
+        fetched within the configured number of retries (or when a single
+        fragment download reports failure).
+        """
-        self.report_destination(filename)
-        tmpfilename = self.temp_name(filename)
-        base_url = info_dict['url']
-        segment_urls = info_dict['segment_urls']
-
-        self.byte_counter = 0
-
-        def append_url_to_file(outf, target_url, target_name):
-            self.to_screen('[DashSegments] %s: Downloading %s' % (info_dict['id'], target_name))
-            req = compat_urllib_request.Request(target_url)
-            data = self.ydl.urlopen(req).read()
-            outf.write(data)
-            self.byte_counter += len(data)
-
-        def combine_url(base_url, target_url):
-            if re.match(r'^https?://', target_url):
-                return target_url
-            return '%s/%s' % (base_url, target_url)
-
-        with open(tmpfilename, 'wb') as outf:
-            append_url_to_file(
-                outf, combine_url(base_url, info_dict['initialization_url']),
-                'initialization segment')
-            for i, segment_url in enumerate(segment_urls):
-                append_url_to_file(
-                    outf, combine_url(base_url, segment_url),
-                    'segment %d / %d' % (i + 1, len(segment_urls)))
-
-        self.try_rename(tmpfilename, filename)
-
-        self._hook_progress({
-            'downloaded_bytes': self.byte_counter,
-            'total_bytes': self.byte_counter,
+        fragment_base_url = info_dict.get('fragment_base_url')
+        # In test mode (--test) only the first fragment is downloaded,
+        # which is enough to verify the format is retrievable.
+        fragments = info_dict['fragments'][:1] if self.params.get(
+            'test', False) else info_dict['fragments']
+
+        ctx = {
             'filename': filename,
-            'status': 'finished',
-        })
+            'total_frags': len(fragments),
+        }
+
+        self._prepare_and_start_frag_download(ctx)
+
+        fragment_retries = self.params.get('fragment_retries', 0)
+        skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)
+
+        frag_index = 0
+        for i, fragment in enumerate(fragments):
+            frag_index += 1
+            # Fragments up to ctx['fragment_index'] were presumably already
+            # downloaded by an earlier run (resume state set above) - skip.
+            if frag_index <= ctx['fragment_index']:
+                continue
+            # In DASH, the first segment contains necessary headers to
+            # generate a valid MP4 file, so always abort for the first segment
+            fatal = i == 0 or not skip_unavailable_fragments
+            count = 0
+            while count <= fragment_retries:
+                try:
+                    fragment_url = fragment.get('url')
+                    if not fragment_url:
+                        assert fragment_base_url
+                        fragment_url = urljoin(fragment_base_url, fragment['path'])
+                    success, frag_content = self._download_fragment(ctx, fragment_url, info_dict)
+                    if not success:
+                        return False
+                    self._append_fragment(ctx, frag_content)
+                    break
+                except compat_urllib_error.HTTPError as err:
+                    # YouTube may often return 404 HTTP error for a fragment causing the
+                    # whole download to fail. However if the same fragment is immediately
+                    # retried with the same request data this usually succeeds (1-2 attempts
+                    # is usually enough) thus allowing to download the whole file successfully.
+                    # To be future-proof we will retry all fragments that fail with any
+                    # HTTP error.
+                    count += 1
+                    if count <= fragment_retries:
+                        self.report_retry_fragment(err, frag_index, count, fragment_retries)
+                except DownloadError:
+                    # Don't retry fragment if error occurred during HTTP downloading
+                    # itself since it has own retry settings
+                    if not fatal:
+                        self.report_skip_fragment(frag_index)
+                        break
+                    raise
+
+            if count > fragment_retries:
+                if not fatal:
+                    self.report_skip_fragment(frag_index)
+                    continue
+                self.report_error('giving up after %s fragment retries' % fragment_retries)
+                return False
+
+        self._finish_frag_download(ctx)
 
         return True