[external:ffmpeg] In test harness, limit to 10k download size
diff --git a/youtube_dl/downloader/fragment.py b/youtube_dl/downloader/fragment.py
index b4a798f8f9b4e52987f10417fbe070f92f141382..56f97526676cda29b8c3b15de0e07cb676cc8573 100644
--- a/youtube_dl/downloader/fragment.py
+++ b/youtube_dl/downloader/fragment.py
@@ -6,8 +6,10 @@ import time
 from .common import FileDownloader
 from .http import HttpFD
 from ..utils import (
+    error_to_compat_str,
     encodeFilename,
     sanitize_open,
+    sanitized_Request,
 )
 
 
@@ -28,14 +30,18 @@ class FragmentFD(FileDownloader):
                         Skip unavailable fragments (DASH and hlsnative only)
     """
 
-    def report_retry_fragment(self, fragment_name, count, retries):
+    def report_retry_fragment(self, err, fragment_name, count, retries):
         self.to_screen(
             '[download] Got server HTTP error: %s. Retrying fragment %s (attempt %d of %s)...'
-            % (fragment_name, count, self.format_retries(retries)))
+            % (error_to_compat_str(err), fragment_name, count, self.format_retries(retries)))
 
     def report_skip_fragment(self, fragment_name):
         self.to_screen('[download] Skipping fragment %s...' % fragment_name)
 
+    def _prepare_url(self, info_dict, url):
+        headers = info_dict.get('http_headers')
+        return sanitized_Request(url, None, headers) if headers else url
+
     def _prepare_and_start_frag_download(self, ctx):
         self._prepare_frag_download(ctx)
         self._start_frag_download(ctx)
@@ -55,6 +61,7 @@ class FragmentFD(FileDownloader):
                 'noprogress': True,
                 'ratelimit': self.params.get('ratelimit'),
                 'retries': self.params.get('retries', 0),
+                'nopart': self.params.get('nopart', False),
                 'test': self.params.get('test', False),
             }
         )
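
Below is a minimal sketch, not part of this patch, of how a FragmentFD subclass might use the helpers touched above: _prepare_url() to carry the info dict's http_headers into each fragment request, and the new report_retry_fragment(err, ...) signature inside a retry loop. The class name, the 'fragment_urls' key and the per-fragment naming are illustrative assumptions; only FragmentFD, sanitize_open, compat_urllib_error and the fragment_retries / skip_unavailable_fragments params come from youtube-dl itself.

# Sketch only: an illustrative FragmentFD subclass, not code from this patch.
import os

from youtube_dl.compat import compat_urllib_error
from youtube_dl.downloader.fragment import FragmentFD
from youtube_dl.utils import sanitize_open


class ExampleFragmentFD(FragmentFD):
    """Downloads a flat list of fragment URLs (assumed to live under an
    illustrative 'fragment_urls' key in the info dict)."""

    FD_NAME = 'example'

    def real_download(self, filename, info_dict):
        fragment_urls = info_dict['fragment_urls']
        ctx = {
            'filename': filename,
            'total_frags': len(fragment_urls),
        }
        self._prepare_and_start_frag_download(ctx)

        fragment_retries = self.params.get('fragment_retries', 0)
        skip_unavailable = self.params.get('skip_unavailable_fragments', True)

        for i, frag_url in enumerate(fragment_urls):
            frag_name = 'Frag%d' % i
            frag_filename = '%s-%s' % (ctx['tmpfilename'], frag_name)
            # _prepare_url() returns a sanitized_Request carrying the info
            # dict's http_headers (if any), so the nested HttpFD sends them
            # with every fragment request; otherwise it returns the bare URL.
            frag_req = self._prepare_url(info_dict, frag_url)
            count = 0
            while count <= fragment_retries:
                try:
                    success = ctx['dl'].download(frag_filename, {'url': frag_req})
                    if not success:
                        return False
                    down, frag_sanitized = sanitize_open(frag_filename, 'rb')
                    ctx['dest_stream'].write(down.read())
                    down.close()
                    os.remove(frag_sanitized)
                    break
                except compat_urllib_error.HTTPError as err:
                    # New signature: the error object comes first so the
                    # retry message can include it via error_to_compat_str().
                    count += 1
                    if count <= fragment_retries:
                        self.report_retry_fragment(err, frag_name, count, fragment_retries)
            else:
                # Retries exhausted for this fragment.
                if skip_unavailable:
                    self.report_skip_fragment(frag_name)
                    continue
                self.report_error('Giving up after %s fragment retries' % fragment_retries)
                return False

        self._finish_frag_download(ctx)
        return True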