X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=youtube_dl%2Fdownloader%2Ffragment.py;h=62de0a75bad4553ff963c7919f035975097af1f3;hb=d3f0687cf7b049b976420056e02c26b5d96adeed;hp=a5bae96699e0b0f81fd11deab4900fe5ed8b820d;hpb=1b5284b13f1b579b2cbac5ce6ab9faa5b95800fb;p=youtube-dl

diff --git a/youtube_dl/downloader/fragment.py b/youtube_dl/downloader/fragment.py
index a5bae9669..62de0a75b 100644
--- a/youtube_dl/downloader/fragment.py
+++ b/youtube_dl/downloader/fragment.py
@@ -2,12 +2,15 @@ from __future__ import division, unicode_literals
 
 import os
 import time
+import json
 
 from .common import FileDownloader
 from .http import HttpFD
 from ..utils import (
+    error_to_compat_str,
     encodeFilename,
     sanitize_open,
+    sanitized_Request,
 )
 
 
@@ -19,12 +22,68 @@ class HttpQuietDownloader(HttpFD):
 class FragmentFD(FileDownloader):
     """
     A base file downloader class for fragmented media (e.g. f4m/m3u8 manifests).
+
+    Available options:
+
+    fragment_retries:   Number of times to retry a fragment for HTTP error (DASH
+                        and hlsnative only)
+    skip_unavailable_fragments:
+                        Skip unavailable fragments (DASH and hlsnative only)
     """
 
+    def report_retry_fragment(self, err, frag_index, count, retries):
+        self.to_screen(
+            '[download] Got server HTTP error: %s. Retrying fragment %d (attempt %d of %s)...'
+            % (error_to_compat_str(err), frag_index, count, self.format_retries(retries)))
+
+    def report_skip_fragment(self, frag_index):
+        self.to_screen('[download] Skipping fragment %d...' % frag_index)
+
+    def _prepare_url(self, info_dict, url):
+        headers = info_dict.get('http_headers')
+        return sanitized_Request(url, None, headers) if headers else url
+
     def _prepare_and_start_frag_download(self, ctx):
         self._prepare_frag_download(ctx)
         self._start_frag_download(ctx)
 
+    def _read_ytdl_file(self, ctx):
+        stream, _ = sanitize_open(self.ytdl_filename(ctx['filename']), 'r')
+        ctx['fragment_index'] = json.loads(stream.read())['download']['current_fragment_index']
+        stream.close()
+
+    def _write_ytdl_file(self, ctx):
+        frag_index_stream, _ = sanitize_open(self.ytdl_filename(ctx['filename']), 'w')
+        frag_index_stream.write(json.dumps({
+            'download': {
+                'current_fragment_index': ctx['fragment_index']
+            },
+        }))
+        frag_index_stream.close()
+
+    def _download_fragment(self, ctx, frag_url, info_dict, headers=None):
+        fragment_filename = '%s-Frag%d' % (ctx['tmpfilename'], ctx['fragment_index'])
+        success = ctx['dl'].download(fragment_filename, {
+            'url': frag_url,
+            'http_headers': headers or info_dict.get('http_headers'),
+        })
+        if not success:
+            return False, None
+        down, frag_sanitized = sanitize_open(fragment_filename, 'rb')
+        ctx['fragment_filename_sanitized'] = frag_sanitized
+        frag_content = down.read()
+        down.close()
+        return True, frag_content
+
+    def _append_fragment(self, ctx, frag_content):
+        try:
+            ctx['dest_stream'].write(frag_content)
+        finally:
+            if not (ctx.get('live') or ctx['tmpfilename'] == '-'):
+                self._write_ytdl_file(ctx)
+            os.remove(ctx['fragment_filename_sanitized'])
+            del ctx['fragment_filename_sanitized']
+
     def _prepare_frag_download(self, ctx):
         if 'live' not in ctx:
             ctx['live'] = False
@@ -40,15 +99,38 @@ class FragmentFD(FileDownloader):
                 'noprogress': True,
                 'ratelimit': self.params.get('ratelimit'),
                 'retries': self.params.get('retries', 0),
+                'nopart': self.params.get('nopart', False),
                 'test': self.params.get('test', False),
             }
         )
         tmpfilename = self.temp_name(ctx['filename'])
-        dest_stream, tmpfilename = sanitize_open(tmpfilename, 'wb')
+        open_mode = 'wb'
+        resume_len = 0
+
+        # Establish possible resume length
+        if os.path.isfile(encodeFilename(tmpfilename)):
+            open_mode = 'ab'
+            resume_len = os.path.getsize(encodeFilename(tmpfilename))
+
+        ctx['fragment_index'] = 0
+        if os.path.isfile(encodeFilename(self.ytdl_filename(ctx['filename']))):
+            self._read_ytdl_file(ctx)
+        else:
+            self._write_ytdl_file(ctx)
+
+        if ctx['fragment_index'] > 0:
+            assert resume_len > 0
+        else:
+            assert resume_len == 0
+
+        dest_stream, tmpfilename = sanitize_open(tmpfilename, open_mode)
+
         ctx.update({
             'dl': dl,
             'dest_stream': dest_stream,
             'tmpfilename': tmpfilename,
+            # Total complete fragments downloaded so far in bytes
+            'complete_frags_downloaded_bytes': resume_len,
         })
 
     def _start_frag_download(self, ctx):
@@ -57,9 +139,9 @@ class FragmentFD(FileDownloader):
         # hook
         state = {
             'status': 'downloading',
-            'downloaded_bytes': 0,
-            'frag_index': 0,
-            'frag_count': total_frags,
+            'downloaded_bytes': ctx['complete_frags_downloaded_bytes'],
+            'fragment_index': ctx['fragment_index'],
+            'fragment_count': total_frags,
             'filename': ctx['filename'],
             'tmpfilename': ctx['tmpfilename'],
         }
@@ -67,8 +149,6 @@ class FragmentFD(FileDownloader):
         start = time.time()
         ctx.update({
             'started': start,
-            # Total complete fragments downloaded so far in bytes
-            'complete_frags_downloaded_bytes': 0,
             # Amount of fragment's bytes downloaded by the time of the previous
             # frag progress hook invocation
             'prev_frag_downloaded_bytes': 0,
@@ -84,11 +164,12 @@ class FragmentFD(FileDownloader):
             if not ctx['live']:
                 estimated_size = (
                     (ctx['complete_frags_downloaded_bytes'] + frag_total_bytes) /
-                    (state['frag_index'] + 1) * total_frags)
+                    (state['fragment_index'] + 1) * total_frags)
                 state['total_bytes_estimate'] = estimated_size
 
             if s['status'] == 'finished':
-                state['frag_index'] += 1
+                state['fragment_index'] += 1
+                ctx['fragment_index'] = state['fragment_index']
                 state['downloaded_bytes'] += frag_total_bytes - ctx['prev_frag_downloaded_bytes']
                 ctx['complete_frags_downloaded_bytes'] = state['downloaded_bytes']
                 ctx['prev_frag_downloaded_bytes'] = 0
@@ -110,6 +191,9 @@ class FragmentFD(FileDownloader):
 
     def _finish_frag_download(self, ctx):
         ctx['dest_stream'].close()
+        ytdl_filename = encodeFilename(self.ytdl_filename(ctx['filename']))
+        if os.path.isfile(ytdl_filename):
+            os.remove(ytdl_filename)
         elapsed = time.time() - ctx['started']
         self.try_rename(ctx['tmpfilename'], ctx['filename'])
         fsize = os.path.getsize(encodeFilename(ctx['filename']))
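
For reference, the resume state that _write_ytdl_file serializes (and _read_ytdl_file restores into ctx['fragment_index']) is a small JSON document keyed by 'download' / 'current_fragment_index', exactly as added in the hunks above; it effectively records how many fragments have already been appended to the partial file. A minimal round-trip sketch (the index value 7 is illustrative only):

import json

# The .ytdl side file is rewritten by _append_fragment after every appended
# fragment; this is the structure _write_ytdl_file produces.
state = json.dumps({'download': {'current_fragment_index': 7}})

# On a later run _read_ytdl_file reads it back, and _prepare_frag_download
# asserts that the partial download is non-empty whenever this index is
# greater than zero.
assert json.loads(state)['download']['current_fragment_index'] == 7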
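
The helpers introduced here are meant to be driven by the concrete fragment downloaders (the DASH and hlsnative ones mentioned in the new docstring). The sketch below is a hypothetical minimal consumer, not code from this commit: the class name, FD_NAME value and the 'fragment_urls' field are made up for illustration, while the FragmentFD methods and the fragment_retries / skip_unavailable_fragments options are the ones added above.

from ..compat import compat_urllib_error
from .fragment import FragmentFD


class ExampleFragmentFD(FragmentFD):
    """Hypothetical consumer of the FragmentFD helpers added in this diff."""

    FD_NAME = 'example'

    def real_download(self, filename, info_dict):
        # 'fragment_urls' is an assumed field; real downloaders derive the
        # fragment list from the manifest (f4m/m3u8/MPD) instead.
        fragment_urls = info_dict['fragment_urls']

        ctx = {
            'filename': filename,
            'total_frags': len(fragment_urls),
        }
        # Opens the destination stream, restores ctx['fragment_index'] from
        # the .ytdl file (if any) and installs the fragment progress hook.
        self._prepare_and_start_frag_download(ctx)

        fragment_retries = self.params.get('fragment_retries', 0)
        skip_unavailable = self.params.get('skip_unavailable_fragments', True)

        for frag_index, frag_url in enumerate(fragment_urls, 1):
            if frag_index <= ctx['fragment_index']:
                # Already appended during a previous, interrupted run.
                continue
            count = 0
            while count <= fragment_retries:
                try:
                    success, frag_content = self._download_fragment(
                        ctx, frag_url, info_dict)
                    if not success:
                        return False
                    self._append_fragment(ctx, frag_content)
                    break
                except compat_urllib_error.HTTPError as err:
                    count += 1
                    if count <= fragment_retries:
                        self.report_retry_fragment(
                            err, frag_index, count, fragment_retries)
            else:
                # Retries exhausted without a successful download.
                if skip_unavailable:
                    self.report_skip_fragment(frag_index)
                    continue
                self.report_error(
                    'Giving up after %s fragment retries' % fragment_retries)
                return False

        # Renames the temp file and removes the .ytdl state file.
        self._finish_frag_download(ctx)
        return True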