diff --git a/youtube_dl/downloader/dash.py b/youtube_dl/downloader/dash.py
index a4685d307..41fc9cfc2 100644
--- a/youtube_dl/downloader/dash.py
+++ b/youtube_dl/downloader/dash.py
@@ -1,66 +1,92 @@
 from __future__ import unicode_literals
 
+import os
 import re
 
-from .common import FileDownloader
-from ..compat import compat_urllib_request
+from .fragment import FragmentFD
+from ..compat import compat_urllib_error
+from ..utils import (
+    sanitize_open,
+    encodeFilename,
+)
 
 
-class DashSegmentsFD(FileDownloader):
+class DashSegmentsFD(FragmentFD):
     """
     Download segments in a DASH manifest
     """
 
-    def real_download(self, filename, info_dict):
-        self.report_destination(filename)
-        tmpfilename = self.temp_name(filename)
-        base_url = info_dict['url']
-        segment_urls = info_dict['segment_urls']
-        is_test = self.params.get('test', False)
-        remaining_bytes = self._TEST_FILE_SIZE if is_test else None
-        byte_counter = 0
+    FD_NAME = 'dashsegments'
 
-        def append_url_to_file(outf, target_url, target_name, remaining_bytes=None):
-            self.to_screen('[DashSegments] %s: Downloading %s' % (info_dict['id'], target_name))
-            req = compat_urllib_request.Request(target_url)
-            if remaining_bytes is not None:
-                req.add_header('Range', 'bytes=0-%d' % (remaining_bytes - 1))
-
-            data = self.ydl.urlopen(req).read()
+    def real_download(self, filename, info_dict):
+        base_url = info_dict['url']
+        segment_urls = [info_dict['segment_urls'][0]] if self.params.get('test', False) else info_dict['segment_urls']
+        initialization_url = info_dict.get('initialization_url')
 
-            if remaining_bytes is not None:
-                data = data[:remaining_bytes]
+        ctx = {
+            'filename': filename,
+            'total_frags': len(segment_urls) + (1 if initialization_url else 0),
+        }
 
-            outf.write(data)
-            return len(data)
+        self._prepare_and_start_frag_download(ctx)
 
         def combine_url(base_url, target_url):
             if re.match(r'^https?://', target_url):
                 return target_url
-            return '%s/%s' % (base_url, target_url)
-
-        with open(tmpfilename, 'wb') as outf:
-            append_url_to_file(
-                outf, combine_url(base_url, info_dict['initialization_url']),
-                'initialization segment')
-            for i, segment_url in enumerate(segment_urls):
-                segment_len = append_url_to_file(
-                    outf, combine_url(base_url, segment_url),
-                    'segment %d / %d' % (i + 1, len(segment_urls)),
-                    remaining_bytes)
-                byte_counter += segment_len
-                if remaining_bytes is not None:
-                    remaining_bytes -= segment_len
-                    if remaining_bytes <= 0:
-                        break
-
-        self.try_rename(tmpfilename, filename)
-
-        self._hook_progress({
-            'downloaded_bytes': byte_counter,
-            'total_bytes': byte_counter,
-            'filename': filename,
-            'status': 'finished',
-        })
+            return '%s%s%s' % (base_url, '' if base_url.endswith('/') else '/', target_url)
+
+        segments_filenames = []
+
+        fragment_retries = self.params.get('fragment_retries', 0)
+        skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)
+
+        def process_segment(segment, tmp_filename, fatal):
+            target_url, segment_name = segment
+            target_filename = '%s-%s' % (tmp_filename, segment_name)
+            count = 0
+            while count <= fragment_retries:
+                try:
+                    success = ctx['dl'].download(target_filename, {'url': combine_url(base_url, target_url)})
+                    if not success:
+                        return False
+                    down, target_sanitized = sanitize_open(target_filename, 'rb')
+                    ctx['dest_stream'].write(down.read())
+                    down.close()
+                    segments_filenames.append(target_sanitized)
+                    break
+                except compat_urllib_error.HTTPError as err:
+                    # YouTube may often return 404 HTTP error for a fragment causing the
+                    # whole download to fail. However if the same fragment is immediately
+                    # retried with the same request data this usually succeeds (1-2 attempts
+                    # is usually enough) thus allowing us to download the whole file successfully.
+                    # To be future-proof we will retry all fragments that fail with any
+                    # HTTP error.
+                    count += 1
+                    if count <= fragment_retries:
+                        self.report_retry_fragment(err, segment_name, count, fragment_retries)
+            if count > fragment_retries:
+                if not fatal:
+                    self.report_skip_fragment(segment_name)
+                    return True
+                self.report_error('giving up after %s fragment retries' % fragment_retries)
+                return False
+            return True
+
+        segments_to_download = [(initialization_url, 'Init')] if initialization_url else []
+        segments_to_download.extend([
+            (segment_url, 'Seg%d' % i)
+            for i, segment_url in enumerate(segment_urls)])
+
+        for i, segment in enumerate(segments_to_download):
+            # In DASH, the first segment contains necessary headers to
+            # generate a valid MP4 file, so always abort for the first segment
+            fatal = i == 0 or not skip_unavailable_fragments
+            if not process_segment(segment, ctx['tmpfilename'], fatal):
+                return False
+
+        self._finish_frag_download(ctx)
+
+        for segment_file in segments_filenames:
+            os.remove(encodeFilename(segment_file))
 
         return True
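
For readers skimming the change, here is a minimal, self-contained sketch of the download strategy the new FragmentFD-based code follows: resolve each segment URL against the manifest base, retry fragments that fail with an HTTP error, and skip non-fatal fragments once retries are exhausted, while always treating the first (initialization) segment as fatal. It deliberately uses only the standard library instead of youtube-dl's FragmentFD/ctx machinery, and the helper names fetch_segment and download_dash_sketch are illustrative assumptions, not part of the project; only combine_url mirrors the helper in the diff.

# Illustrative sketch only; assumes plain urllib rather than youtube-dl internals.
import re
import urllib.error
import urllib.request


def combine_url(base_url, target_url):
    # Mirrors combine_url in the diff: absolute URLs pass through, relative
    # ones are joined onto the base with a slash inserted only when needed.
    if re.match(r'^https?://', target_url):
        return target_url
    return '%s%s%s' % (base_url, '' if base_url.endswith('/') else '/', target_url)


def fetch_segment(url, fragment_retries=0, fatal=True):
    # Hypothetical helper: download one segment, retrying on any HTTPError,
    # and return None (skip the fragment) if it stays unavailable and the
    # caller marked it non-fatal.
    count = 0
    while count <= fragment_retries:
        try:
            return urllib.request.urlopen(url).read()
        except urllib.error.HTTPError:
            # Transient 404s on single fragments are common; an immediate
            # retry of the same request usually succeeds.
            count += 1
    if not fatal:
        return None
    raise IOError('giving up after %s fragment retries' % fragment_retries)


def download_dash_sketch(base_url, segment_urls, initialization_url, out_path):
    # Hypothetical driver: write the init segment (if any) and then every
    # media segment into one output file, in manifest order.
    targets = ([initialization_url] if initialization_url else []) + list(segment_urls)
    with open(out_path, 'wb') as out:
        for i, target in enumerate(targets):
            # The first segment carries the headers needed for a valid MP4,
            # so a failure there is always fatal.
            data = fetch_segment(combine_url(base_url, target),
                                 fragment_retries=3, fatal=(i == 0))
            if data is not None:
                out.write(data)

As in the patch, a fragment is attempted at most fragment_retries + 1 times, and skipping is only allowed for segments other than the first one; the real code additionally routes each fragment through ctx['dl'].download() into a temporary file and cleans those files up afterwards.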