Merge tag 'upstream/2016.06.25'
diff --git a/youtube_dl/downloader/dash.py b/youtube_dl/downloader/dash.py
index 535f2a7fc7236a5717f1b0bf0375a5876cf00226..8bbab9dbc596c659db622fe9910d0ae90018a598 100644
--- a/youtube_dl/downloader/dash.py
+++ b/youtube_dl/downloader/dash.py
@@ -1,66 +1,81 @@
 from __future__ import unicode_literals
 
+import os
 import re
 
-from .common import FileDownloader
-from ..utils import sanitized_Request
+from .fragment import FragmentFD
+from ..compat import compat_urllib_error
+from ..utils import (
+    sanitize_open,
+    encodeFilename,
+)
 
 
-class DashSegmentsFD(FileDownloader):
+class DashSegmentsFD(FragmentFD):
     """
     Download segments in a DASH manifest
     """
-    def real_download(self, filename, info_dict):
-        self.report_destination(filename)
-        tmpfilename = self.temp_name(filename)
-        base_url = info_dict['url']
-        segment_urls = info_dict['segment_urls']
-
-        is_test = self.params.get('test', False)
-        remaining_bytes = self._TEST_FILE_SIZE if is_test else None
-        byte_counter = 0
 
-        def append_url_to_file(outf, target_url, target_name, remaining_bytes=None):
-            self.to_screen('[DashSegments] %s: Downloading %s' % (info_dict['id'], target_name))
-            req = sanitized_Request(target_url)
-            if remaining_bytes is not None:
-                req.add_header('Range', 'bytes=0-%d' % (remaining_bytes - 1))
+    FD_NAME = 'dashsegments'
 
-            data = self.ydl.urlopen(req).read()
+    def real_download(self, filename, info_dict):
+        base_url = info_dict['url']
+        segment_urls = [info_dict['segment_urls'][0]] if self.params.get('test', False) else info_dict['segment_urls']
+        initialization_url = info_dict.get('initialization_url')
 
-            if remaining_bytes is not None:
-                data = data[:remaining_bytes]
+        ctx = {
+            'filename': filename,
+            'total_frags': len(segment_urls) + (1 if initialization_url else 0),
+        }
 
-            outf.write(data)
-            return len(data)
+        self._prepare_and_start_frag_download(ctx)
 
         def combine_url(base_url, target_url):
             if re.match(r'^https?://', target_url):
                 return target_url
             return '%s%s%s' % (base_url, '' if base_url.endswith('/') else '/', target_url)
 
-        with open(tmpfilename, 'wb') as outf:
-            append_url_to_file(
-                outf, combine_url(base_url, info_dict['initialization_url']),
-                'initialization segment')
-            for i, segment_url in enumerate(segment_urls):
-                segment_len = append_url_to_file(
-                    outf, combine_url(base_url, segment_url),
-                    'segment %d / %d' % (i + 1, len(segment_urls)),
-                    remaining_bytes)
-                byte_counter += segment_len
-                if remaining_bytes is not None:
-                    remaining_bytes -= segment_len
-                    if remaining_bytes <= 0:
-                        break
-
-        self.try_rename(tmpfilename, filename)
-
-        self._hook_progress({
-            'downloaded_bytes': byte_counter,
-            'total_bytes': byte_counter,
-            'filename': filename,
-            'status': 'finished',
-        })
+        segments_filenames = []
+
+        fragment_retries = self.params.get('fragment_retries', 0)
+
+        def append_url_to_file(target_url, tmp_filename, segment_name):
+            target_filename = '%s-%s' % (tmp_filename, segment_name)
+            count = 0
+            while count <= fragment_retries:
+                try:
+                    success = ctx['dl'].download(target_filename, {'url': combine_url(base_url, target_url)})
+                    if not success:
+                        return False
+                    down, target_sanitized = sanitize_open(target_filename, 'rb')
+                    ctx['dest_stream'].write(down.read())
+                    down.close()
+                    segments_filenames.append(target_sanitized)
+                    break
+                except (compat_urllib_error.HTTPError, ) as err:
+                    # YouTube may often return 404 HTTP error for a fragment causing the
+                    # whole download to fail. However if the same fragment is immediately
+                    # retried with the same request data this usually succeeds (1-2 attempts
+                    # is usually enough) thus allowing to download the whole file successfully.
+                    # So, we will retry all fragments that fail with 404 HTTP error for now.
+                    if err.code != 404:
+                        raise
+                    # Retry fragment
+                    count += 1
+                    if count <= fragment_retries:
+                        self.report_retry_fragment(segment_name, count, fragment_retries)
+            if count > fragment_retries:
+                self.report_error('giving up after %s fragment retries' % fragment_retries)
+                return False
+
+        if initialization_url:
+            append_url_to_file(initialization_url, ctx['tmpfilename'], 'Init')
+        for i, segment_url in enumerate(segment_urls):
+            append_url_to_file(segment_url, ctx['tmpfilename'], 'Seg%d' % i)
+
+        self._finish_frag_download(ctx)
+
+        for segment_file in segments_filenames:
+            os.remove(encodeFilename(segment_file))
 
         return True
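
Note on the change above: the core behavioural difference is that each DASH segment is now downloaded as its own fragment and, on an HTTP 404, retried up to fragment_retries times before its data is appended to the output stream. Below is a minimal standalone sketch of that retry pattern only, using plain urllib; fetch_fragment and MAX_RETRIES are illustrative names, not part of youtube-dl's FragmentFD API.

import urllib.error
import urllib.request

MAX_RETRIES = 3  # stands in for self.params.get('fragment_retries', 0) in the diff above


def fetch_fragment(url, max_retries=MAX_RETRIES):
    """Download one fragment, retrying only on HTTP 404."""
    attempt = 0
    while attempt <= max_retries:
        try:
            with urllib.request.urlopen(url) as resp:
                return resp.read()
        except urllib.error.HTTPError as err:
            if err.code != 404:
                raise  # any non-404 HTTP error is fatal, as in the diff above
            attempt += 1  # 404s on DASH segments are often transient; retry
    raise IOError('giving up after %d fragment retries' % max_retries)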