from __future__ import unicode_literals

from .fragment import FragmentFD
from ..compat import compat_urllib_error
from ..utils import urljoin


class DashSegmentsFD(FragmentFD):
    """
    Download segments in a DASH manifest
    """

    FD_NAME = 'dashsegments'

    def real_download(self, filename, info_dict):
        fragment_base_url = info_dict.get('fragment_base_url')
        fragments = info_dict['fragments'][:1] if self.params.get(
            'test', False) else info_dict['fragments']

        ctx = {
            'filename': filename,
            'total_frags': len(fragments),
        }

        self._prepare_and_start_frag_download(ctx)

        fragment_retries = self.params.get('fragment_retries', 0)
        skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)

        frag_index = 0
        for i, fragment in enumerate(fragments):
            frag_index += 1
            # Skip fragments already written in a previous (resumed) run
            if frag_index <= ctx['fragment_index']:
                continue
            # In DASH, the first segment contains necessary headers to
            # generate a valid MP4 file, so always abort for the first segment
            fatal = i == 0 or not skip_unavailable_fragments
            count = 0
            while count <= fragment_retries:
                try:
                    fragment_url = fragment.get('url')
                    if not fragment_url:
                        # No absolute URL for this fragment: build one from the manifest base URL
                        assert fragment_base_url
                        fragment_url = urljoin(fragment_base_url, fragment['path'])
                    success, frag_content = self._download_fragment(ctx, fragment_url, info_dict)
                    if not success:
                        return False
                    self._append_fragment(ctx, frag_content)
                    break
                except compat_urllib_error.HTTPError as err:
                    # YouTube may often return 404 HTTP error for a fragment causing the
                    # whole download to fail. However if the same fragment is immediately
                    # retried with the same request data this usually succeeds (1-2 attempts
                    # are usually enough), thus allowing the whole file to be downloaded
                    # successfully. To be future-proof we will retry all fragments that
                    # fail with any HTTP error.
                    count += 1
                    if count <= fragment_retries:
                        self.report_retry_fragment(err, frag_index, count, fragment_retries)
            if count > fragment_retries:
                if not fatal:
                    # Fragment is not essential: report the skip and move on
                    self.report_skip_fragment(frag_index)
                    continue
                self.report_error('giving up after %s fragment retries' % fragment_retries)
                return False

        self._finish_frag_download(ctx)

        return True
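

# Illustrative usage sketch (not part of the original module): a minimal,
# hypothetical example of the info_dict shape this downloader consumes.
# Each fragment entry carries either a complete 'url' or a 'path' that is
# joined with 'fragment_base_url'. The options, filename and URLs below are
# assumptions for illustration only.
#
#     from youtube_dl import YoutubeDL
#
#     ydl = YoutubeDL({'fragment_retries': 3, 'skip_unavailable_fragments': True})
#     fd = DashSegmentsFD(ydl, ydl.params)
#     info_dict = {
#         'fragment_base_url': 'https://example.com/dash/',
#         'fragments': [{'path': 'init.mp4'}, {'path': 'segment-1.m4s'}],
#     }
#     fd.download('output.mp4', info_dict)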