+ self._format_note(format),
+ ))
+
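+ # If there is no 'formats' list, treat the info_dict itself as the only available format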
+ formats = info_dict.get('formats', [info_dict])
+ idlen = max(len('format code'),
+ max(len(f['format_id']) for f in formats))
+ formats_s = [line(f, idlen) for f in formats]
+ if len(formats) > 1:
+ formats_s[0] += (' ' if self._format_note(formats[0]) else '') + '(worst)'
+ formats_s[-1] += (' ' if self._format_note(formats[-1]) else '') + '(best)'
+
+ header_line = line({
+ 'format_id': 'format code', 'ext': 'extension',
+ 'resolution': 'resolution', 'format_note': 'note'}, idlen=idlen)
+ self.to_screen('[info] Available formats for %s:\n%s\n%s' %
+ (info_dict['id'], header_line, '\n'.join(formats_s)))
+
+ def urlopen(self, req):
+ """ Start an HTTP download """
+
+ # According to RFC 3986, URLs cannot contain non-ASCII characters. However, some
+ # websites do not respect this and hand out URLs with non-percent-encoded
+ # non-ASCII characters (see telemb.py, ard.py [#3412]).
+ # urllib chokes on URLs with non-ASCII characters (see http://bugs.python.org/issue3991),
+ # so we work around the issue by replacing the request's original URL with a
+ # percent-encoded one
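+ # For example, 'http://example.com/détail' would be rewritten as
+ # 'http://example.com/d%C3%A9tail' (UTF-8 percent-encoding)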
+ req_is_string = isinstance(req, basestring if sys.version_info < (3, 0) else compat_str)
+ url = req if req_is_string else req.get_full_url()
+ url_escaped = escape_url(url)
+
+ # Substitute the URL only if it actually changed after escaping
+ if url != url_escaped:
+ if req_is_string:
+ req = url_escaped
+ else:
+ req = compat_urllib_request.Request(
+ url_escaped, data=req.data, headers=req.headers,
+ origin_req_host=req.origin_req_host, unverifiable=req.unverifiable)
+
+ return self._opener.open(req, timeout=self._socket_timeout)
+
+ def print_debug_header(self):
+ if not self.params.get('verbose'):
+ return
+
+ if type('') is not compat_str:
+ # Python 2.6 on SLES11 SP1 (https://github.com/rg3/youtube-dl/issues/3326)
+ self.report_warning(
+ 'Your Python is broken! Update to a newer and supported version')
+
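+ # sys.stdout may lack an 'encoding' attribute (e.g. when it has been replaced by a wrapper object)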
+ stdout_encoding = getattr(
+ sys.stdout, 'encoding', 'missing (%s)' % type(sys.stdout).__name__)
+ encoding_str = (
+ '[debug] Encodings: locale %s, fs %s, out %s, pref %s\n' % (
+ locale.getpreferredencoding(),
+ sys.getfilesystemencoding(),
+ stdout_encoding,
+ self.get_encoding()))
+ write_string(encoding_str, encoding=None)
+
+ self._write_string('[debug] youtube-dl version ' + __version__ + '\n')
+ try:
+ sp = subprocess.Popen(
+ ['git', 'rev-parse', '--short', 'HEAD'],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ cwd=os.path.dirname(os.path.abspath(__file__)))
+ out, err = sp.communicate()
+ out = out.decode().strip()
+ if re.match('[0-9a-f]+', out):
+ self._write_string('[debug] Git HEAD: ' + out + '\n')
+ except Exception:
+ # git is not installed or this is not a git checkout; ignore
+ try:
+ # sys.exc_clear() only exists on Python 2
+ sys.exc_clear()
+ except AttributeError:
+ pass
+ self._write_string('[debug] Python version %s - %s\n' % (
+ platform.python_version(), platform_name()))
+
+ exe_versions = FFmpegPostProcessor.get_versions()
+ exe_versions['rtmpdump'] = rtmpdump_version()
+ exe_str = ', '.join(
+ '%s %s' % (exe, v)
+ for exe, v in sorted(exe_versions.items())
+ if v
+ )
+ if not exe_str:
+ exe_str = 'none'
+ self._write_string('[debug] exe versions: %s\n' % exe_str)
+
+ proxy_map = {}
+ for handler in self._opener.handlers:
+ if hasattr(handler, 'proxies'):
+ proxy_map.update(handler.proxies)
+ self._write_string('[debug] Proxy map: ' + compat_str(proxy_map) + '\n')
+
+ def _setup_opener(self):
+ timeout_val = self.params.get('socket_timeout')
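+ # Default to a 600-second (10-minute) socket timeout when --socket-timeout is not given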
+ self._socket_timeout = 600 if timeout_val is None else float(timeout_val)
+
+ opts_cookiefile = self.params.get('cookiefile')
+ opts_proxy = self.params.get('proxy')
+
+ if opts_cookiefile is None:
+ self.cookiejar = compat_cookiejar.CookieJar()
+ else:
+ self.cookiejar = compat_cookiejar.MozillaCookieJar(
+ opts_cookiefile)
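+ # Only load the cookie file if it already exists and is readable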
+ if os.access(opts_cookiefile, os.R_OK):
+ self.cookiejar.load()
+
+ cookie_processor = compat_urllib_request.HTTPCookieProcessor(
+ self.cookiejar)
+ if opts_proxy is not None:
+ if opts_proxy == '':
+ proxies = {}
+ else:
+ proxies = {'http': opts_proxy, 'https': opts_proxy}
+ else:
+ proxies = compat_urllib_request.getproxies()
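+ # getproxies() picks up system proxy settings (e.g. the HTTP_PROXY/HTTPS_PROXY environment variables)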
+ # Reuse the HTTP proxy for HTTPS if no HTTPS proxy is given (https://github.com/rg3/youtube-dl/issues/805)
+ if 'http' in proxies and 'https' not in proxies:
+ proxies['https'] = proxies['http']
+ proxy_handler = compat_urllib_request.ProxyHandler(proxies)
+
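+ # debuglevel=1 makes the HTTP handlers dump request/response traffic (--print-traffic)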
+ debuglevel = 1 if self.params.get('debug_printtraffic') else 0
+ https_handler = make_HTTPS_handler(
+ self.params.get('nocheckcertificate', False), debuglevel=debuglevel)
+ ydlh = YoutubeDLHandler(debuglevel=debuglevel)
+ opener = compat_urllib_request.build_opener(
+ https_handler, proxy_handler, cookie_processor, ydlh)
+ # Delete the default user-agent header, which would otherwise apply in
+ # cases where our custom HTTP handler doesn't come into play
+ # (See https://github.com/rg3/youtube-dl/issues/1309 for details)
+ opener.addheaders = []
+ self._opener = opener
+
+ def encode(self, s):
+ if isinstance(s, bytes):
+ return s # Already encoded
+
+ try:
+ return s.encode(self.get_encoding())
+ except UnicodeEncodeError as err:
+ err.reason = err.reason + '. Check your system encoding configuration or use the --encoding option.'
+ raise
+
+ def get_encoding(self):
+ encoding = self.params.get('encoding')
+ if encoding is None:
+ encoding = preferredencoding()
+ return encoding