[YoutubeDL/utils] Clarify rationale for URL escaping in comment, move escape routines to utils and add some tests

This commit is contained in:
Sergey M․ 2014-09-13 20:59:16 +07:00
parent 37419b4f99
commit d05cfe0600
3 changed files with 56 additions and 13 deletions

View file

@@ -28,7 +28,7 @@ from .utils import (
compat_str,
compat_urllib_error,
compat_urllib_request,
compat_urllib_parse_urlparse,
escape_url,
ContentTooShortError,
date_from_str,
DateRange,
@@ -1243,20 +1243,14 @@ class YoutubeDL(object):
def urlopen(self, req):
""" Start an HTTP download """
# According to RFC 3986, URLs can not contain non-ASCII characters, however this is not
# always respected by websites, some tend to give out URLs with non percent-encoded
# non-ASCII characters (see telemb.py, ard.py [#3412])
# urllib chokes on URLs with non-ASCII characters (see http://bugs.python.org/issue3991)
# Working around by replacing request's original URL with escaped one
# To work around aforementioned issue we will replace request's original URL with
# percent-encoded one
url = req if isinstance(req, compat_str) else req.get_full_url()
def escape(component):
return compat_cookiejar.escape_path(component.encode('utf-8'))
url_parsed = compat_urllib_parse_urlparse(url)
url_escaped = url_parsed._replace(
path=escape(url_parsed.path),
query=escape(url_parsed.query),
fragment=escape(url_parsed.fragment)
).geturl()
url_escaped = escape_url(url)
# Substitute URL if any change after escaping
if url != url_escaped:

View file

@@ -1418,6 +1418,24 @@ def uppercase_escape(s):
lambda m: unicode_escape(m.group(0))[0],
s)
def escape_rfc3986(s):
    """Escape non-ASCII characters as suggested by RFC 3986.

    Percent-encodes every character outside the RFC 3986 reserved and
    unreserved sets while leaving URL-structural characters intact, so the
    result is still a valid URL component.
    """
    # On Python 2 quote() expects a byte string, so encode unicode as UTF-8
    # first; on Python 3 the version check short-circuits and str is passed
    # through unchanged.
    if sys.version_info < (3, 0) and isinstance(s, unicode):
        s = s.encode('utf-8')
    # The safe list keeps '%' (already-encoded sequences) plus the RFC 3986
    # gen-delims/sub-delims untouched so URL structure is preserved.
    return compat_urllib_parse.quote(s, "%/;:@&=+$,!~*'()?#[]")
def escape_url(url):
    """Escape URL as suggested by RFC 3986.

    Splits the URL into its components, percent-encodes the path, params,
    query and fragment, and reassembles it; scheme and netloc are left as-is.
    """
    parsed = compat_urllib_parse_urlparse(url)
    escaped_parts = dict(
        (field, escape_rfc3986(getattr(parsed, field)))
        for field in ('path', 'params', 'query', 'fragment'))
    return parsed._replace(**escaped_parts).geturl()
try:
struct.pack(u'!I', 0)
except TypeError: