Mirror of https://github.com/ytdl-org/youtube-dl.git (synced 2024-11-23 19:35:19 +01:00)

Commit 01ba00ca42: Prepare urllib references for 2/3 compatibility
Parent: e08bee320e
@@ -9,7 +9,6 @@ import socket
 import subprocess
 import sys
 import time
-import urllib2

 if os.name == 'nt':
     import ctypes
@@ -461,7 +460,7 @@ class FileDownloader(object):
                success = self._do_download(filename, info_dict)
            except (OSError, IOError) as err:
                raise UnavailableVideoError
-            except (urllib2.URLError, httplib.HTTPException, socket.error) as err:
+            except (compat_urllib_error.URLError, httplib.HTTPException, socket.error) as err:
                self.trouble(u'ERROR: unable to download video data: %s' % str(err))
                return
            except (ContentTooShortError, ) as err:
@@ -585,8 +584,8 @@ class FileDownloader(object):

        # Do not include the Accept-Encoding header
        headers = {'Youtubedl-no-compression': 'True'}
-        basic_request = urllib2.Request(url, None, headers)
-        request = urllib2.Request(url, None, headers)
+        basic_request = compat_urllib_request.Request(url, None, headers)
+        request = compat_urllib_request.Request(url, None, headers)

        # Establish possible resume length
        if os.path.isfile(encodeFilename(tmpfilename)):
@@ -610,9 +609,9 @@ class FileDownloader(object):
            try:
                if count == 0 and 'urlhandle' in info_dict:
                    data = info_dict['urlhandle']
-                data = urllib2.urlopen(request)
+                data = compat_urllib_request.urlopen(request)
                break
-            except (urllib2.HTTPError, ) as err:
+            except (compat_urllib_error.HTTPError, ) as err:
                if (err.code < 500 or err.code >= 600) and err.code != 416:
                    # Unexpected HTTP error
                    raise
@@ -620,9 +619,9 @@ class FileDownloader(object):
                    # Unable to resume (requested range not satisfiable)
                    try:
                        # Open the connection again without the range header
-                        data = urllib2.urlopen(basic_request)
+                        data = compat_urllib_request.urlopen(basic_request)
                        content_length = data.info()['Content-Length']
-                    except (urllib2.HTTPError, ) as err:
+                    except (compat_urllib_error.HTTPError, ) as err:
                        if err.code < 500 or err.code >= 600:
                            raise
                    else:
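The hunks above only swap the module prefix; the downloader's retry and resume logic is untouched. As a standalone illustration of that control flow, here is a compressed sketch using the compat aliases this commit introduces (the helper name, retry count, and resume handling are illustrative, not taken from the source):

    try:
        import urllib.request as compat_urllib_request
        import urllib.error as compat_urllib_error
    except ImportError:  # Python 2
        import urllib2 as compat_urllib_request
        import urllib2 as compat_urllib_error

    def open_with_resume(url, resume_len=0, retries=3):
        # Illustrative helper: retry transient 5xx errors and fall back to a
        # plain request when the server rejects the resume range with HTTP 416.
        headers = {'Youtubedl-no-compression': 'True'}
        basic_request = compat_urllib_request.Request(url, None, headers)
        request = compat_urllib_request.Request(url, None, headers)
        if resume_len > 0:
            request.add_header('Range', 'bytes=%d-' % resume_len)
        for count in range(retries):
            try:
                return compat_urllib_request.urlopen(request)
            except compat_urllib_error.HTTPError as err:
                if (err.code < 500 or err.code >= 600) and err.code != 416:
                    raise  # unexpected HTTP error, do not retry
                if err.code == 416:
                    # Requested range not satisfiable: reopen without the Range header.
                    return compat_urllib_request.urlopen(basic_request)
                # 5xx: transient server error, retry
        raise compat_urllib_error.URLError('giving up after %d attempts' % retries)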
File diff suppressed because it is too large
@@ -29,7 +29,6 @@ UPDATE_URL_VERSION = 'https://raw.github.com/rg3/youtube-dl/master/LATEST_VERSIO
 UPDATE_URL_EXE = 'https://raw.github.com/rg3/youtube-dl/master/youtube-dl.exe'


-import cookielib
 import getpass
 import optparse
 import os
@@ -38,7 +37,6 @@ import shlex
 import socket
 import subprocess
 import sys
-import urllib2
 import warnings

 from utils import *
@@ -55,7 +53,7 @@ def updateSelf(downloader, filename):

     downloader.to_screen(u'Updating to latest version...')

-    urlv = urllib2.urlopen(UPDATE_URL_VERSION)
+    urlv = compat_urllib_request.urlopen(UPDATE_URL_VERSION)
     newversion = urlv.read().strip()
     if newversion == __version__:
         downloader.to_screen(u'youtube-dl is up-to-date (' + __version__ + ')')
@@ -69,7 +67,7 @@ def updateSelf(downloader, filename):
            sys.exit('ERROR: no write permissions on %s' % directory)

        try:
-            urlh = urllib2.urlopen(UPDATE_URL_EXE)
+            urlh = compat_urllib_request.urlopen(UPDATE_URL_EXE)
            newcontent = urlh.read()
            urlh.close()
            with open(exe + '.new', 'wb') as outf:
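The self-update path changes the same way: urllib2.urlopen becomes compat_urllib_request.urlopen and everything else stays put. A hedged, self-contained sketch of the version check (the function and parameter names are illustrative; the real updateSelf goes on to rewrite the program file):

    try:
        import urllib.request as compat_urllib_request  # Python 3
    except ImportError:
        import urllib2 as compat_urllib_request  # Python 2

    def latest_version(version_url):
        # Fetch the published version string and return it stripped of whitespace.
        urlv = compat_urllib_request.urlopen(version_url)
        try:
            return urlv.read().decode('utf-8', 'replace').strip()
        finally:
            urlv.close()

    # Example: compare against the running version before deciding to update.
    # if latest_version(UPDATE_URL_VERSION) == __version__:
    #     downloader.to_screen(u'youtube-dl is up-to-date (' + __version__ + ')')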
@@ -94,7 +92,7 @@ del "%s"

    else:
        try:
-            urlh = urllib2.urlopen(UPDATE_URL)
+            urlh = compat_urllib_request.urlopen(UPDATE_URL)
            newcontent = urlh.read()
            urlh.close()
        except (IOError, OSError) as err:
@@ -380,10 +378,10 @@ def _real_main():

    # Open appropriate CookieJar
    if opts.cookiefile is None:
-        jar = cookielib.CookieJar()
+        jar = compat_cookiejar.CookieJar()
    else:
        try:
-            jar = cookielib.MozillaCookieJar(opts.cookiefile)
+            jar = compat_cookiejar.MozillaCookieJar(opts.cookiefile)
            if os.path.isfile(opts.cookiefile) and os.access(opts.cookiefile, os.R_OK):
                jar.load()
        except (IOError, OSError) as err:
@@ -414,10 +412,10 @@ def _real_main():
    all_urls = map(lambda url: url.strip(), all_urls)

    # General configuration
-    cookie_processor = urllib2.HTTPCookieProcessor(jar)
-    proxy_handler = urllib2.ProxyHandler()
-    opener = urllib2.build_opener(proxy_handler, cookie_processor, YoutubeDLHandler())
-    urllib2.install_opener(opener)
+    cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar)
+    proxy_handler = compat_urllib_request.ProxyHandler()
+    opener = compat_urllib_request.build_opener(proxy_handler, cookie_processor, YoutubeDLHandler())
+    compat_urllib_request.install_opener(opener)
    socket.setdefaulttimeout(300) # 5 minutes should be enough (famous last words)

    extractors = gen_extractors()
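The "General configuration" block is the standard urllib2 / urllib.request opener setup, now spelled through the compat aliases. A minimal sketch under that assumption; the project's custom YoutubeDLHandler is left out, so this is the stock handler chain rather than the exact opener youtube-dl installs:

    import socket

    try:
        import urllib.request as compat_urllib_request
        import http.cookiejar as compat_cookiejar
    except ImportError:  # Python 2
        import urllib2 as compat_urllib_request
        import cookielib as compat_cookiejar

    def configure_opener(cookiefile=None):
        # Build a cookie-aware, proxy-aware opener and install it process-wide,
        # mirroring the flow of the hunk above.
        if cookiefile is None:
            jar = compat_cookiejar.CookieJar()
        else:
            jar = compat_cookiejar.MozillaCookieJar(cookiefile)
        cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar)
        proxy_handler = compat_urllib_request.ProxyHandler()  # honours *_proxy environment variables
        opener = compat_urllib_request.build_opener(proxy_handler, cookie_processor)
        compat_urllib_request.install_opener(opener)
        socket.setdefaulttimeout(300)  # same 5-minute default as above
        return opener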
@@ -9,7 +9,6 @@ import os
 import re
 import sys
 import zlib
-import urllib2
 import email.utils
 import json

@@ -31,6 +30,26 @@ try:
 except NameError:
     compat_str = str

+try:
+    import urllib.request as compat_urllib_request
+except ImportError: # Python 2
+    import urllib2 as compat_urllib_request
+
+try:
+    import urllib.error as compat_urllib_error
+except ImportError: # Python 2
+    import urllib2 as compat_urllib_error
+
+try:
+    import urllib.parse as compat_urllib_parse
+except ImportError: # Python 2
+    import urllib2 as compat_urllib_parse
+
+try:
+    import http.cookiejar as compat_cookiejar
+except ImportError: # Python 2
+    import cookielib as compat_cookiejar
+
 def preferredencoding():
     """Get preferred encoding.

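This hunk is the heart of the commit: each urllib-related module gets a compat_* alias that resolves to the Python 3 name first and falls back to the Python 2 one. A minimal usage sketch of that pattern (the fetch helper and its error handling are illustrative, not part of the source):

    try:
        import urllib.request as compat_urllib_request  # Python 3
        import urllib.error as compat_urllib_error
    except ImportError:  # Python 2
        import urllib2 as compat_urllib_request
        import urllib2 as compat_urllib_error

    def fetch(url):
        # Callers only ever see the compat_* names, so the same code runs on 2 and 3.
        try:
            handle = compat_urllib_request.urlopen(url)
            try:
                return handle.read()
            finally:
                handle.close()
        except compat_urllib_error.HTTPError as err:
            # err.code is available under both spellings on both interpreters.
            raise SystemExit('HTTP error %d while fetching %s' % (err.code, url))

    # data = fetch('http://example.com/')  # any reachable HTTP URL works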
@@ -320,7 +339,7 @@ class Trouble(Exception):
     FileDownloader.trouble
     """

-class YoutubeDLHandler(urllib2.HTTPHandler):
+class YoutubeDLHandler(compat_urllib_request.HTTPHandler):
     """Handler for HTTP requests and responses.

     This class, when installed with an OpenerDirector, automatically adds
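Because YoutubeDLHandler now derives from compat_urllib_request.HTTPHandler, the same subclass works as an opener processor on both interpreters. A stripped-down illustration of that hook mechanism (this is not the project's handler; the class name and header handling are invented for the example):

    try:
        import urllib.request as compat_urllib_request  # Python 3
    except ImportError:  # Python 2
        import urllib2 as compat_urllib_request

    class NoCompressionHandler(compat_urllib_request.HTTPHandler):
        # The OpenerDirector calls http_request() for every outgoing HTTP request,
        # which lets a handler subclass rewrite headers before the request is sent.
        def http_request(self, req):
            # Keep the stock header handling, then drop Accept-encoding when an
            # uncompressed transfer was requested.
            req = compat_urllib_request.HTTPHandler.http_request(self, req)
            if 'Youtubedl-no-compression' in req.headers:
                if 'Accept-encoding' in req.headers:
                    del req.headers['Accept-encoding']
            return req

    opener = compat_urllib_request.build_opener(NoCompressionHandler())
    # opener.open('http://example.com/') would pass through http_request() above.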
@@ -347,9 +366,9 @@ class YoutubeDLHandler(urllib2.HTTPHandler):

     @staticmethod
     def addinfourl_wrapper(stream, headers, url, code):
-        if hasattr(urllib2.addinfourl, 'getcode'):
-            return urllib2.addinfourl(stream, headers, url, code)
-        ret = urllib2.addinfourl(stream, headers, url)
+        if hasattr(compat_urllib_request.addinfourl, 'getcode'):
+            return compat_urllib_request.addinfourl(stream, headers, url, code)
+        ret = compat_urllib_request.addinfourl(stream, headers, url)
         ret.code = code
         return ret
