Mirror of https://github.com/ytdl-org/youtube-dl.git
Synced 2024-11-30 07:38:18 +01:00

Compare commits: 6fece0a96b...9f4d83ff42 (9 commits)
Commits in this range:

- 9f4d83ff42
- 25124bd640
- 78da22489b
- 557dbac173
- cdf40b6aa6
- 3f6d2bd76f
- 88f28f620b
- f35b757c82
- 45495228b7

.github/workflows/ci.yml (vendored): 24 changed lines

@@ -7,9 +7,10 @@ jobs:
     strategy:
       fail-fast: true
       matrix:
-        os: [ubuntu-18.04]
+        os: [ubuntu-20.04]
         # TODO: python 2.6
-        python-version: [2.7, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8, 3.9, pypy-2.7, pypy-3.6, pypy-3.7]
+        # TODO: restore support for 3.3, 3.4
+        python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9, pypy-2.7, pypy-3.6, pypy-3.7]
         python-impl: [cpython]
         ytdl-test-set: [core, download]
         run-tests-ext: [sh]

@@ -26,26 +27,27 @@ jobs:
           ytdl-test-set: download
           run-tests-ext: bat
         # jython
-        - os: ubuntu-18.04
+        - os: ubuntu-20.04
           python-impl: jython
           ytdl-test-set: core
           run-tests-ext: sh
-        - os: ubuntu-18.04
+        - os: ubuntu-20.04
           python-impl: jython
           ytdl-test-set: download
           run-tests-ext: sh
     steps:
-    - uses: actions/checkout@v2
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
-      if: ${{ matrix.python-impl == 'cpython' }}
+    - uses: actions/checkout@v3
+    - name: Set up supported Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v4
+      if: ${{ matrix.python-impl == 'cpython' && ! contains(fromJSON('["3.3", "3.4"]'), matrix.python-version) }}
       with:
         python-version: ${{ matrix.python-version }}
     - name: Set up Java 8
      if: ${{ matrix.python-impl == 'jython' }}
-      uses: actions/setup-java@v1
+      uses: actions/setup-java@v2
       with:
         java-version: 8
+        distribution: 'zulu'
     - name: Install Jython
       if: ${{ matrix.python-impl == 'jython' }}
       run: |

@@ -70,9 +72,9 @@ jobs:
     name: Linter
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Set up Python
-      uses: actions/setup-python@v2
+      uses: actions/setup-python@v4
       with:
         python-version: 3.9
     - name: Install flake8
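
The new `if:` condition keeps the 3.3/3.4 rows in the matrix but skips `actions/setup-python@v4` for them, since those interpreters are not available from that action on ubuntu-20.04. A rough Python sketch of the same predicate (the function name and test values are illustrative, not part of the workflow):

```python
import json


def should_run_setup_python(python_impl, python_version):
    # Mirrors: matrix.python-impl == 'cpython'
    #          && ! contains(fromJSON('["3.3", "3.4"]'), matrix.python-version)
    skipped = json.loads('["3.3", "3.4"]')  # the fromJSON(...) literal
    return python_impl == 'cpython' and python_version not in skipped


assert should_run_setup_python('cpython', '3.9')
assert not should_run_setup_python('cpython', '3.4')  # stays in the matrix, set up another way
assert not should_run_setup_python('jython', '2.7')   # Jython is installed by the Java steps
```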

devscripts/cli_to_api.py

@@ -49,15 +49,34 @@ def cli_to_api(*opts):
 
     # from https://github.com/yt-dlp/yt-dlp/issues/5859#issuecomment-1363938900
     default = parsed_options([])
-    diff = dict((k, v) for k, v in parsed_options(opts).items() if default[k] != v)
+
+    def neq_opt(a, b):
+        if a == b:
+            return False
+        if a is None and repr(type(object)).endswith(".utils.DateRange'>"):
+            return '0001-01-01 - 9999-12-31' != '{0}'.format(b)
+        return a != b
+
+    diff = dict((k, v) for k, v in parsed_options(opts).items() if neq_opt(default[k], v))
     if 'postprocessors' in diff:
         diff['postprocessors'] = [pp for pp in diff['postprocessors'] if pp not in default['postprocessors']]
     return diff
 
 
 def main():
-    from pprint import pprint
-    pprint(cli_to_api(*sys.argv))
+    from pprint import PrettyPrinter
+
+    pprint = PrettyPrinter()
+    super_format = pprint.format
+
+    def format(object, context, maxlevels, level):
+        if repr(type(object)).endswith(".utils.DateRange'>"):
+            return '{0}: {1}>'.format(repr(object)[:-2], object), True, False
+        return super_format(object, context, maxlevels, level)
+
+    pprint.format = format
+
+    pprint.pprint(cli_to_api(*sys.argv))
 
 
 if __name__ == '__main__':
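
The new `neq_opt()` appears intended to stop the helper from reporting a difference when an option's default is `None` but parsing produces a `DateRange` spanning all dates. A minimal standalone sketch of that idea, with a stand-in `DateRange` class and the type check simplified to `isinstance` (both assumptions, not taken from the diff):

```python
# Sketch only: DateRange here stands in for youtube_dl.utils.DateRange.
class DateRange(object):
    def __init__(self, start='0001-01-01', end='9999-12-31'):
        self.start, self.end = start, end

    def __str__(self):
        return '{0} - {1}'.format(self.start, self.end)


def neq_opt(a, b):
    # a: value from the defaults, b: value parsed from the supplied CLI args
    if a == b:
        return False
    if a is None and isinstance(b, DateRange):
        # only a real difference if b is narrower than the "all dates" range
        return '0001-01-01 - 9999-12-31' != '{0}'.format(b)
    return a != b


print(neq_opt(None, DateRange()))                            # False: same effective option
print(neq_opt(None, DateRange('2023-01-01', '2023-12-31')))  # True: a real restriction
```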

test/test_downloader_http.py

@@ -88,7 +88,7 @@ class TestHttpFD(unittest.TestCase):
         self.assertTrue(downloader.real_download(filename, {
             'url': 'http://127.0.0.1:%d/%s' % (self.port, ep),
         }))
-        self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE)
+        self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE, ep)
         try_rm(encodeFilename(filename))
 
     def download_all(self, params):
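
Passing the endpoint name `ep` as the third argument to `assertEqual` makes a size mismatch report which test endpoint produced it. A generic illustration of that `unittest` feature (not part of the youtube-dl test suite):

```python
import unittest


class SizeTest(unittest.TestCase):
    def test_sizes(self):
        # The third argument is appended to the failure output, so a mismatch
        # would read e.g. "AssertionError: 9 != 10 : no-content-length",
        # identifying the endpoint that failed.
        for ep, size in [('regular', 10), ('no-content-length', 10)]:
            self.assertEqual(size, 10, ep)


if __name__ == '__main__':
    unittest.main()
```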

test/test_utils.py

@@ -1563,6 +1563,7 @@ Line 1
         self.assertEqual(variadic(None), (None, ))
         self.assertEqual(variadic('spam'), ('spam', ))
         self.assertEqual(variadic('spam', allowed_types=dict), 'spam')
+        self.assertEqual(variadic('spam', allowed_types=[dict]), 'spam')
 
     def test_traverse_obj(self):
         _TEST_DATA = {
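
The new assertion covers `allowed_types` passed as a list rather than a tuple, which `variadic()` now normalises (see the youtube_dl/utils.py hunk further down). A standalone sketch of the behaviour being tested, with `compat_str` simplified to `str`:

```python
import collections.abc as cabc


def variadic(x, allowed_types=(str, bytes, dict)):
    # Sketch of youtube_dl.utils.variadic for illustration only.
    if not isinstance(allowed_types, tuple) and isinstance(allowed_types, cabc.Iterable):
        allowed_types = tuple(allowed_types)
    return x if isinstance(x, cabc.Iterable) and not isinstance(x, allowed_types) else (x,)


assert variadic(None) == (None,)                         # non-iterables are wrapped
assert variadic('spam') == ('spam',)                     # strings are wrapped, not iterated
assert variadic('spam', allowed_types=dict) == 'spam'    # a bare type also works
assert variadic('spam', allowed_types=[dict]) == 'spam'  # the newly tested list form
```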

youtube_dl/compat.py

@@ -3127,6 +3127,16 @@ else:
         return ctypes.WINFUNCTYPE(*args, **kwargs)
 
 
+if sys.version_info < (3, 0):
+    # open(file, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True) not: opener=None
+    def compat_open(file_, *args, **kwargs):
+        if len(args) > 6 or 'opener' in kwargs:
+            raise ValueError('open: unsupported argument "opener"')
+        return io.open(file_, *args, **kwargs)
+else:
+    compat_open = open
+
+
 legacy = [
     'compat_HTMLParseError',
     'compat_HTMLParser',

@@ -3185,6 +3195,7 @@ __all__ = [
     'compat_kwargs',
     'compat_map',
     'compat_numeric_types',
+    'compat_open',
     'compat_ord',
     'compat_os_name',
     'compat_os_path_expanduser',
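
`compat_open` gives Python 2 an `io.open`-style `open()` that accepts `encoding=`, which `parseOpts` below relies on when reading configuration files. A small usage sketch (the file name is illustrative):

```python
from youtube_dl.compat import compat_open as open

# On Python 3 this is the builtin open(); on Python 2 it forwards to io.open(),
# so the encoding argument behaves the same way on both.
with open('example-config.txt', 'w', encoding='utf-8') as f:
    f.write(u'--no-mtime\n')

with open('example-config.txt', encoding='utf-8') as f:
    print(f.read())
```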

youtube_dl/downloader/dash.py

@@ -1,5 +1,7 @@
 from __future__ import unicode_literals
 
+import itertools
+
 from .fragment import FragmentFD
 from ..compat import compat_urllib_error
 from ..utils import (

@@ -30,15 +32,13 @@ class DashSegmentsFD(FragmentFD):
         fragment_retries = self.params.get('fragment_retries', 0)
         skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)
 
-        frag_index = 0
-        for i, fragment in enumerate(fragments):
-            frag_index += 1
+        for frag_index, fragment in enumerate(fragments, 1):
             if frag_index <= ctx['fragment_index']:
                 continue
             # In DASH, the first segment contains necessary headers to
             # generate a valid MP4 file, so always abort for the first segment
-            fatal = i == 0 or not skip_unavailable_fragments
-            for count in range(fragment_retries + 1):
+            fatal = frag_index == 1 or not skip_unavailable_fragments
+            for count in itertools.count():
                 try:
                     fragment_url = fragment.get('url')
                     if not fragment_url:

@@ -48,7 +48,6 @@ class DashSegmentsFD(FragmentFD):
                     if not success:
                         return False
                     self._append_fragment(ctx, frag_content)
-                    break
                 except compat_urllib_error.HTTPError as err:
                     # YouTube may often return 404 HTTP error for a fragment causing the
                     # whole download to fail. However if the same fragment is immediately

@@ -58,13 +57,14 @@ class DashSegmentsFD(FragmentFD):
                     # HTTP error.
                     if count < fragment_retries:
                         self.report_retry_fragment(err, frag_index, count + 1, fragment_retries)
                         continue
                 except DownloadError:
                     # Don't retry fragment if error occurred during HTTP downloading
-                    # itself since it has own retry settings
-                    if not fatal:
-                        self.report_skip_fragment(frag_index)
-                        break
-                    raise
+                    # itself since it has its own retry settings
+                    if fatal:
+                        raise
+                    self.report_skip_fragment(frag_index)
+                break
+
             if count >= fragment_retries:
                 if not fatal:
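
The retry loop now runs on `itertools.count()` and is left through a single `break` after the try/except, so the `count >= fragment_retries` check after the loop decides whether to skip the fragment or give up. A self-contained sketch of that control flow, with the downloader internals replaced by a stub (all names here are illustrative):

```python
import itertools


def download_with_retries(download_once, fragment_retries=3, fatal=True):
    """Sketch of the retry pattern above; download_once() may raise IOError."""
    for count in itertools.count():
        try:
            download_once()
        except IOError as err:
            if count < fragment_retries:
                print('retry %d/%d after %r' % (count + 1, fragment_retries, err))
                continue
        break  # success, or retries exhausted

    if count >= fragment_retries:
        if not fatal:
            print('skipping fragment')
            return False
        raise IOError('giving up after %d retries' % fragment_retries)
    return True


attempts = {'n': 0}


def flaky():
    # fails twice, then succeeds
    attempts['n'] += 1
    if attempts['n'] < 3:
        raise IOError('boom')


print(download_with_retries(flaky))  # True, after two retries
```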

youtube_dl/downloader/http.py

@@ -141,7 +141,8 @@ class HttpFD(FileDownloader):
                        # Content-Range is either not present or invalid. Assuming remote webserver is
                        # trying to send the whole file, resume is not possible, so wiping the local file
                        # and performing entire redownload
-                        self.report_unable_to_resume()
+                        if range_start > 0:
+                            self.report_unable_to_resume()
                        ctx.resume_len = 0
                        ctx.open_mode = 'wb'
                ctx.data_len = int_or_none(ctx.data.info().get('Content-length', None))
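
The warning is now only emitted when a resume was actually requested (`range_start > 0`); a fresh download that gets no usable Content-Range is simply restarted in full without a complaint. A trivial sketch of the condition (function and names are illustrative, not the downloader's API):

```python
def handle_bad_content_range(range_start, report_unable_to_resume):
    # Only complain if we actually asked the server to resume from a nonzero offset.
    if range_start > 0:
        report_unable_to_resume()
    return 0, 'wb'  # reset resume_len and reopen the file for a full rewrite


resume_len, open_mode = handle_bad_content_range(0, lambda: print('unable to resume'))
print(resume_len, open_mode)  # 0 wb, with no warning for a fresh download
```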

youtube_dl/extractor/youtube.py

@@ -31,6 +31,7 @@ from ..utils import (
     get_element_by_attribute,
     int_or_none,
     js_to_json,
+    LazyList,
     merge_dicts,
     mimetype2ext,
     parse_codecs,

@@ -1986,9 +1987,19 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
         itags = []
         itag_qualities = {}
         q = qualities(['tiny', 'small', 'medium', 'large', 'hd720', 'hd1080', 'hd1440', 'hd2160', 'hd2880', 'highres'])
+        CHUNK_SIZE = 10 << 20
+
         streaming_data = player_response.get('streamingData') or {}
         streaming_formats = streaming_data.get('formats') or []
         streaming_formats.extend(streaming_data.get('adaptiveFormats') or [])
+
+        def build_fragments(f):
+            return LazyList({
+                'url': update_url_query(f['url'], {
+                    'range': '{0}-{1}'.format(range_start, min(range_start + CHUNK_SIZE - 1, f['filesize']))
+                })
+            } for range_start in range(0, f['filesize'], CHUNK_SIZE))
+
         for fmt in streaming_formats:
             if fmt.get('targetDurationSec') or fmt.get('drmFamilies'):
                 continue

@@ -2041,28 +2052,18 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
             if mobj:
                 dct['ext'] = mimetype2ext(mobj.group(1))
                 dct.update(parse_codecs(mobj.group(2)))
-            no_audio = dct.get('acodec') == 'none'
-            no_video = dct.get('vcodec') == 'none'
-            if no_audio:
-                dct['vbr'] = tbr
-            if no_video:
-                dct['abr'] = tbr
-            if no_audio or no_video:
-                CHUNK_SIZE = 10 << 20
+            single_stream = 'none' in (dct.get(c) for c in ('acodec', 'vcodec'))
+            if single_stream and dct.get('ext'):
+                dct['container'] = dct['ext'] + '_dash'
+            if single_stream or itag == '17':
                 # avoid Youtube throttling
                 dct.update({
                     'protocol': 'http_dash_segments',
-                    'fragments': [{
-                        'url': update_url_query(dct['url'], {
-                            'range': '{0}-{1}'.format(range_start, min(range_start + CHUNK_SIZE - 1, dct['filesize']))
-                        })
-                    } for range_start in range(0, dct['filesize'], CHUNK_SIZE)]
+                    'fragments': build_fragments(dct),
                 } if dct['filesize'] else {
                     'downloader_options': {'http_chunk_size': CHUNK_SIZE}  # No longer useful?
                 })
 
-            if dct.get('ext'):
-                dct['container'] = dct['ext'] + '_dash'
             formats.append(dct)
 
         hls_manifest_url = streaming_data.get('hlsManifestUrl')
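
`build_fragments()` splits a progressive URL into `http_dash_segments` fragments of at most `CHUNK_SIZE` (10 MiB) each by adding a `range=start-end` query parameter, and wraps them in `LazyList` so the generator can be evaluated lazily and reused. A standalone sketch of the range arithmetic, with `update_url_query` and `LazyList` replaced by plain equivalents:

```python
CHUNK_SIZE = 10 << 20  # 10 MiB, as in the extractor


def build_ranges(filesize, chunk_size=CHUNK_SIZE):
    # Same arithmetic as build_fragments() above, yielding only the 'range' values.
    for range_start in range(0, filesize, chunk_size):
        yield '{0}-{1}'.format(range_start, min(range_start + chunk_size - 1, filesize))


print(list(build_ranges(25 << 20)))
# ['0-10485759', '10485760-20971519', '20971520-26214400']
```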

youtube_dl/options.py

@@ -11,6 +11,7 @@ from .compat import (
     compat_get_terminal_size,
     compat_getenv,
     compat_kwargs,
+    compat_open as open,
     compat_shlex_split,
 )
 from .utils import (

@@ -41,14 +42,11 @@ def _hide_login_info(opts):
 
 def parseOpts(overrideArguments=None):
     def _readOptions(filename_bytes, default=[]):
         try:
-            optionf = open(filename_bytes)
+            optionf = open(filename_bytes, encoding=preferredencoding())
         except IOError:
             return default  # silently skip if file is not present
         try:
+            # FIXME: https://github.com/ytdl-org/youtube-dl/commit/dfe5fa49aed02cf36ba9f743b11b0903554b5e56
             contents = optionf.read()
-            if sys.version_info < (3,):
-                contents = contents.decode(preferredencoding())
             res = compat_shlex_split(contents, comments=True)
         finally:
             optionf.close()

@@ -733,9 +731,13 @@ def parseOpts(overrideArguments=None):
         '--no-part',
         action='store_true', dest='nopart', default=False,
         help='Do not use .part files - write directly into output file')
+    filesystem.add_option(
+        '--mtime',
+        action='store_true', dest='updatetime', default=True,
+        help='Use the Last-modified header to set the file modification time (default)')
     filesystem.add_option(
         '--no-mtime',
-        action='store_false', dest='updatetime', default=True,
+        action='store_false', dest='updatetime',
         help='Do not use the Last-modified header to set the file modification time')
     filesystem.add_option(
         '--write-description',
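
Adding `--mtime` as an explicit `store_true` option (default on) gives a way to switch the behaviour back on after `--no-mtime`, e.g. when a configuration file disables it and a single invocation wants it back; both options write the same `dest`, so the later flag wins. A small optparse sketch of the pattern (the option names are reused from the diff, the parser itself is illustrative):

```python
import optparse

parser = optparse.OptionParser()
parser.add_option(
    '--mtime',
    action='store_true', dest='updatetime', default=True,
    help='Use the Last-modified header to set the file modification time (default)')
parser.add_option(
    '--no-mtime',
    action='store_false', dest='updatetime',
    help='Do not use the Last-modified header to set the file modification time')

opts, _ = parser.parse_args(['--no-mtime', '--mtime'])
print(opts.updatetime)  # True: the last flag on the command line wins
```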

youtube_dl/utils.py

@@ -3190,6 +3190,10 @@ class DateRange(object):
     def __str__(self):
         return '%s - %s' % (self.start.isoformat(), self.end.isoformat())
 
+    def __eq__(self, other):
+        return (isinstance(other, DateRange)
+                and self.start == other.start and self.end == other.end)
+
 
 def platform_name():
     """ Returns the platform name as a compat_str """

@@ -4213,6 +4217,8 @@ def multipart_encode(data, boundary=None):
 
 
 def variadic(x, allowed_types=(compat_str, bytes, dict)):
+    if not isinstance(allowed_types, tuple) and isinstance(allowed_types, compat_collections_abc.Iterable):
+        allowed_types = tuple(allowed_types)
     return x if isinstance(x, compat_collections_abc.Iterable) and not isinstance(x, allowed_types) else (x,)
 
 
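
`DateRange.__eq__` makes two ranges with the same start and end compare equal; previously comparison fell back to object identity, so identically constructed ranges were "unequal", which is what the option-diffing in cli_to_api.py above has to work around. A short usage sketch, assuming `youtube_dl` is importable and using the YYYYMMDD form the `--dateafter`/`--datebefore` options accept:

```python
from youtube_dl.utils import DateRange

# Two ranges built from the same bounds now compare equal...
print(DateRange('20230101', '20231231') == DateRange('20230101', '20231231'))  # True
# ...and two unbounded default ranges are equal to each other.
print(DateRange() == DateRange())              # True
print(DateRange() == DateRange('20230101'))    # False: different start
```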