apply future Ruff rules

Mozi 2024-08-20 07:42:53 +00:00
parent 1e23756e50
commit dcefdfe508
2 changed files with 64 additions and 68 deletions

yt_dlp/downloader/niconico.py

@@ -26,7 +26,7 @@ class NiconicoDmcFD(FileDownloader):
def real_download(self, filename, info_dict):
from ..extractor.niconico import NiconicoIE
self.to_screen('[%s] Downloading from DMC' % self.FD_NAME)
self.to_screen(f'[{self.FD_NAME}] Downloading from DMC')
ie = NiconicoIE(self.ydl)
info_dict, heartbeat_info_dict = ie._get_heartbeat_info(info_dict)
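The hunk above swaps printf-style % interpolation for an f-string, and the same rewrite recurs throughout both files (presumably Ruff's pyupgrade rules UP031/UP032). A minimal, self-contained sketch of the pattern, where fd_name is only a stand-in for self.FD_NAME:

fd_name = 'niconico_dmc'  # stand-in value for self.FD_NAME
old_msg = '[%s] Downloading from DMC' % fd_name   # printf-style, flagged by the linter
new_msg = f'[{fd_name}] Downloading from DMC'     # f-string replacement as in the diff
assert old_msg == new_msg  # the rendered message is identical; only the syntax changes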
@@ -45,7 +45,7 @@ def heartbeat():
try:
self.ydl.urlopen(request).read()
except Exception:
self.to_screen('[%s] Heartbeat failed' % self.FD_NAME)
self.to_screen(f'[{self.FD_NAME}] Heartbeat failed')
with heartbeat_lock:
if not download_complete:
@@ -97,14 +97,14 @@ def communicate_ws():
'quality': 'abr',
'protocol': 'hls',
'latency': live_latency,
'chasePlay': False
'chasePlay': False,
},
'room': {
'protocol': 'webSocket',
'commentable': True
'commentable': True,
},
'reconnect': True,
}
},
}))
with self.ws:
while True:
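This hunk only adds trailing commas to multi-line dict literals (presumably Ruff's flake8-commas rule COM812); the WebSocket payload that gets serialized is unchanged. A small self-contained check using two keys taken from the diff:

import json

without_comma = {
    'protocol': 'webSocket',
    'commentable': True
}
with_comma = {
    'protocol': 'webSocket',
    'commentable': True,  # trailing comma: later additions touch only their own line
}
# The objects and their JSON serialization are identical; the comma is purely stylistic.
assert without_comma == with_comma
assert json.dumps(without_comma) == json.dumps(with_comma)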
@@ -131,7 +131,7 @@ def communicate_ws():
elif self.ydl.params.get('verbose', False):
if len(recv) > 100:
recv = recv[:100] + '...'
self.to_screen('[debug] Server said: %s' % recv)
self.to_screen(f'[debug] Server said: {recv}')
stopped = threading.Event()
@@ -146,7 +146,7 @@ def ws_main():
self.m3u8_lock.clear() # m3u8 url may be changed
self.to_screen('[%s] %s: Connection error occured, reconnecting after %d seconds: %s' % ('niconico:live', video_id, self._WEBSOCKET_RECONNECT_DELAY, str_or_none(e)))
self.to_screen('[{}] {}: Connection error occured, reconnecting after {} seconds: {}'.format('niconico:live', video_id, self._WEBSOCKET_RECONNECT_DELAY, str_or_none(e)))
time.sleep(self._WEBSOCKET_RECONNECT_DELAY)
self.m3u8_lock.set() # Release possible locks
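Here the %-interpolation becomes str.format() rather than an f-string, plausibly because one of the arguments is itself a string literal and the line is already long; the two spellings render the same text. A sketch with stand-in values for the video id, the delay, and the error:

video_id = 'lv000000000'  # stand-in values; the real ones come from the surrounding method
delay = 10
err = 'timed out'
old = '[%s] %s: Connection error occured, reconnecting after %d seconds: %s' % (
    'niconico:live', video_id, delay, err)
new = '[{}] {}: Connection error occured, reconnecting after {} seconds: {}'.format(
    'niconico:live', video_id, delay, err)
assert old == new  # %d on an int and a positional {} produce the same digits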
@@ -216,7 +216,7 @@ def real_download(self, filename, info_dict):
# Refresh master m3u8 (if possible) and get the url of the previously-chose format
master_m3u8_url = ws_context._master_m3u8_url()
formats = ie._extract_m3u8_formats(
master_m3u8_url, video_id, query={"start": downloaded_duration}, live=False, note=False, fatal=False)
master_m3u8_url, video_id, query={'start': downloaded_duration}, live=False, note=False, fatal=False)
media_m3u8_url = traverse_obj(formats, (format_index, {dict}, 'url'), get_all=False)
if not media_m3u8_url:
raise DownloadError('Unable to get playlist')
@@ -244,7 +244,7 @@ def real_download(self, filename, info_dict):
return self._finish_frag_download(ctx, info_dict)
class DurationLimiter():
class DurationLimiter:
def __init__(self, target):
self.target = target
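The last change in this file drops the empty parentheses from the DurationLimiter class statement (presumably Ruff's pyupgrade rule UP039). Both spellings define an ordinary class inheriting from object; a quick self-contained check with throwaway class names:

class WithParens():  # redundant empty parentheses
    pass

class WithoutParens:  # equivalent, preferred spelling
    pass

# Nothing about the class changes; both implicitly inherit from object.
assert WithParens.__bases__ == WithoutParens.__bases__ == (object,)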

yt_dlp/extractor/niconico.py

@@ -61,7 +61,7 @@ def _perform_login(self, username, password):
urljoin('https://account.nicovideo.jp', post_url), None,
note='Performing MFA', errnote='Unable to complete MFA',
data=urlencode_postdata({
'otp': self._get_tfa_info('6 digits code')
'otp': self._get_tfa_info('6 digits code'),
}), headers={
'Content-Type': 'application/x-www-form-urlencoded',
})
@@ -267,7 +267,7 @@ def ping():
'http_output_download_parameters': {
'use_ssl': yesno(session_api_data['urls'][0]['isSsl']),
'use_well_known_port': yesno(session_api_data['urls'][0]['isWellKnownPort']),
}
},
}
elif dmc_protocol == 'hls':
protocol = 'm3u8'
@@ -280,14 +280,14 @@ def ping():
'transfer_preset': '',
'use_ssl': yesno(session_api_data['urls'][0]['isSsl']),
'use_well_known_port': yesno(session_api_data['urls'][0]['isWellKnownPort']),
}
},
}
if 'hls_encryption' in parsed_token and encryption:
protocol_parameters['hls_parameters']['encryption'] = {
parsed_token['hls_encryption']: {
'encrypted_key': encryption['encryptedKey'],
'key_uri': encryption['keyUri'],
}
},
}
else:
protocol = 'm3u8_native'
@@ -298,7 +298,7 @@ def ping():
session_api_endpoint['url'], video_id,
query={'_format': 'json'},
headers={'Content-Type': 'application/json'},
note='Downloading JSON metadata for %s' % info_dict['format_id'],
note='Downloading JSON metadata for {}'.format(info_dict['format_id']),
data=json.dumps({
'session': {
'client_info': {
@@ -308,7 +308,7 @@ def ping():
'auth_type': try_get(session_api_data, lambda x: x['authTypes'][session_api_data['protocols'][0]]),
'content_key_timeout': session_api_data.get('contentKeyTimeout'),
'service_id': 'nicovideo',
'service_user_id': session_api_data.get('serviceUserId')
'service_user_id': session_api_data.get('serviceUserId'),
},
'content_id': session_api_data.get('contentId'),
'content_src_id_sets': [{
@@ -316,34 +316,34 @@ def ping():
'src_id_to_mux': {
'audio_src_ids': [audio_src_id],
'video_src_ids': [video_src_id],
}
}]
},
}],
}],
'content_type': 'movie',
'content_uri': '',
'keep_method': {
'heartbeat': {
'lifetime': session_api_data.get('heartbeatLifetime')
}
'lifetime': session_api_data.get('heartbeatLifetime'),
},
},
'priority': session_api_data['priority'],
'protocol': {
'name': 'http',
'parameters': {
'http_parameters': {
'parameters': protocol_parameters
}
}
'parameters': protocol_parameters,
},
},
},
'recipe_id': session_api_data.get('recipeId'),
'session_operation_auth': {
'session_operation_auth_by_signature': {
'signature': session_api_data.get('signature'),
'token': session_api_data.get('token'),
}
},
},
'timing_constraint': 'unlimited'
}
'timing_constraint': 'unlimited',
},
}).encode())
info_dict['url'] = session_response['data']['session']['content_uri']
@@ -355,7 +355,7 @@ def ping():
'data': json.dumps(session_response['data']),
# interval, convert milliseconds to seconds, then halve to make a buffer.
'interval': float_or_none(session_api_data.get('heartbeatLifetime'), scale=3000),
'ping': ping
'ping': ping,
}
return info_dict, heartbeat_info_dict
@@ -371,7 +371,7 @@ def _extract_format_for_quality(self, video_id, audio_quality, video_quality, dm
vid_qual_label = traverse_obj(video_quality, ('metadata', 'label'))
return {
'url': 'niconico_dmc:%s/%s/%s' % (video_id, video_quality['id'], audio_quality['id']),
'url': 'niconico_dmc:{}/{}/{}'.format(video_id, video_quality['id'], audio_quality['id']),
'format_id': format_id,
'format_note': join_nonempty('DMC', vid_qual_label, dmc_protocol.upper(), delim=' '),
'ext': 'mp4', # Session API are used in HTML5, which always serves mp4
@@ -392,7 +392,7 @@ def _extract_format_for_quality(self, video_id, audio_quality, video_quality, dm
'http_headers': {
'Origin': 'https://www.nicovideo.jp',
'Referer': 'https://www.nicovideo.jp/watch/' + video_id,
}
},
}
def _yield_dmc_formats(self, api_data, video_id):
@@ -419,7 +419,7 @@ def _yield_dms_formats(self, api_data, video_id):
dms_m3u8_url = self._download_json(
f'https://nvapi.nicovideo.jp/v1/watch/{video_id}/access-rights/hls', video_id,
data=json.dumps({
'outputs': list(itertools.product((v['id'] for v in videos), (a['id'] for a in audios)))
'outputs': list(itertools.product((v['id'] for v in videos), (a['id'] for a in audios))),
}).encode(), query={'actionTrackId': track_id}, headers={
'x-access-right-key': access_key,
'x-frontend-id': 6,
@@ -467,7 +467,7 @@ def _real_extract(self, url):
except ExtractorError as e:
try:
api_data = self._download_json(
'https://www.nicovideo.jp/api/watch/v3/%s?_frontendId=6&_frontendVersion=0&actionTrackId=AAAAAAAAAA_%d' % (video_id, round(time.time() * 1000)), video_id,
f'https://www.nicovideo.jp/api/watch/v3/{video_id}?_frontendId=6&_frontendVersion=0&actionTrackId=AAAAAAAAAA_{round(time.time() * 1000)}', video_id,
note='Downloading API JSON', errnote='Unable to fetch data')['data']
except ExtractorError:
if not isinstance(e.cause, HTTPError):
@@ -589,7 +589,7 @@ class NiconicoPlaylistBaseIE(InfoExtractor):
_API_HEADERS = {
'X-Frontend-ID': '6',
'X-Frontend-Version': '0',
'X-Niconico-Language': 'en-us'
'X-Niconico-Language': 'en-us',
}
def _call_api(self, list_id, resource, query):
@@ -604,7 +604,7 @@ def _parse_owner(item):
def _fetch_page(self, list_id, page):
page += 1
resp = self._call_api(list_id, 'page %d' % page, {
resp = self._call_api(list_id, f'page {page}', {
'page': page,
'pageSize': self._PAGE_SIZE,
})
@@ -792,14 +792,14 @@ class NicovideoSearchURLIE(NicovideoSearchBaseIE):
'url': 'http://www.nicovideo.jp/search/sm9',
'info_dict': {
'id': 'sm9',
'title': 'sm9'
'title': 'sm9',
},
'playlist_mincount': 40,
}, {
'url': 'https://www.nicovideo.jp/search/sm9?sort=h&order=d&end=2020-12-31&start=2020-01-01',
'info_dict': {
'id': 'sm9',
'title': 'sm9'
'title': 'sm9',
},
'playlist_count': 31,
}]
@@ -817,7 +817,7 @@ class NicovideoSearchDateIE(NicovideoSearchBaseIE, SearchInfoExtractor):
'url': 'nicosearchdateall:a',
'info_dict': {
'id': 'a',
'title': 'a'
'title': 'a',
},
'playlist_mincount': 1610,
}]
@@ -864,7 +864,7 @@ class NicovideoTagURLIE(NicovideoSearchBaseIE):
'url': 'https://www.nicovideo.jp/tag/ドキュメンタリー淫夢',
'info_dict': {
'id': 'ドキュメンタリー淫夢',
'title': 'ドキュメンタリー淫夢'
'title': 'ドキュメンタリー淫夢',
},
'playlist_mincount': 400,
}]
@@ -883,12 +883,12 @@ class NiconicoUserIE(InfoExtractor):
},
'playlist_mincount': 101,
}
_API_URL = "https://nvapi.nicovideo.jp/v1/users/%s/videos?sortKey=registeredAt&sortOrder=desc&pageSize=%s&page=%s"
_API_URL = 'https://nvapi.nicovideo.jp/v1/users/%s/videos?sortKey=registeredAt&sortOrder=desc&pageSize=%s&page=%s'
_PAGE_SIZE = 100
_API_HEADERS = {
'X-Frontend-ID': '6',
'X-Frontend-Version': '0'
'X-Frontend-Version': '0',
}
def _entries(self, list_id):
@@ -898,12 +898,12 @@ def _entries(self, list_id):
json_parsed = self._download_json(
self._API_URL % (list_id, self._PAGE_SIZE, page_num + 1), list_id,
headers=self._API_HEADERS,
note='Downloading JSON metadata%s' % (' page %d' % page_num if page_num else ''))
note='Downloading JSON metadata%s' % (f' page {page_num}' if page_num else ''))
if not page_num:
total_count = int_or_none(json_parsed['data'].get('totalCount'))
for entry in json_parsed["data"]["items"]:
for entry in json_parsed['data']['items']:
count += 1
yield self.url_result('https://www.nicovideo.jp/watch/%s' % entry['id'])
yield self.url_result('https://www.nicovideo.jp/watch/{}'.format(entry['id']))
page_num += 1
def _real_extract(self, url):
@@ -920,7 +920,7 @@ class NiconicoLiveIE(NiconicoBaseIE):
'url': 'https://live.nicovideo.jp/watch/lv339533123',
'info_dict': {
'id': 'lv339533123',
'title': '激辛ペヤング食べます( ;ᯅ; )(歌枠オーディション参加中)',
'title': '激辛ペヤング食べます\u202a( ;ᯅ; )\u202c(歌枠オーディション参加中)',
'view_count': int,
'comment_count': int,
'description': '初めましてもかって言います❕\nのんびり自由に適当に暮らしてます',
@@ -970,14 +970,14 @@ def _yield_formats(self, ws_url, headers, latency, video_id, is_live):
'quality': 'abr',
'protocol': 'hls',
'latency': latency,
'chasePlay': False
'chasePlay': False,
},
'room': {
'protocol': 'webSocket',
'commentable': True
'commentable': True,
},
'reconnect': False,
}
},
}))
while True:
@@ -1001,7 +1001,7 @@ def _yield_formats(self, ws_url, headers, latency, video_id, is_live):
elif self.get_param('verbose', False):
if len(recv) > 100:
recv = recv[:100] + '...'
self.write_debug('Server said: %s' % recv)
self.write_debug(f'Server said: {recv}')
ws.close()
@@ -1134,27 +1134,23 @@ def _check_status_and_availability(self, embedded_data, video_id):
self.report_warning('Timeshift viewing period has ended', video_id)
live_status = 'was_live'
availability = self._availability(**{
'needs_premium': 'notLogin' in rejected_reasons,
'needs_subscription': any(x in [
'notSocialGroupMember',
'notCommunityMember',
'notChannelMember',
'notCommunityMemberAndNotHaveTimeshiftTicket',
'notChannelMemberAndNotHaveTimeshiftTicket',
] for x in rejected_reasons),
'needs_auth': any(x in [
'timeshiftTicketExpired',
'notHaveTimeshiftTicket',
'notCommunityMemberAndNotHaveTimeshiftTicket',
'notChannelMemberAndNotHaveTimeshiftTicket',
'notHavePayTicket',
'notActivatedBySerial',
'notHavePayTicketAndNotActivatedBySerial',
'notUseTimeshiftTicket',
'notUseTimeshiftTicketOnOnceTimeshift',
'notUseTimeshiftTicketOnUnlimitedTimeshift',
] for x in rejected_reasons),
})
availability = self._availability(needs_premium='notLogin' in rejected_reasons, needs_subscription=any(x in [
'notSocialGroupMember',
'notCommunityMember',
'notChannelMember',
'notCommunityMemberAndNotHaveTimeshiftTicket',
'notChannelMemberAndNotHaveTimeshiftTicket',
] for x in rejected_reasons), needs_auth=any(x in [
'timeshiftTicketExpired',
'notHaveTimeshiftTicket',
'notCommunityMemberAndNotHaveTimeshiftTicket',
'notChannelMemberAndNotHaveTimeshiftTicket',
'notHavePayTicket',
'notActivatedBySerial',
'notHavePayTicketAndNotActivatedBySerial',
'notUseTimeshiftTicket',
'notUseTimeshiftTicketOnOnceTimeshift',
'notUseTimeshiftTicketOnUnlimitedTimeshift',
] for x in rejected_reasons))
return live_status, availability
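The final hunk replaces the **{...} dict-unpacking call to self._availability with plain keyword arguments (presumably Ruff's flake8-pie rule PIE804, unnecessary dict kwargs); the argument expressions themselves are untouched. A self-contained sketch with a stand-in for _availability and a shortened reason list:

def availability(needs_premium=False, needs_subscription=False, needs_auth=False):
    # stand-in for InfoExtractor._availability: simply echoes what it was given
    return {'premium': needs_premium, 'subscription': needs_subscription, 'auth': needs_auth}

rejected_reasons = ['notLogin', 'notChannelMember']  # example input

via_dict = availability(**{
    'needs_premium': 'notLogin' in rejected_reasons,
    'needs_subscription': 'notChannelMember' in rejected_reasons,
})
direct = availability(
    needs_premium='notLogin' in rejected_reasons,
    needs_subscription='notChannelMember' in rejected_reasons,
)
assert via_dict == direct  # same call; the dict literal and ** were only extra noise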