From 18b4296aa361ae1c436b4ed7e0e2a4f539bc1ade Mon Sep 17 00:00:00 2001
From: ringus1
Date: Wed, 31 Jan 2024 16:38:02 +0100
Subject: [PATCH] MR fixes

---
 yt_dlp/extractor/facebook.py | 32 +++++++++++++-------------------
 1 file changed, 13 insertions(+), 19 deletions(-)

diff --git a/yt_dlp/extractor/facebook.py b/yt_dlp/extractor/facebook.py
index f3b1580d39..59f5aa5137 100644
--- a/yt_dlp/extractor/facebook.py
+++ b/yt_dlp/extractor/facebook.py
@@ -395,7 +395,7 @@ def _perform_login(self, username, password):
         try:
             login_results = self._download_webpage(request, None,
                                                    note='Logging in', errnote='unable to fetch login page')
-            if self._html_search_regex(r'(Your Request Couldn.+? be Processed)', login_results, 'request error', default=None) is not None:
+            if self._html_search_regex(r'(Your Request Couldn\'t be Processed)', login_results, 'request error', default=None) is not None:
                 raise ExtractorError('Failed to perform login request.')
 
             if re.search(r'<form(.*)name="login"(.*)</form>', login_results) is not None:
@@ -436,7 +436,13 @@ def _extract_from_url(self, url, video_id):
         sjs_data = [self._parse_json(j, video_id, fatal=False) for j in re.findall(
             r'data-sjs>({.*?ScheduledServerJS.*?})', webpage)]
-        if (self.get_param('username') and self.get_param('password')) or self.get_param('cookiefile'):
+
+        cookies = self._get_cookies(url)
+        if all(k in cookies for k in ["c_user", "xs"]):
+            if get_first(sjs_data, (
+                    'require', ..., ..., ..., '__bbox', 'define',
+                    lambda _, v: 'CurrentUserInitialData' in v, ..., 'ACCOUNT_ID'), default="0") == "0":
+                raise ExtractorError('Failed to login with provided data.', expected=True)
             if any(content in webpage for content in ['180 days left to appeal', 'suspended your account']):
                 raise ExtractorError('Login account is suspended.', expected=True)
             if 'send a code to confirm the mobile number you give us' in webpage:
@@ -444,23 +450,11 @@
             if 'your account has been locked' in webpage:
                 raise ExtractorError('Account is locked.', expected=True)
 
-            userinfo = get_first(sjs_data, (
-                'require', ..., ..., ..., '__bbox', 'define',
-                lambda _, v: 'CurrentUserInitialData' in v, lambda _, v: 'ACCOUNT_ID' in v))
-            try:
-                user_id = int(userinfo['ACCOUNT_ID'])
-            except (TypeError, ValueError):
-                user_id = 0
-            if user_id == 0:
-                raise ExtractorError('Failed to login with provided data.', expected=True)
-
         if props := get_first(sjs_data, (
-                'require', ..., ..., ..., '__bbox', 'require',
-                ..., ..., ..., 'rootView', 'props'), expected_type=dict, default={}
-        ):
-
-            if props.get('title') in ('This content isn\'t available at the moment', 'This content isn\'t available right now'):
-                raise ExtractorError('Content removed or not accessible. Facebook said: "%s"' % props.get('body', ''), expected=True)
+                'require', ..., ..., ..., '__bbox', 'require', ..., ..., ..., 'rootView', 'props',
+                lambda _, v: v['title'].startswith('This content isn\'t available'))):
+            raise ExtractorError(
+                f'Content unavailable. Facebook said: {props.get("body") or props["title"]}', expected=True)
 
         def extract_metadata(webpage):
             post_data = [self._parse_json(j, video_id, fatal=False) for j in re.findall(
@@ -587,7 +581,7 @@ def extract_relay_data(_filter):
         def extract_relay_prefetched_data(_filter):
             return traverse_obj(extract_relay_data(_filter), (
                 'require', (None, (..., ..., ..., '__bbox', 'require')),
-                lambda _, v: any(key.startswith('RelayPrefetchedStreamCache') for key in v if isinstance(key, str)),
+                lambda _, v: any(key.startswith('RelayPrefetchedStreamCache') for key in v),
                 ..., ..., '__bbox', 'result', 'data', {dict}), get_all=False) or {}
 
         if not video_data: