2022-06-24 12:54:43 +02:00
|
|
|
import base64
|
2023-05-27 09:08:19 +02:00
|
|
|
import collections
|
2021-12-27 02:28:44 +01:00
|
|
|
import contextlib
|
2024-02-25 01:16:34 +01:00
|
|
|
import datetime as dt
|
2024-07-02 00:51:27 +02:00
|
|
|
import functools
|
2024-01-31 09:43:52 +01:00
|
|
|
import glob
|
2024-07-02 00:51:27 +02:00
|
|
|
import hashlib
|
2022-06-24 13:06:16 +02:00
|
|
|
import http.cookiejar
|
2022-09-16 19:02:00 +02:00
|
|
|
import http.cookies
|
2023-05-27 09:08:19 +02:00
|
|
|
import io
|
2021-07-21 22:32:49 +02:00
|
|
|
import json
|
|
|
|
import os
|
2022-08-30 18:54:46 +02:00
|
|
|
import re
|
2021-07-21 22:32:49 +02:00
|
|
|
import shutil
|
|
|
|
import struct
|
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import tempfile
|
2022-05-12 07:24:49 +02:00
|
|
|
import time
|
2023-05-27 09:08:19 +02:00
|
|
|
import urllib.request
|
2021-12-27 02:28:44 +01:00
|
|
|
from enum import Enum, auto
|
2021-07-21 22:32:49 +02:00
|
|
|
|
2022-01-31 15:49:33 +01:00
|
|
|
from .aes import (
|
|
|
|
aes_cbc_decrypt_bytes,
|
|
|
|
aes_gcm_decrypt_and_verify_bytes,
|
|
|
|
unpad_pkcs7,
|
|
|
|
)
|
2024-01-31 09:56:14 +01:00
|
|
|
from .compat import compat_os_name
|
2022-04-20 21:05:57 +02:00
|
|
|
from .dependencies import (
|
|
|
|
_SECRETSTORAGE_UNAVAILABLE_REASON,
|
|
|
|
secretstorage,
|
|
|
|
sqlite3,
|
|
|
|
)
|
2022-04-09 21:31:48 +02:00
|
|
|
from .minicurses import MultilinePrinter, QuietMultilinePrinter
|
2022-09-01 13:19:03 +02:00
|
|
|
from .utils import (
|
2024-01-31 09:56:14 +01:00
|
|
|
DownloadError,
|
2024-10-01 02:13:48 +02:00
|
|
|
YoutubeDLError,
|
2022-09-01 13:19:03 +02:00
|
|
|
Popen,
|
|
|
|
error_to_str,
|
|
|
|
expand_path,
|
2022-09-11 11:02:35 +02:00
|
|
|
is_path_like,
|
2023-05-27 09:08:19 +02:00
|
|
|
sanitize_url,
|
|
|
|
str_or_none,
|
2022-09-01 13:19:03 +02:00
|
|
|
try_call,
|
2023-05-27 09:08:19 +02:00
|
|
|
write_string,
|
2022-09-01 13:19:03 +02:00
|
|
|
)
|
2023-07-15 08:11:08 +02:00
|
|
|
from .utils._utils import _YDLLogger
|
2023-07-29 00:40:20 +02:00
|
|
|
from .utils.networking import normalize_url
|
2021-07-21 22:32:49 +02:00
|
|
|
|
2024-05-17 16:33:12 +02:00
|
|
|
# Browsers that share Chromium's cookie-database layout and encryption
# scheme; all of them are handled by the same chrome extraction path.
CHROMIUM_BASED_BROWSERS = {'brave', 'chrome', 'chromium', 'edge', 'opera', 'vivaldi', 'whale'}

# Chromium-based browsers plus the two browsers with dedicated extractors.
SUPPORTED_BROWSERS = CHROMIUM_BASED_BROWSERS | {'firefox', 'safari'}
|
|
|
|
|
|
|
|
|
2023-07-15 08:11:08 +02:00
|
|
|
class YDLLogger(_YDLLogger):
    def warning(self, message, only_once=False):  # compat
        # Adapter: call sites in this module pass ``only_once``, while
        # _YDLLogger.warning expects the keyword ``once``.
        return super().warning(message, once=only_once)

    class ProgressBar(MultilinePrinter):
        # _DELAY: minimum seconds between repaints; _timer: last paint time.
        _DELAY, _timer = 0.1, 0

        def print(self, message):
            # Throttle repaints to at most one per _DELAY seconds.
            if time.time() - self._timer > self._DELAY:
                self.print_at_line(f'[Cookies] {message}', 0)
                self._timer = time.time()

    def progress_bar(self):
        """Return a context manager with a print method. (Optional)"""
        # Do not print to files/pipes, loggers, or when --no-progress is used
        if not self._ydl or self._ydl.params.get('noprogress') or self._ydl.params.get('logger'):
            return
        file = self._ydl._out_files.error
        try:
            if not file.isatty():
                return
        except BaseException:
            # isatty() may fail on exotic file objects; treat as non-tty.
            return
        return self.ProgressBar(file, preserve_output=False)
|
2022-04-09 21:31:48 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _create_progress_bar(logger):
|
|
|
|
if hasattr(logger, 'progress_bar'):
|
|
|
|
printer = logger.progress_bar()
|
|
|
|
if printer:
|
|
|
|
return printer
|
|
|
|
printer = QuietMultilinePrinter()
|
|
|
|
printer.print = lambda _: None
|
|
|
|
return printer
|
|
|
|
|
2021-07-21 22:32:49 +02:00
|
|
|
|
2024-10-01 02:13:48 +02:00
|
|
|
class CookieLoadError(YoutubeDLError):
    """Raised by load_cookies() when cookies could not be loaded or extracted."""
|
|
|
|
|
|
|
|
|
2021-07-21 22:32:49 +02:00
|
|
|
def load_cookies(cookie_file, browser_specification, ydl):
    """Collect cookies from a browser and/or a cookie file into a single jar.

    cookie_file: path (or path-like/file-like object) of a cookie file, or None.
    browser_specification: arguments for _parse_browser_specification
        (browser name, profile, keyring, container), or None.
    ydl: YoutubeDL instance, used for logging.

    Raises CookieLoadError if any step fails.
    """
    try:
        cookie_jars = []
        if browser_specification is not None:
            browser_name, profile, keyring, container = _parse_browser_specification(*browser_specification)
            cookie_jars.append(
                extract_cookies_from_browser(browser_name, profile, YDLLogger(ydl), keyring=keyring, container=container))

        if cookie_file is not None:
            is_filename = is_path_like(cookie_file)
            if is_filename:
                cookie_file = expand_path(cookie_file)

            jar = YoutubeDLCookieJar(cookie_file)
            # Only read the file when it is readable; an unreadable/missing
            # file yields an empty jar still bound to that path.
            if not is_filename or os.access(cookie_file, os.R_OK):
                jar.load()
            cookie_jars.append(jar)

        return _merge_cookie_jars(cookie_jars)
    except Exception:
        # Wrap every failure so callers can catch one exception type.
        raise CookieLoadError('failed to load cookies')
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
2022-08-30 18:54:46 +02:00
|
|
|
def extract_cookies_from_browser(browser_name, profile=None, logger=YDLLogger(), *, keyring=None, container=None):
    """Dispatch cookie extraction to the handler for *browser_name*.

    Raises ValueError for names outside SUPPORTED_BROWSERS.
    """
    if browser_name == 'firefox':
        return _extract_firefox_cookies(profile, container, logger)
    if browser_name == 'safari':
        return _extract_safari_cookies(profile, logger)
    if browser_name in CHROMIUM_BASED_BROWSERS:
        return _extract_chrome_cookies(browser_name, profile, keyring, logger)
    raise ValueError(f'unknown browser: {browser_name}')
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
2022-08-30 18:54:46 +02:00
|
|
|
def _extract_firefox_cookies(profile, container, logger):
    """Extract cookies from Firefox's cookies.sqlite database.

    profile: profile name or path to a profile directory, or None to search
        the platform's default locations.
    container: Firefox container name, 'none' to restrict to cookies outside
        any container, or None for all cookies.

    Returns a YoutubeDLCookieJar; raises FileNotFoundError/ValueError when
    the database or container cannot be located.
    """
    logger.info('Extracting cookies from firefox')
    if not sqlite3:
        logger.warning('Cannot extract cookies from firefox without sqlite3 support. '
                       'Please use a Python interpreter compiled with sqlite3 support')
        return YoutubeDLCookieJar()

    if profile is None:
        search_roots = list(_firefox_browser_dirs())
    elif _is_path(profile):
        search_roots = [profile]
    else:
        # A bare profile name: look for it under every known browser dir.
        search_roots = [os.path.join(path, profile) for path in _firefox_browser_dirs()]
    search_root = ', '.join(map(repr, search_roots))

    cookie_database_path = _newest(_firefox_cookie_dbs(search_roots))
    if cookie_database_path is None:
        raise FileNotFoundError(f'could not find firefox cookies database in {search_root}')
    logger.debug(f'Extracting cookies from: "{cookie_database_path}"')

    container_id = None
    if container not in (None, 'none'):
        # Resolve the container name to its numeric userContextId via
        # containers.json, located next to the cookie database.
        containers_path = os.path.join(os.path.dirname(cookie_database_path), 'containers.json')
        if not os.path.isfile(containers_path) or not os.access(containers_path, os.R_OK):
            raise FileNotFoundError(f'could not read containers.json in {search_root}')
        with open(containers_path, encoding='utf8') as containers:
            identities = json.load(containers).get('identities', [])
            # Match against either the display name or the l10n label id.
            container_id = next((context.get('userContextId') for context in identities if container in (
                context.get('name'),
                try_call(lambda: re.fullmatch(r'userContext([^\.]+)\.label', context['l10nID']).group()),
            )), None)
        if not isinstance(container_id, int):
            raise ValueError(f'could not find firefox container "{container}" in containers.json')

    # Operate on a temporary copy of the database (see _open_database_copy).
    with tempfile.TemporaryDirectory(prefix='yt_dlp') as tmpdir:
        cursor = None
        try:
            cursor = _open_database_copy(cookie_database_path, tmpdir)
            if isinstance(container_id, int):
                logger.debug(
                    f'Only loading cookies from firefox container "{container}", ID {container_id}')
                cursor.execute(
                    'SELECT host, name, value, path, expiry, isSecure FROM moz_cookies WHERE originAttributes LIKE ? OR originAttributes LIKE ?',
                    (f'%userContextId={container_id}', f'%userContextId={container_id}&%'))
            elif container == 'none':
                logger.debug('Only loading cookies not belonging to any container')
                cursor.execute(
                    'SELECT host, name, value, path, expiry, isSecure FROM moz_cookies WHERE NOT INSTR(originAttributes,"userContextId=")')
            else:
                cursor.execute('SELECT host, name, value, path, expiry, isSecure FROM moz_cookies')
            jar = YoutubeDLCookieJar()
            with _create_progress_bar(logger) as progress_bar:
                table = cursor.fetchall()
                total_cookie_count = len(table)
                for i, (host, name, value, path, expiry, is_secure) in enumerate(table):
                    progress_bar.print(f'Loading cookie {i: 6d}/{total_cookie_count: 6d}')
                    cookie = http.cookiejar.Cookie(
                        version=0, name=name, value=value, port=None, port_specified=False,
                        domain=host, domain_specified=bool(host), domain_initial_dot=host.startswith('.'),
                        path=path, path_specified=bool(path), secure=is_secure, expires=expiry, discard=False,
                        comment=None, comment_url=None, rest={})
                    jar.set_cookie(cookie)
            logger.info(f'Extracted {len(jar)} cookies from firefox')
            return jar
        finally:
            if cursor is not None:
                cursor.connection.close()
|
|
|
|
|
|
|
|
|
2024-01-31 09:43:52 +01:00
|
|
|
def _firefox_browser_dirs():
|
2022-06-06 23:17:49 +02:00
|
|
|
if sys.platform in ('cygwin', 'win32'):
|
2024-01-31 09:43:52 +01:00
|
|
|
yield os.path.expandvars(R'%APPDATA%\Mozilla\Firefox\Profiles')
|
|
|
|
|
2021-07-21 22:32:49 +02:00
|
|
|
elif sys.platform == 'darwin':
|
2024-01-31 09:43:52 +01:00
|
|
|
yield os.path.expanduser('~/Library/Application Support/Firefox/Profiles')
|
|
|
|
|
|
|
|
else:
|
2024-04-07 17:28:59 +02:00
|
|
|
yield from map(os.path.expanduser, (
|
|
|
|
'~/.mozilla/firefox',
|
|
|
|
'~/snap/firefox/common/.mozilla/firefox',
|
|
|
|
'~/.var/app/org.mozilla.firefox/.mozilla/firefox',
|
|
|
|
))
|
2024-01-31 09:43:52 +01:00
|
|
|
|
|
|
|
|
|
|
|
def _firefox_cookie_dbs(roots):
|
|
|
|
for root in map(os.path.abspath, roots):
|
|
|
|
for pattern in ('', '*/', 'Profiles/*/'):
|
|
|
|
yield from glob.iglob(os.path.join(root, pattern, 'cookies.sqlite'))
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _get_chromium_based_browser_settings(browser_name):
    """Return per-browser settings for a Chromium-based browser.

    Returns a dict with keys 'browser_dir' (platform data directory),
    'keyring_name' (OS keyring entry used for the cookie key) and
    'supports_profiles'. Raises KeyError for unknown browser names.
    """
    # https://chromium.googlesource.com/chromium/src/+/HEAD/docs/user_data_dir.md
    if sys.platform in ('cygwin', 'win32'):
        appdata_local = os.path.expandvars('%LOCALAPPDATA%')
        appdata_roaming = os.path.expandvars('%APPDATA%')
        browser_dir = {
            'brave': os.path.join(appdata_local, R'BraveSoftware\Brave-Browser\User Data'),
            'chrome': os.path.join(appdata_local, R'Google\Chrome\User Data'),
            'chromium': os.path.join(appdata_local, R'Chromium\User Data'),
            'edge': os.path.join(appdata_local, R'Microsoft\Edge\User Data'),
            'opera': os.path.join(appdata_roaming, R'Opera Software\Opera Stable'),
            'vivaldi': os.path.join(appdata_local, R'Vivaldi\User Data'),
            'whale': os.path.join(appdata_local, R'Naver\Naver Whale\User Data'),
        }[browser_name]

    elif sys.platform == 'darwin':
        appdata = os.path.expanduser('~/Library/Application Support')
        browser_dir = {
            'brave': os.path.join(appdata, 'BraveSoftware/Brave-Browser'),
            'chrome': os.path.join(appdata, 'Google/Chrome'),
            'chromium': os.path.join(appdata, 'Chromium'),
            'edge': os.path.join(appdata, 'Microsoft Edge'),
            'opera': os.path.join(appdata, 'com.operasoftware.Opera'),
            'vivaldi': os.path.join(appdata, 'Vivaldi'),
            'whale': os.path.join(appdata, 'Naver/Whale'),
        }[browser_name]

    else:
        # Linux and other platforms: paths are relative to the XDG config home.
        config = _config_home()
        browser_dir = {
            'brave': os.path.join(config, 'BraveSoftware/Brave-Browser'),
            'chrome': os.path.join(config, 'google-chrome'),
            'chromium': os.path.join(config, 'chromium'),
            'edge': os.path.join(config, 'microsoft-edge'),
            'opera': os.path.join(config, 'opera'),
            'vivaldi': os.path.join(config, 'vivaldi'),
            'whale': os.path.join(config, 'naver-whale'),
        }[browser_name]

    # Linux keyring names can be determined by snooping on dbus while opening the browser in KDE:
    # dbus-monitor "interface='org.kde.KWallet'" "type=method_return"
    keyring_name = {
        'brave': 'Brave',
        'chrome': 'Chrome',
        'chromium': 'Chromium',
        'edge': 'Microsoft Edge' if sys.platform == 'darwin' else 'Chromium',
        'opera': 'Opera' if sys.platform == 'darwin' else 'Chromium',
        'vivaldi': 'Vivaldi' if sys.platform == 'darwin' else 'Chrome',
        'whale': 'Whale',
    }[browser_name]

    # Browsers that keep a single data directory rather than per-profile
    # subdirectories.
    browsers_without_profiles = {'opera'}

    return {
        'browser_dir': browser_dir,
        'keyring_name': keyring_name,
        'supports_profiles': browser_name not in browsers_without_profiles,
    }
|
|
|
|
|
|
|
|
|
2021-12-27 02:28:44 +01:00
|
|
|
def _extract_chrome_cookies(browser_name, profile, keyring, logger):
    """Extract cookies from a Chromium-based browser's Cookies database.

    browser_name: one of CHROMIUM_BASED_BROWSERS.
    profile: profile name or path, or None for the default search root.
    keyring: optional keyring backend name, forwarded to the decryptor.

    Returns a YoutubeDLCookieJar.
    """
    logger.info(f'Extracting cookies from {browser_name}')

    if not sqlite3:
        logger.warning(f'Cannot extract cookies from {browser_name} without sqlite3 support. '
                       'Please use a Python interpreter compiled with sqlite3 support')
        return YoutubeDLCookieJar()

    config = _get_chromium_based_browser_settings(browser_name)

    if profile is None:
        search_root = config['browser_dir']
    elif _is_path(profile):
        search_root = profile
        # The keyring/key lookup is rooted at the browser dir, so derive it
        # from the explicit profile path.
        config['browser_dir'] = os.path.dirname(profile) if config['supports_profiles'] else profile
    else:
        if config['supports_profiles']:
            search_root = os.path.join(config['browser_dir'], profile)
        else:
            logger.error(f'{browser_name} does not support profiles')
            search_root = config['browser_dir']

    cookie_database_path = _newest(_find_files(search_root, 'Cookies', logger))
    if cookie_database_path is None:
        raise FileNotFoundError(f'could not find {browser_name} cookies database in "{search_root}"')
    logger.debug(f'Extracting cookies from: "{cookie_database_path}"')

    decryptor = get_cookie_decryptor(config['browser_dir'], config['keyring_name'], logger, keyring=keyring)

    # Operate on a temporary copy of the database (see _open_database_copy).
    with tempfile.TemporaryDirectory(prefix='yt_dlp') as tmpdir:
        cursor = None
        try:
            cursor = _open_database_copy(cookie_database_path, tmpdir)
            # Values are decoded/decrypted manually, so fetch raw bytes.
            cursor.connection.text_factory = bytes
            column_names = _get_column_names(cursor, 'cookies')
            # Some schema versions name the column 'secure' instead of 'is_secure'.
            secure_column = 'is_secure' if 'is_secure' in column_names else 'secure'
            cursor.execute(f'SELECT host_key, name, value, encrypted_value, path, expires_utc, {secure_column} FROM cookies')
            jar = YoutubeDLCookieJar()
            failed_cookies = 0
            unencrypted_cookies = 0
            with _create_progress_bar(logger) as progress_bar:
                table = cursor.fetchall()
                total_cookie_count = len(table)
                for i, line in enumerate(table):
                    progress_bar.print(f'Loading cookie {i: 6d}/{total_cookie_count: 6d}')
                    is_encrypted, cookie = _process_chrome_cookie(decryptor, *line)
                    if not cookie:
                        failed_cookies += 1
                        continue
                    elif not is_encrypted:
                        unencrypted_cookies += 1
                    jar.set_cookie(cookie)
            if failed_cookies > 0:
                failed_message = f' ({failed_cookies} could not be decrypted)'
            else:
                failed_message = ''
            logger.info(f'Extracted {len(jar)} cookies from {browser_name}{failed_message}')
            counts = decryptor._cookie_counts.copy()
            counts['unencrypted'] = unencrypted_cookies
            logger.debug(f'cookie version breakdown: {counts}')
            return jar
        except PermissionError as error:
            # See the linked issue for why this PermissionError can occur on
            # Windows when copying the database.
            if compat_os_name == 'nt' and error.errno == 13:
                message = 'Could not copy Chrome cookie database. See https://github.com/yt-dlp/yt-dlp/issues/7271 for more info'
                logger.error(message)
                raise DownloadError(message)  # force exit
            raise
        finally:
            if cursor is not None:
                cursor.connection.close()
|
|
|
|
|
|
|
|
|
2022-04-09 21:31:48 +02:00
|
|
|
def _process_chrome_cookie(decryptor, host_key, name, value, encrypted_value, path, expires_utc, is_secure):
|
2022-05-09 13:54:28 +02:00
|
|
|
host_key = host_key.decode()
|
|
|
|
name = name.decode()
|
|
|
|
value = value.decode()
|
|
|
|
path = path.decode()
|
2022-04-09 21:31:48 +02:00
|
|
|
is_encrypted = not value and encrypted_value
|
|
|
|
|
|
|
|
if is_encrypted:
|
|
|
|
value = decryptor.decrypt(encrypted_value)
|
|
|
|
if value is None:
|
|
|
|
return is_encrypted, None
|
|
|
|
|
2024-05-11 19:25:39 +02:00
|
|
|
# In chrome, session cookies have expires_utc set to 0
|
|
|
|
# In our cookie-store, cookies that do not expire should have expires set to None
|
|
|
|
if not expires_utc:
|
|
|
|
expires_utc = None
|
|
|
|
|
2022-06-24 10:10:17 +02:00
|
|
|
return is_encrypted, http.cookiejar.Cookie(
|
2022-04-09 21:31:48 +02:00
|
|
|
version=0, name=name, value=value, port=None, port_specified=False,
|
|
|
|
domain=host_key, domain_specified=bool(host_key), domain_initial_dot=host_key.startswith('.'),
|
|
|
|
path=path, path_specified=bool(path), secure=is_secure, expires=expires_utc, discard=False,
|
|
|
|
comment=None, comment_url=None, rest={})
|
|
|
|
|
|
|
|
|
2021-07-21 22:32:49 +02:00
|
|
|
class ChromeCookieDecryptor:
    """
    Overview:

        Linux:
        - cookies are either v10 or v11
            - v10: AES-CBC encrypted with a fixed key
                - also attempts empty password if decryption fails
            - v11: AES-CBC encrypted with an OS protected key (keyring)
                - also attempts empty password if decryption fails
            - v11 keys can be stored in various places depending on the activate desktop environment [2]

        Mac:
        - cookies are either v10 or not v10
            - v10: AES-CBC encrypted with an OS protected key (keyring) and more key derivation iterations than linux
            - not v10: 'old data' stored as plaintext

        Windows:
        - cookies are either v10 or not v10
            - v10: AES-GCM encrypted with a key which is encrypted with DPAPI
            - not v10: encrypted with DPAPI

    Sources:
    - [1] https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/
    - [2] https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/sync/key_storage_linux.cc
        - KeyStorageLinux::CreateService
    """

    # Per-version tally of seen cookies; subclasses replace this in __init__.
    _cookie_counts = {}

    def decrypt(self, encrypted_value):
        # Subclasses return the decrypted string, or None on failure.
        raise NotImplementedError('Must be implemented by sub classes')
|
2021-12-27 02:28:44 +01:00
|
|
|
|
2021-07-21 22:32:49 +02:00
|
|
|
|
2021-12-27 02:28:44 +01:00
|
|
|
def get_cookie_decryptor(browser_root, browser_keyring_name, logger, *, keyring=None):
    """Instantiate the ChromeCookieDecryptor subclass for the current platform."""
    if sys.platform == 'darwin':
        return MacChromeCookieDecryptor(browser_keyring_name, logger)
    if sys.platform in ('win32', 'cygwin'):
        return WindowsChromeCookieDecryptor(browser_root, logger)
    # Everything else is treated as Linux-like.
    return LinuxChromeCookieDecryptor(browser_keyring_name, logger, keyring=keyring)
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
|
|
|
class LinuxChromeCookieDecryptor(ChromeCookieDecryptor):
    def __init__(self, browser_keyring_name, logger, *, keyring=None):
        self._logger = logger
        # v10 cookies are encrypted with the hard-coded password 'peanuts'.
        self._v10_key = self.derive_key(b'peanuts')
        # Fallback key derived from an empty password (see decrypt()).
        self._empty_key = self.derive_key(b'')
        self._cookie_counts = {'v10': 0, 'v11': 0, 'other': 0}
        self._browser_keyring_name = browser_keyring_name
        self._keyring = keyring

    @functools.cached_property
    def _v11_key(self):
        # Computed lazily so the OS keyring is only queried once a v11
        # cookie is actually encountered.
        password = _get_linux_keyring_password(self._browser_keyring_name, self._keyring, self._logger)
        return None if password is None else self.derive_key(password)

    @staticmethod
    def derive_key(password):
        # values from
        # https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/sync/os_crypt_linux.cc
        return pbkdf2_sha1(password, salt=b'saltysalt', iterations=1, key_length=16)

    def decrypt(self, encrypted_value):
        """
        following the same approach as the fix in [1]: if cookies fail to decrypt then attempt to decrypt
        with an empty password. The failure detection is not the same as what chromium uses so the
        results won't be perfect

        References:
            - [1] https://chromium.googlesource.com/chromium/src/+/bbd54702284caca1f92d656fdcadf2ccca6f4165%5E%21/
                - a bugfix to try an empty password as a fallback
        """
        # The first three bytes identify the encryption scheme.
        version = encrypted_value[:3]
        ciphertext = encrypted_value[3:]

        if version == b'v10':
            self._cookie_counts['v10'] += 1
            return _decrypt_aes_cbc_multi(ciphertext, (self._v10_key, self._empty_key), self._logger)

        elif version == b'v11':
            self._cookie_counts['v11'] += 1
            if self._v11_key is None:
                self._logger.warning('cannot decrypt v11 cookies: no key found', only_once=True)
                return None
            return _decrypt_aes_cbc_multi(ciphertext, (self._v11_key, self._empty_key), self._logger)

        else:
            self._logger.warning(f'unknown cookie version: "{version}"', only_once=True)
            self._cookie_counts['other'] += 1
            return None
|
|
|
|
|
|
|
|
|
|
|
|
class MacChromeCookieDecryptor(ChromeCookieDecryptor):
    def __init__(self, browser_keyring_name, logger):
        self._logger = logger
        # The encryption password comes from the macOS keychain; without it
        # v10 cookies cannot be decrypted.
        password = _get_mac_keyring_password(browser_keyring_name, logger)
        self._v10_key = None if password is None else self.derive_key(password)
        self._cookie_counts = {'v10': 0, 'other': 0}

    @staticmethod
    def derive_key(password):
        # values from
        # https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/sync/os_crypt_mac.mm
        return pbkdf2_sha1(password, salt=b'saltysalt', iterations=1003, key_length=16)

    def decrypt(self, encrypted_value):
        # The first three bytes identify the encryption scheme.
        version = encrypted_value[:3]
        ciphertext = encrypted_value[3:]

        if version == b'v10':
            self._cookie_counts['v10'] += 1
            if self._v10_key is None:
                self._logger.warning('cannot decrypt v10 cookies: no key found', only_once=True)
                return None

            return _decrypt_aes_cbc_multi(ciphertext, (self._v10_key,), self._logger)

        else:
            self._cookie_counts['other'] += 1
            # other prefixes are considered 'old data' which were stored as plaintext
            # https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/sync/os_crypt_mac.mm
            return encrypted_value
|
|
|
|
|
|
|
|
|
|
|
|
class WindowsChromeCookieDecryptor(ChromeCookieDecryptor):
    def __init__(self, browser_root, logger):
        self._logger = logger
        # AES-GCM key recovered from the browser installation (see
        # _get_windows_v10_key); None when it cannot be obtained.
        self._v10_key = _get_windows_v10_key(browser_root, logger)
        self._cookie_counts = {'v10': 0, 'other': 0}

    def decrypt(self, encrypted_value):
        # The first three bytes identify the encryption scheme.
        version = encrypted_value[:3]
        ciphertext = encrypted_value[3:]

        if version == b'v10':
            self._cookie_counts['v10'] += 1
            if self._v10_key is None:
                self._logger.warning('cannot decrypt v10 cookies: no key found', only_once=True)
                return None

            # https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/sync/os_crypt_win.cc
            #   kNonceLength
            nonce_length = 96 // 8
            # boringssl
            #   EVP_AEAD_AES_GCM_TAG_LEN
            authentication_tag_length = 16

            # Layout: nonce || ciphertext || authentication tag.
            raw_ciphertext = ciphertext
            nonce = raw_ciphertext[:nonce_length]
            ciphertext = raw_ciphertext[nonce_length:-authentication_tag_length]
            authentication_tag = raw_ciphertext[-authentication_tag_length:]

            return _decrypt_aes_gcm(ciphertext, self._v10_key, nonce, authentication_tag, self._logger)

        else:
            self._cookie_counts['other'] += 1
            # any other prefix means the data is DPAPI encrypted
            # https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/sync/os_crypt_win.cc
            return _decrypt_windows_dpapi(encrypted_value, self._logger).decode()
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _extract_safari_cookies(profile, logger):
|
|
|
|
if sys.platform != 'darwin':
|
2022-04-11 17:10:28 +02:00
|
|
|
raise ValueError(f'unsupported platform: {sys.platform}')
|
2021-07-21 22:32:49 +02:00
|
|
|
|
2023-05-29 08:05:51 +02:00
|
|
|
if profile:
|
|
|
|
cookies_path = os.path.expanduser(profile)
|
|
|
|
if not os.path.isfile(cookies_path):
|
|
|
|
raise FileNotFoundError('custom safari cookies database not found')
|
|
|
|
|
|
|
|
else:
|
|
|
|
cookies_path = os.path.expanduser('~/Library/Cookies/Cookies.binarycookies')
|
2021-07-21 22:32:49 +02:00
|
|
|
|
2022-02-14 15:36:51 +01:00
|
|
|
if not os.path.isfile(cookies_path):
|
2023-05-29 08:05:51 +02:00
|
|
|
logger.debug('Trying secondary cookie location')
|
|
|
|
cookies_path = os.path.expanduser('~/Library/Containers/com.apple.Safari/Data/Library/Cookies/Cookies.binarycookies')
|
|
|
|
if not os.path.isfile(cookies_path):
|
|
|
|
raise FileNotFoundError('could not find safari cookies database')
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
with open(cookies_path, 'rb') as f:
|
|
|
|
cookies_data = f.read()
|
|
|
|
|
|
|
|
jar = parse_safari_cookies(cookies_data, logger=logger)
|
2022-04-11 17:10:28 +02:00
|
|
|
logger.info(f'Extracted {len(jar)} cookies from safari')
|
2021-07-21 22:32:49 +02:00
|
|
|
return jar
|
|
|
|
|
|
|
|
|
|
|
|
class ParserError(Exception):
    """Raised when binary cookie data does not match the expected format."""
|
|
|
|
|
|
|
|
|
|
|
|
class DataParser:
    """Sequential reader over a bytes buffer, tracking a public ``cursor``."""

    def __init__(self, data, logger):
        self._data = data
        self.cursor = 0
        self._logger = logger

    def read_bytes(self, num_bytes):
        """Consume and return the next *num_bytes* bytes."""
        if num_bytes < 0:
            raise ParserError(f'invalid read of {num_bytes} bytes')
        start, end = self.cursor, self.cursor + num_bytes
        if end > len(self._data):
            raise ParserError('reached end of input')
        self.cursor = end
        return self._data[start:end]

    def expect_bytes(self, expected_value, message):
        """Consume bytes and raise ParserError unless they equal *expected_value*."""
        actual = self.read_bytes(len(expected_value))
        if actual != expected_value:
            raise ParserError(f'unexpected value: {actual} != {expected_value} ({message})')

    def read_uint(self, big_endian=False):
        """Consume 4 bytes as an unsigned 32-bit integer."""
        return struct.unpack('>I' if big_endian else '<I', self.read_bytes(4))[0]

    def read_double(self, big_endian=False):
        """Consume 8 bytes as an IEEE-754 double."""
        return struct.unpack('>d' if big_endian else '<d', self.read_bytes(8))[0]

    def read_cstring(self):
        """Consume bytes up to a NUL terminator and decode them."""
        collected = bytearray()
        while (byte := self.read_bytes(1)) != b'\x00':
            collected += byte
        return collected.decode()

    def skip(self, num_bytes, description='unknown'):
        """Advance past *num_bytes* bytes, logging what was skipped."""
        if num_bytes > 0:
            self._logger.debug(f'skipping {num_bytes} bytes ({description}): {self.read_bytes(num_bytes)!r}')
        elif num_bytes < 0:
            raise ParserError(f'invalid skip of {num_bytes} bytes')

    def skip_to(self, offset, description='unknown'):
        """Advance the cursor to absolute *offset*."""
        self.skip(offset - self.cursor, description)

    def skip_to_end(self, description='unknown'):
        """Advance the cursor past all remaining data."""
        self.skip_to(len(self._data), description)
|
|
|
|
|
|
|
|
|
|
|
|
def _mac_absolute_time_to_posix(timestamp):
|
2024-02-25 01:16:34 +01:00
|
|
|
return int((dt.datetime(2001, 1, 1, 0, 0, tzinfo=dt.timezone.utc) + dt.timedelta(seconds=timestamp)).timestamp())
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _parse_safari_cookies_header(data, logger):
    """Parse the binarycookies file header; return (page sizes, offset of the body)."""
    parser = DataParser(data, logger)
    parser.expect_bytes(b'cook', 'database signature')
    page_count = parser.read_uint(big_endian=True)
    sizes = [parser.read_uint(big_endian=True) for _ in range(page_count)]
    return sizes, parser.cursor
|
|
|
|
|
|
|
|
|
|
|
|
def _parse_safari_cookies_page(data, jar, logger):
    """Parse one page of a Safari binarycookies database and add its cookies to *jar*."""
    p = DataParser(data, logger)
    p.expect_bytes(b'\x00\x00\x01\x00', 'page signature')
    number_of_cookies = p.read_uint()
    # table of offsets (relative to the page start) of each cookie record
    record_offsets = [p.read_uint() for _ in range(number_of_cookies)]
    if number_of_cookies == 0:
        logger.debug(f'a cookies page of size {len(data)} has no cookies')
        return

    p.skip_to(record_offsets[0], 'unknown page header field')

    with _create_progress_bar(logger) as progress_bar:
        for i, record_offset in enumerate(record_offsets):
            progress_bar.print(f'Loading cookie {i: 6d}/{number_of_cookies: 6d}')
            p.skip_to(record_offset, 'space between records')
            # the record is parsed from its own slice; advance this page's
            # cursor past it using the length the record reports
            record_length = _parse_safari_cookies_record(data[record_offset:], jar, logger)
            p.read_bytes(record_length)
    p.skip_to_end('space in between pages')
|
|
|
|
|
|
|
|
|
|
|
|
def _parse_safari_cookies_record(data, jar, logger):
    """Parse a single binarycookies record (starting at data[0]) into *jar*.

    Returns the record size in bytes so the caller can advance past it.
    Records whose strings fail UTF-8 decoding are skipped with a warning.
    """
    p = DataParser(data, logger)
    record_size = p.read_uint()
    p.skip(4, 'unknown record field 1')
    flags = p.read_uint()
    # bit 0 of the flags field marks a secure (HTTPS-only) cookie
    is_secure = bool(flags & 0x0001)
    p.skip(4, 'unknown record field 2')
    # offsets (relative to the record start) of the four NUL-terminated strings
    domain_offset = p.read_uint()
    name_offset = p.read_uint()
    path_offset = p.read_uint()
    value_offset = p.read_uint()
    p.skip(8, 'unknown record field 3')
    # dates are stored as Mac absolute time doubles
    expiration_date = _mac_absolute_time_to_posix(p.read_double())
    _creation_date = _mac_absolute_time_to_posix(p.read_double())  # noqa: F841

    try:
        p.skip_to(domain_offset)
        domain = p.read_cstring()

        p.skip_to(name_offset)
        name = p.read_cstring()

        p.skip_to(path_offset)
        path = p.read_cstring()

        p.skip_to(value_offset)
        value = p.read_cstring()
    except UnicodeDecodeError:
        logger.warning('failed to parse Safari cookie because UTF-8 decoding failed', only_once=True)
        # still return the size so the caller can skip this record
        return record_size

    p.skip_to(record_size, 'space at the end of the record')

    cookie = http.cookiejar.Cookie(
        version=0, name=name, value=value, port=None, port_specified=False,
        domain=domain, domain_specified=bool(domain), domain_initial_dot=domain.startswith('.'),
        path=path, path_specified=bool(path), secure=is_secure, expires=expiration_date, discard=False,
        comment=None, comment_url=None, rest={})
    jar.set_cookie(cookie)
    return record_size
|
|
|
|
|
|
|
|
|
|
|
|
def parse_safari_cookies(data, jar=None, logger=YDLLogger()):
    """
    Parse the contents of a Safari Cookies.binarycookies file into a cookie jar.

    References:
        - https://github.com/libyal/dtformats/blob/main/documentation/Safari%20Cookies.asciidoc
            - this data appears to be out of date but the important parts of the database structure is the same
            - there are a few bytes here and there which are skipped during parsing
    """
    if jar is None:
        jar = YoutubeDLCookieJar()
    page_sizes, body_start = _parse_safari_cookies_header(data, logger)
    p = DataParser(data[body_start:], logger)
    for page_size in page_sizes:
        _parse_safari_cookies_page(p.read_bytes(page_size), jar, logger)
    # trailing file footer is ignored
    p.skip_to_end('footer')
    return jar
|
|
|
|
|
|
|
|
|
2021-12-27 02:28:44 +01:00
|
|
|
class _LinuxDesktopEnvironment(Enum):
    """
    https://chromium.googlesource.com/chromium/src/+/refs/heads/main/base/nix/xdg_util.h
    DesktopEnvironment
    """
    # Mirrors Chromium's DesktopEnvironment enum; values are opaque (auto()).
    OTHER = auto()
    CINNAMON = auto()
    DEEPIN = auto()
    GNOME = auto()
    KDE3 = auto()
    KDE4 = auto()
    KDE5 = auto()
    KDE6 = auto()
    PANTHEON = auto()
    UKUI = auto()
    UNITY = auto()
    XFCE = auto()
    LXQT = auto()
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
2021-12-27 02:28:44 +01:00
|
|
|
class _LinuxKeyring(Enum):
    """
    https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/sync/key_storage_util_linux.h
    SelectedLinuxBackend
    """
    KWALLET = auto()  # KDE4
    KWALLET5 = auto()
    KWALLET6 = auto()
    GNOMEKEYRING = auto()
    # plaintext storage: cookies are encrypted with the hard-coded v10 password
    BASICTEXT = auto()
|
2021-12-27 02:28:44 +01:00
|
|
|
|
|
|
|
|
|
|
|
# Keyring names accepted in the --cookies-from-browser specification
SUPPORTED_KEYRINGS = _LinuxKeyring.__members__.keys()
|
|
|
|
|
|
|
|
|
2023-05-29 15:51:35 +02:00
|
|
|
def _get_linux_desktop_environment(env, logger):
    """
    Detect the desktop environment from the environment mapping *env*.

    Port of Chromium's detection logic:
    https://chromium.googlesource.com/chromium/src/+/refs/heads/main/base/nix/xdg_util.cc
    GetDesktopEnvironment
    """
    xdg_current_desktop = env.get('XDG_CURRENT_DESKTOP', None)
    desktop_session = env.get('DESKTOP_SESSION', None)
    if xdg_current_desktop is not None:
        # XDG_CURRENT_DESKTOP may be a colon-separated list; first recognised entry wins
        for part in map(str.strip, xdg_current_desktop.split(':')):
            if part == 'Unity':
                # gnome-fallback sessions set XDG_CURRENT_DESKTOP to Unity as well
                if desktop_session is not None and 'gnome-fallback' in desktop_session:
                    return _LinuxDesktopEnvironment.GNOME
                else:
                    return _LinuxDesktopEnvironment.UNITY
            elif part == 'Deepin':
                return _LinuxDesktopEnvironment.DEEPIN
            elif part == 'GNOME':
                return _LinuxDesktopEnvironment.GNOME
            elif part == 'X-Cinnamon':
                return _LinuxDesktopEnvironment.CINNAMON
            elif part == 'KDE':
                # disambiguate plasma generations via KDE_SESSION_VERSION
                kde_version = env.get('KDE_SESSION_VERSION', None)
                if kde_version == '5':
                    return _LinuxDesktopEnvironment.KDE5
                elif kde_version == '6':
                    return _LinuxDesktopEnvironment.KDE6
                elif kde_version == '4':
                    return _LinuxDesktopEnvironment.KDE4
                else:
                    logger.info(f'unknown KDE version: "{kde_version}". Assuming KDE4')
                    return _LinuxDesktopEnvironment.KDE4
            elif part == 'Pantheon':
                return _LinuxDesktopEnvironment.PANTHEON
            elif part == 'XFCE':
                return _LinuxDesktopEnvironment.XFCE
            elif part == 'UKUI':
                return _LinuxDesktopEnvironment.UKUI
            elif part == 'LXQt':
                return _LinuxDesktopEnvironment.LXQT
        logger.info(f'XDG_CURRENT_DESKTOP is set to an unknown value: "{xdg_current_desktop}"')

    elif desktop_session is not None:
        # fall back to the (older) DESKTOP_SESSION variable
        if desktop_session == 'deepin':
            return _LinuxDesktopEnvironment.DEEPIN
        elif desktop_session in ('mate', 'gnome'):
            return _LinuxDesktopEnvironment.GNOME
        elif desktop_session in ('kde4', 'kde-plasma'):
            return _LinuxDesktopEnvironment.KDE4
        elif desktop_session == 'kde':
            # KDE_SESSION_VERSION only exists on KDE >= 4
            if 'KDE_SESSION_VERSION' in env:
                return _LinuxDesktopEnvironment.KDE4
            else:
                return _LinuxDesktopEnvironment.KDE3
        elif 'xfce' in desktop_session or desktop_session == 'xubuntu':
            return _LinuxDesktopEnvironment.XFCE
        elif desktop_session == 'ukui':
            return _LinuxDesktopEnvironment.UKUI
        else:
            logger.info(f'DESKTOP_SESSION is set to an unknown value: "{desktop_session}"')

    else:
        # last resort: legacy session-id variables
        if 'GNOME_DESKTOP_SESSION_ID' in env:
            return _LinuxDesktopEnvironment.GNOME
        elif 'KDE_FULL_SESSION' in env:
            if 'KDE_SESSION_VERSION' in env:
                return _LinuxDesktopEnvironment.KDE4
            else:
                return _LinuxDesktopEnvironment.KDE3
    return _LinuxDesktopEnvironment.OTHER
|
2021-12-27 02:28:44 +01:00
|
|
|
|
|
|
|
|
|
|
|
def _choose_linux_keyring(logger):
    """
    Pick the keyring backend Chromium would select for the detected desktop.

    Mirrors SelectBackend in [1].

    There is currently support for forcing chromium to use BASIC_TEXT by creating a file called
    `Disable Local Encryption` [1] in the user data dir. The function to write this file (`WriteBackendUse()` [1])
    does not appear to be called anywhere other than in tests, so the user would have to create this file manually
    and so would be aware enough to tell yt-dlp to use the BASIC_TEXT keyring.

    References:
        - [1] https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/sync/key_storage_util_linux.cc
    """
    desktop_environment = _get_linux_desktop_environment(os.environ, logger)
    logger.debug(f'detected desktop environment: {desktop_environment.name}')
    if desktop_environment == _LinuxDesktopEnvironment.KDE4:
        return _LinuxKeyring.KWALLET
    if desktop_environment == _LinuxDesktopEnvironment.KDE5:
        return _LinuxKeyring.KWALLET5
    if desktop_environment == _LinuxDesktopEnvironment.KDE6:
        return _LinuxKeyring.KWALLET6
    if desktop_environment in (
        _LinuxDesktopEnvironment.KDE3, _LinuxDesktopEnvironment.LXQT, _LinuxDesktopEnvironment.OTHER,
    ):
        return _LinuxKeyring.BASICTEXT
    # everything else (GNOME, Cinnamon, Unity, XFCE, ...) uses the GNOME keyring
    return _LinuxKeyring.GNOMEKEYRING
|
|
|
|
|
|
|
|
|
2023-05-29 15:51:35 +02:00
|
|
|
def _get_kwallet_network_wallet(keyring, logger):
    """ The name of the wallet used to store network passwords.

    https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/sync/kwallet_dbus.cc
    KWalletDBus::NetworkWallet
    which does a dbus call to the following function:
    https://api.kde.org/frameworks/kwallet/html/classKWallet_1_1Wallet.html
    Wallet::NetworkWallet
    """
    default_wallet = 'kdewallet'
    try:
        # dbus service name and object path for each supported kwalletd generation
        daemons = {
            _LinuxKeyring.KWALLET: ('org.kde.kwalletd', '/modules/kwalletd'),
            _LinuxKeyring.KWALLET5: ('org.kde.kwalletd5', '/modules/kwalletd5'),
            _LinuxKeyring.KWALLET6: ('org.kde.kwalletd6', '/modules/kwalletd6'),
        }
        if keyring not in daemons:
            raise ValueError(keyring)
        service_name, wallet_path = daemons[keyring]

        stdout, _, returncode = Popen.run([
            'dbus-send', '--session', '--print-reply=literal',
            f'--dest={service_name}',
            wallet_path,
            'org.kde.KWallet.networkWallet',
        ], text=True, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)

        if returncode:
            logger.warning('failed to read NetworkWallet')
            return default_wallet
        logger.debug(f'NetworkWallet = "{stdout.strip()}"')
        return stdout.strip()
    except Exception as e:
        # any failure (dbus missing, unknown keyring, ...) falls back to the default
        logger.warning(f'exception while obtaining NetworkWallet: {e}')
        return default_wallet
|
|
|
|
|
|
|
|
|
2023-05-29 15:51:35 +02:00
|
|
|
def _get_kwallet_password(browser_keyring_name, keyring, logger):
    """Read the browser's "Safe Storage" password from KWallet via kwallet-query.

    Returns the password as bytes, or b'' when it cannot be obtained
    (Chromium then derives the key from an empty password).
    """
    logger.debug(f'using kwallet-query to obtain password from {keyring.name}')

    if shutil.which('kwallet-query') is None:
        # NOTE: added the missing space between "be" and "included"
        logger.error('kwallet-query command not found. KWallet and kwallet-query '
                     'must be installed to read from KWallet. kwallet-query should be '
                     'included in the kwallet package for your distribution')
        return b''

    network_wallet = _get_kwallet_network_wallet(keyring, logger)

    try:
        stdout, _, returncode = Popen.run([
            'kwallet-query',
            '--read-password', f'{browser_keyring_name} Safe Storage',
            '--folder', f'{browser_keyring_name} Keys',
            network_wallet,
        ], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)

        if returncode:
            logger.error(f'kwallet-query failed with return code {returncode}. '
                         'Please consult the kwallet-query man page for details')
            return b''
        if stdout.lower().startswith(b'failed to read'):
            logger.debug('failed to read password from kwallet. Using empty string instead')
            # this sometimes occurs in KDE because chrome does not check hasEntry and instead
            # just tries to read the value (which kwallet returns "") whereas kwallet-query
            # checks hasEntry. To verify this:
            # dbus-monitor "interface='org.kde.KWallet'" "type=method_return"
            # while starting chrome.
            # this was identified as a bug later and fixed in
            # https://chromium.googlesource.com/chromium/src/+/bbd54702284caca1f92d656fdcadf2ccca6f4165%5E%21/#F0
            # https://chromium.googlesource.com/chromium/src/+/5463af3c39d7f5b6d11db7fbd51e38cc1974d764
            return b''
        logger.debug('password found')
        return stdout.rstrip(b'\n')
    except Exception as e:
        logger.warning(f'exception running kwallet-query: {error_to_str(e)}')
        return b''
|
|
|
|
|
|
|
|
|
|
|
|
def _get_gnome_keyring_password(browser_keyring_name, logger):
    """Read the '<browser> Safe Storage' secret from the GNOME keyring.

    Returns the secret as bytes, or b'' when secretstorage is unavailable
    or no matching item exists.
    """
    if not secretstorage:
        logger.error(f'secretstorage not available {_SECRETSTORAGE_UNAVAILABLE_REASON}')
        return b''
    # the Gnome keyring does not seem to organise keys in the same way as KWallet,
    # using `dbus-monitor` during startup, it can be observed that chromium lists all keys
    # and presumably searches for its key in the list. It appears that we must do the same.
    # https://github.com/jaraco/keyring/issues/556
    with contextlib.closing(secretstorage.dbus_init()) as con:
        col = secretstorage.get_default_collection(con)
        for item in col.get_all_items():
            if item.get_label() == f'{browser_keyring_name} Safe Storage':
                return item.get_secret()
        logger.error('failed to read from keyring')
        return b''
|
2021-12-27 02:28:44 +01:00
|
|
|
|
|
|
|
|
|
|
|
def _get_linux_keyring_password(browser_keyring_name, keyring, logger):
    """Obtain the Chromium "Safe Storage" password from the selected Linux keyring.

    *keyring* may be a user-supplied keyring name (member of SUPPORTED_KEYRINGS)
    or falsy to auto-detect. Returns bytes, or None for BASICTEXT.
    """
    # note: chrome/chromium can be run with the following flags to determine which keyring backend
    # it has chosen to use
    # chromium --enable-logging=stderr --v=1 2>&1 | grep key_storage_
    # Chromium supports a flag: --password-store=<basic|gnome|kwallet> so the automatic detection
    # will not be sufficient in all cases.

    keyring = _LinuxKeyring[keyring] if keyring else _choose_linux_keyring(logger)
    logger.debug(f'Chosen keyring: {keyring.name}')

    if keyring in (_LinuxKeyring.KWALLET, _LinuxKeyring.KWALLET5, _LinuxKeyring.KWALLET6):
        return _get_kwallet_password(browser_keyring_name, keyring, logger)
    elif keyring == _LinuxKeyring.GNOMEKEYRING:
        return _get_gnome_keyring_password(browser_keyring_name, logger)
    elif keyring == _LinuxKeyring.BASICTEXT:
        # when basic text is chosen, all cookies are stored as v10 (so no keyring password is required)
        return None
    # unreachable: every _LinuxKeyring member is handled above
    assert False, f'Unknown keyring {keyring}'
|
|
|
|
|
|
|
|
|
|
|
|
def _get_mac_keyring_password(browser_keyring_name, logger):
    """Read the '<browser> Safe Storage' password from the macOS keychain.

    Returns bytes, or None when the lookup fails (e.g. user denied access).
    """
    logger.debug('using find-generic-password to obtain password from OSX keychain')
    try:
        stdout, _, returncode = Popen.run(
            ['security', 'find-generic-password',
             '-w',  # write password to stdout
             '-a', browser_keyring_name,  # match 'account'
             '-s', f'{browser_keyring_name} Safe Storage'],  # match 'service'
            stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
        if returncode:
            logger.warning('find-generic-password failed')
            return None
        return stdout.rstrip(b'\n')
    except Exception as e:
        logger.warning(f'exception running find-generic-password: {error_to_str(e)}')
        return None
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _get_windows_v10_key(browser_root, logger):
    """Return the DPAPI-decrypted AES key used for Chromium "v10" cookies on Windows.

    Returns None when the Local State file or the key cannot be found.

    References:
        - [1] https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/sync/os_crypt_win.cc
    """
    # pick the most recently modified "Local State" (multiple profiles may exist)
    path = _newest(_find_files(browser_root, 'Local State', logger))
    if path is None:
        logger.error('could not find local state file')
        return None
    logger.debug(f'Found local state file at "{path}"')
    with open(path, encoding='utf8') as f:
        data = json.load(f)
    try:
        # kOsCryptEncryptedKeyPrefName in [1]
        base64_key = data['os_crypt']['encrypted_key']
    except KeyError:
        logger.error('no encrypted key in Local State')
        return None
    encrypted_key = base64.b64decode(base64_key)
    # kDPAPIKeyPrefix in [1]
    prefix = b'DPAPI'
    if not encrypted_key.startswith(prefix):
        logger.error('invalid key')
        return None
    return _decrypt_windows_dpapi(encrypted_key[len(prefix):], logger)
|
|
|
|
|
|
|
|
|
|
|
|
def pbkdf2_sha1(password, salt, iterations, key_length):
    """Derive *key_length* bytes from *password* using PBKDF2-HMAC-SHA1."""
    derived_key = hashlib.pbkdf2_hmac('sha1', password, salt, iterations, key_length)
    return derived_key
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
2023-05-29 15:51:35 +02:00
|
|
|
def _decrypt_aes_cbc_multi(ciphertext, keys, logger, initialization_vector=b' ' * 16):
    """Try each candidate key in turn; return the first UTF-8-decodable plaintext, else None."""
    for candidate_key in keys:
        plaintext = unpad_pkcs7(aes_cbc_decrypt_bytes(ciphertext, candidate_key, initialization_vector))
        with contextlib.suppress(UnicodeDecodeError):
            return plaintext.decode()
    logger.warning('failed to decrypt cookie (AES-CBC) because UTF-8 decoding failed. Possibly the key is wrong?', only_once=True)
    return None
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _decrypt_aes_gcm(ciphertext, key, nonce, authentication_tag, logger):
    """AES-GCM decrypt-and-verify, then UTF-8 decode; returns None (with a warning) on failure."""
    try:
        plaintext = aes_gcm_decrypt_and_verify_bytes(ciphertext, key, authentication_tag, nonce)
    except ValueError:
        # raised when the authentication tag does not verify
        logger.warning('failed to decrypt cookie (AES-GCM) because the MAC check failed. Possibly the key is wrong?', only_once=True)
        return None

    try:
        return plaintext.decode()
    except UnicodeDecodeError:
        logger.warning('failed to decrypt cookie (AES-GCM) because UTF-8 decoding failed. Possibly the key is wrong?', only_once=True)
        return None
|
|
|
|
|
|
|
|
|
|
|
|
def _decrypt_windows_dpapi(ciphertext, logger):
    """
    Decrypt *ciphertext* with the Windows DPAPI (current user scope).

    Raises DownloadError when decryption fails, since no cookie can be
    recovered without the key.

    References:
        - https://docs.microsoft.com/en-us/windows/win32/api/dpapi/nf-dpapi-cryptunprotectdata
    """

    # imported locally: ctypes.windll only exists on Windows
    import ctypes
    import ctypes.wintypes

    class DATA_BLOB(ctypes.Structure):
        _fields_ = [('cbData', ctypes.wintypes.DWORD),
                    ('pbData', ctypes.POINTER(ctypes.c_char))]

    buffer = ctypes.create_string_buffer(ciphertext)
    blob_in = DATA_BLOB(ctypes.sizeof(buffer), buffer)
    blob_out = DATA_BLOB()
    ret = ctypes.windll.crypt32.CryptUnprotectData(
        ctypes.byref(blob_in),  # pDataIn
        None,  # ppszDataDescr: human readable description of pDataIn
        None,  # pOptionalEntropy: salt?
        None,  # pvReserved: must be NULL
        None,  # pPromptStruct: information about prompts to display
        0,  # dwFlags
        ctypes.byref(blob_out),  # pDataOut
    )
    if not ret:
        message = 'Failed to decrypt with DPAPI. See https://github.com/yt-dlp/yt-dlp/issues/10927 for more info'
        logger.error(message)
        raise DownloadError(message)  # force exit

    result = ctypes.string_at(blob_out.pbData, blob_out.cbData)
    # the output buffer is allocated by CryptUnprotectData; free it ourselves
    ctypes.windll.kernel32.LocalFree(blob_out.pbData)
    return result
|
|
|
|
|
|
|
|
|
|
|
|
def _config_home():
|
|
|
|
return os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config'))
|
|
|
|
|
|
|
|
|
|
|
|
def _open_database_copy(database_path, tmpdir):
|
|
|
|
# cannot open sqlite databases if they are already in use (e.g. by the browser)
|
|
|
|
database_copy_path = os.path.join(tmpdir, 'temporary.sqlite')
|
|
|
|
shutil.copy(database_path, database_copy_path)
|
|
|
|
conn = sqlite3.connect(database_copy_path)
|
|
|
|
return conn.cursor()
|
|
|
|
|
|
|
|
|
|
|
|
def _get_column_names(cursor, table_name):
|
2022-04-11 17:10:28 +02:00
|
|
|
table_info = cursor.execute(f'PRAGMA table_info({table_name})').fetchall()
|
2022-05-09 13:54:28 +02:00
|
|
|
return [row[1].decode() for row in table_info]
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
2024-01-31 09:43:52 +01:00
|
|
|
def _newest(files):
|
|
|
|
return max(files, key=lambda path: os.lstat(path).st_mtime, default=None)
|
|
|
|
|
|
|
|
|
|
|
|
def _find_files(root, filename, logger):
    """Yield every file named *filename* under *root*.

    There may be one hit per browser profile; callers typically pass the
    result to _newest() to pick the most recently used one.
    """
    # if there are multiple browser profiles, take the most recently used one
    i = 0
    with _create_progress_bar(logger) as progress_bar:
        for curr_root, _, files in os.walk(root):
            for file in files:
                i += 1
                # fix: show the actual target name instead of the literal "(unknown)"
                progress_bar.print(f'Searching for "{filename}": {i: 6d} files searched')
                if file == filename:
                    yield os.path.join(curr_root, file)
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _merge_cookie_jars(jars):
    """Combine several cookie jars into one; the last jar with a filename wins."""
    merged = YoutubeDLCookieJar()
    for jar in jars:
        for cookie in jar:
            merged.set_cookie(cookie)
        if jar.filename is not None:
            merged.filename = jar.filename
    return merged
|
|
|
|
|
|
|
|
|
|
|
|
def _is_path(value):
|
2024-01-31 09:43:52 +01:00
|
|
|
return any(sep in value for sep in (os.path.sep, os.path.altsep) if sep)
|
2021-07-21 22:32:49 +02:00
|
|
|
|
|
|
|
|
2022-08-30 18:54:46 +02:00
|
|
|
def _parse_browser_specification(browser_name, profile=None, keyring=None, container=None):
    """Validate and normalise a --cookies-from-browser specification.

    Raises ValueError for unsupported browser or keyring names. A profile
    that looks like a filesystem path is expanded (~, env vars).
    Returns the (browser_name, profile, keyring, container) tuple.
    """
    if browser_name not in SUPPORTED_BROWSERS:
        raise ValueError(f'unsupported browser: "{browser_name}"')
    if keyring not in (None, *SUPPORTED_KEYRINGS):
        raise ValueError(f'unsupported keyring: "{keyring}"')
    if profile is not None and _is_path(expand_path(profile)):
        profile = expand_path(profile)
    return browser_name, profile, keyring, container
|
2022-09-16 19:02:00 +02:00
|
|
|
|
|
|
|
|
|
|
|
class LenientSimpleCookie(http.cookies.SimpleCookie):
    """More lenient version of http.cookies.SimpleCookie"""
    # From https://github.com/python/cpython/blob/v3.10.7/Lib/http/cookies.py

    # We use Morsel's legal key chars to avoid errors on setting values
    _LEGAL_KEY_CHARS = r'\w\d' + re.escape('!#$%&\'*+-.:^_`|~')
    _LEGAL_VALUE_CHARS = _LEGAL_KEY_CHARS + re.escape('(),/<=>?@[]{}')

    # cookie attribute names recognised after a name=value pair
    _RESERVED = {
        'expires',
        'path',
        'comment',
        'domain',
        'max-age',
        'secure',
        'httponly',
        'version',
        'samesite',
    }

    # attributes that are valid without a value
    _FLAGS = {'secure', 'httponly'}

    # Added 'bad' group to catch the remaining value
    _COOKIE_PATTERN = re.compile(r'''
        \s*                            # Optional whitespace at start of cookie
        (?P<key>                       # Start of group 'key'
        [''' + _LEGAL_KEY_CHARS + r''']+?# Any word of at least one letter
        )                              # End of group 'key'
        (                              # Optional group: there may not be a value.
        \s*=\s*                          # Equal Sign
        (                                # Start of potential value
        (?P<val>                         # Start of group 'val'
        "(?:[^\\"]|\\.)*"                  # Any doublequoted string
        |                                  # or
        \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT  # Special case for "expires" attr
        |                                  # or
        [''' + _LEGAL_VALUE_CHARS + r''']*   # Any word or empty string
        )                                # End of group 'val'
        |                                # or
        (?P<bad>(?:\\;|[^;])*?)          # 'bad' group fallback for invalid values
        )                                # End of potential value
        )?                             # End of optional value group
        \s*                            # Any number of spaces.
        (\s+|;|$)                      # Ending either at space, semicolon, or EOS.
        ''', re.ASCII | re.VERBOSE)

    def load(self, data):
        """Parse a cookie string; invalid fragments reset state instead of raising."""
        # Workaround for https://github.com/yt-dlp/yt-dlp/issues/4776
        if not isinstance(data, str):
            return super().load(data)

        morsel = None
        for match in self._COOKIE_PATTERN.finditer(data):
            if match.group('bad'):
                # unparsable fragment: drop it and stop attaching attributes to
                # the previous morsel
                morsel = None
                continue

            key, value = match.group('key', 'val')

            is_attribute = False
            if key.startswith('$'):
                # RFC 2109 style attribute, e.g. $Path
                key = key[1:]
                is_attribute = True

            lower_key = key.lower()
            if lower_key in self._RESERVED:
                if morsel is None:
                    # attribute without a preceding cookie: ignore it
                    continue

                if value is None:
                    if lower_key not in self._FLAGS:
                        # a non-flag attribute requires a value
                        morsel = None
                        continue
                    value = True
                else:
                    value, _ = self.value_decode(value)

                morsel[key] = value

            elif is_attribute:
                # a $-prefixed key that is not a known attribute is invalid
                morsel = None

            elif value is not None:
                # a new name=value cookie pair
                morsel = self.get(key, http.cookies.Morsel())
                real_value, coded_value = self.value_decode(value)
                morsel.set(key, real_value, coded_value)
                self[key] = morsel

            else:
                morsel = None
|
2023-05-27 09:08:19 +02:00
|
|
|
|
|
|
|
|
|
|
|
class YoutubeDLCookieJar(http.cookiejar.MozillaCookieJar):
|
|
|
|
"""
|
|
|
|
See [1] for cookie file format.
|
|
|
|
|
|
|
|
1. https://curl.haxx.se/docs/http-cookies.html
|
|
|
|
"""
|
|
|
|
_HTTPONLY_PREFIX = '#HttpOnly_'
|
|
|
|
_ENTRY_LEN = 7
|
|
|
|
_HEADER = '''# Netscape HTTP Cookie File
|
|
|
|
# This file is generated by yt-dlp. Do not edit.
|
|
|
|
|
|
|
|
'''
|
|
|
|
_CookieFileEntry = collections.namedtuple(
|
|
|
|
'CookieFileEntry',
|
|
|
|
('domain_name', 'include_subdomains', 'path', 'https_only', 'expires_at', 'name', 'value'))
|
|
|
|
|
|
|
|
def __init__(self, filename=None, *args, **kwargs):
|
|
|
|
super().__init__(None, *args, **kwargs)
|
|
|
|
if is_path_like(filename):
|
|
|
|
filename = os.fspath(filename)
|
|
|
|
self.filename = filename
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def _true_or_false(cndn):
|
|
|
|
return 'TRUE' if cndn else 'FALSE'
|
|
|
|
|
|
|
|
@contextlib.contextmanager
|
|
|
|
def open(self, file, *, write=False):
|
|
|
|
if is_path_like(file):
|
|
|
|
with open(file, 'w' if write else 'r', encoding='utf-8') as f:
|
|
|
|
yield f
|
|
|
|
else:
|
|
|
|
if write:
|
|
|
|
file.truncate(0)
|
|
|
|
yield file
|
|
|
|
|
2023-07-22 05:38:12 +02:00
|
|
|
    def _really_save(self, f, ignore_discard, ignore_expires):
        """Write every (non-filtered) cookie to *f*, one tab-separated Netscape line each."""
        now = time.time()
        for cookie in self:
            if (not ignore_discard and cookie.discard
                    or not ignore_expires and cookie.is_expired(now)):
                continue
            name, value = cookie.name, cookie.value
            if value is None:
                # cookies.txt regards 'Set-Cookie: foo' as a cookie
                # with no name, whereas http.cookiejar regards it as a
                # cookie with no value.
                name, value = '', name
            f.write('{}\n'.format('\t'.join((
                cookie.domain,
                self._true_or_false(cookie.domain.startswith('.')),
                cookie.path,
                self._true_or_false(cookie.secure),
                str_or_none(cookie.expires, default=''),
                name, value,
            ))))
|
2023-05-27 09:08:19 +02:00
|
|
|
|
2023-07-22 05:38:12 +02:00
|
|
|
def save(self, filename=None, ignore_discard=True, ignore_expires=True):
    """Save cookies to *filename* (defaults to self.filename) in Netscape format.

    Adapted from CPython 3.6:
    https://github.com/python/cpython/blob/8d999cbf4adea053be6dbb612b9844635c4dfb8e/Lib/http/cookiejar.py#L2091-L2117
    """
    if filename is None:
        filename = self.filename
    if filename is None:
        raise ValueError(http.cookiejar.MISSING_FILENAME_TEXT)

    # Store session cookies with `expires` set to 0 instead of an empty string
    for cookie in self:
        if cookie.expires is None:
            cookie.expires = 0

    with self.open(filename, write=True) as f:
        f.write(self._HEADER)
        self._really_save(f, ignore_discard, ignore_expires)
|
2023-05-27 09:08:19 +02:00
|
|
|
|
2023-07-22 05:38:12 +02:00
|
|
|
def load(self, filename=None, ignore_discard=True, ignore_expires=True):
    """Load cookies from a Netscape-format file into this jar."""
    if filename is None:
        filename = self.filename
    if filename is None:
        raise ValueError(http.cookiejar.MISSING_FILENAME_TEXT)

    def sanitize_line(line):
        # Strip curl's HttpOnly marker so the entry parses like a normal one.
        if line.startswith(self._HTTPONLY_PREFIX):
            line = line[len(self._HTTPONLY_PREFIX):]
        # comments and empty lines are fine
        if line.startswith('#') or not line.strip():
            return line
        fields = line.split('\t')
        if len(fields) != self._ENTRY_LEN:
            raise http.cookiejar.LoadError(f'invalid length {len(fields)}')
        entry = self._CookieFileEntry(*fields)
        if entry.expires_at and not entry.expires_at.isdigit():
            raise http.cookiejar.LoadError(f'invalid expires at {entry.expires_at}')
        return line

    cf = io.StringIO()
    with self.open(filename) as f:
        for line in f:
            try:
                cf.write(sanitize_line(line))
            except http.cookiejar.LoadError as e:
                # A line opening with '[', '{' or '"' strongly suggests a
                # JSON cookie export rather than cookies.txt.
                if f'{line.strip()} '[0] in '[{"':
                    raise http.cookiejar.LoadError(
                        'Cookies file must be Netscape formatted, not JSON. See '
                        'https://github.com/yt-dlp/yt-dlp/wiki/FAQ#how-do-i-pass-cookies-to-yt-dlp')
                write_string(f'WARNING: skipping cookie file entry due to {e}: {line!r}\n')
    cf.seek(0)
    self._really_load(cf, filename, ignore_discard, ignore_expires)
    # Session cookies are denoted by either `expires` field set to
    # an empty string or 0. MozillaCookieJar only recognizes the former
    # (see https://bugs.python.org/issue17164), so we must force the
    # latter to be recognized as session cookies on our own.
    # Session cookies may be important for cookies-based authentication,
    # e.g. usually, when user does not check 'Remember me' check box while
    # logging in on a site, some important cookies are stored as session
    # cookies so that not recognizing them will result in failed login.
    for cookie in self:
        # Treat `expires=0` cookies as session cookies
        if cookie.expires == 0:
            cookie.expires = None
            cookie.discard = True
|
|
|
|
|
|
|
|
def get_cookie_header(self, url):
    """Generate a Cookie HTTP header for a given url (None if no cookies apply)."""
    request = urllib.request.Request(normalize_url(sanitize_url(url)))
    self.add_cookie_header(request)
    return request.get_header('Cookie')
|
2023-06-21 05:51:20 +02:00
|
|
|
|
2023-07-05 22:16:28 +02:00
|
|
|
def get_cookies_for_url(self, url):
    """Generate a list of Cookie objects for a given url"""
    # Policy `_now` attribute must be set before calling `_cookies_for_request`
    # Ref: https://github.com/python/cpython/blob/3.7/Lib/http/cookiejar.py#L1360
    self._policy._now = self._now = int(time.time())
    request = urllib.request.Request(normalize_url(sanitize_url(url)))
    return self._cookies_for_request(request)
|
2023-07-05 22:16:28 +02:00
|
|
|
|
2023-06-21 05:51:20 +02:00
|
|
|
def clear(self, *args, **kwargs):
    """Clear cookies like CookieJar.clear(), ignoring the KeyError the base
    class raises when the targeted domain/path/name has no cookies."""
    try:
        return super().clear(*args, **kwargs)
    except KeyError:
        pass
|