* Add `pin`, `pin-curl-cffi`, `pin-secretstorage` and `pin-deno` extras
* Check in a `uv.lock` for devs
* Add `devscripts/update_requirements.py` for dependency upgrades

Authored by: bashonly, Grub4K
Co-authored-by: Simon Sawicki <contact@grub4k.dev>
#!/usr/bin/env python3
from __future__ import annotations

# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import collections.abc
import dataclasses
import hashlib
import io
import pathlib
import re
import zipfile

from devscripts.tomlparse import parse_toml
from devscripts.utils import (
    call_github_api,
    request,
    run_process,
    zipf_files_and_folders,
)
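
# Overview: this script (re)generates the uv.lock lockfile, exports the
# per-target requirements files under bundle/requirements/, regenerates the
# hash-pinned `pin*` extras in pyproject.toml, and can also update the vendored
# yt-dlp-ejs release assets. Invoke it as `python -m devscripts.update_requirements`
# (see CUSTOM_COMPILE_COMMAND below).
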
BASE_PATH = pathlib.Path(__file__).parent.parent
PYPROJECT_PATH = BASE_PATH / 'pyproject.toml'
MAKEFILE_PATH = BASE_PATH / 'Makefile'
LOCKFILE_PATH = BASE_PATH / 'uv.lock'
REQUIREMENTS_PATH = BASE_PATH / 'bundle/requirements'
REQS_OUTPUT_TMPL = 'requirements-{}.txt'
CUSTOM_COMPILE_COMMAND = 'python -m devscripts.update_requirements'

EXTRAS_TABLE = 'project.optional-dependencies'
GROUPS_TABLE = 'dependency-groups'

PINNED_EXTRAS = {
    'pin': 'default',
    'pin-curl-cffi': 'curl-cffi',
    'pin-secretstorage': 'secretstorage',
    'pin-deno': 'deno',
}
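
# Each `pin*` extra is regenerated by update_requirements() below as a fully
# pinned mirror of the extra it maps to: every dependency gets an exact version
# or a direct wheel URL with hash, so installs are reproducible.
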
# Release asset name -> whether the asset is vendored into PACKAGE_PATH below;
# the sha3-512 hash of every listed asset is recorded in the generated _info.py
EJS_ASSETS = {
    'yt.solver.lib.js': False,
    'yt.solver.lib.min.js': False,
    'yt.solver.deno.lib.js': True,
    'yt.solver.bun.lib.js': True,
    'yt.solver.core.min.js': False,
    'yt.solver.core.js': True,
}

EJS_TEMPLATE = '''\
# This file is generated by devscripts/update_requirements.py. DO NOT MODIFY!

VERSION = {version!r}
HASHES = {{
{hash_mapping}
}}
'''
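
# Illustrative only -- a generated _info.py would look roughly like:
#
#   # This file is generated by devscripts/update_requirements.py. DO NOT MODIFY!
#
#   VERSION = '<tag_name>'  # placeholder, not a real release tag
#   HASHES = {
#       'yt.solver.lib.js': '<sha3-512 hexdigest>',
#       ...
#   }
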

# Each field maps onto a `uv export` flag in run_uv_export() below:
# extras -> --extra, groups -> --group, prune_packages -> --prune,
# omit_packages -> --no-emit-package
@dataclasses.dataclass
class Target:
    extras: list[str] = dataclasses.field(default_factory=list)
    groups: list[str] = dataclasses.field(default_factory=list)
    prune_packages: list[str] = dataclasses.field(default_factory=list)
    omit_packages: list[str] = dataclasses.field(default_factory=list)


LINUX_TARGET = Target(
    extras=['default', 'curl-cffi', 'secretstorage'],
    groups=['pyinstaller'],
)
WIN64_TARGET = Target(
    extras=['default', 'curl-cffi'],
)

BUNDLE_TARGETS = {
    'linux-x86_64': LINUX_TARGET,
    'linux-aarch64': LINUX_TARGET,
    'linux-armv7l': LINUX_TARGET,
    'musllinux-x86_64': LINUX_TARGET,
    'musllinux-aarch64': LINUX_TARGET,
    'win-x64': WIN64_TARGET,
    'win-arm64': WIN64_TARGET,
    'win-x86': Target(extras=['default']),
    'macos': Target(
        extras=['default', 'curl-cffi'],
        # NB: Resolve delocate and PyInstaller together since they share dependencies
        groups=['delocate', 'pyinstaller'],
        # curl-cffi and cffi don't provide universal2 wheels, so only directly install their deps
        omit_packages=['curl-cffi', 'cffi'],
    ),
    # We fuse our own universal2 wheels for curl-cffi+cffi, so we need a separate requirements file
    'macos-curl_cffi': Target(
        extras=['curl-cffi'],
        # Only need curl-cffi+cffi in this requirements file; their deps are installed directly
        # XXX: Try to keep these in sync with curl-cffi's and cffi's transitive dependencies
        prune_packages=['rich'],
        omit_packages=['certifi', 'pycparser'],
    ),
}
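
# Each BUNDLE_TARGETS key above becomes bundle/requirements/requirements-<key>.txt
# via REQS_OUTPUT_TMPL when update_requirements() runs.
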
PYINSTALLER_BUILDS_TARGETS = {
    'win-x64-pyinstaller': 'win_amd64',
    'win-x86-pyinstaller': 'win32',
    'win-arm64-pyinstaller': 'win_arm64',
}

PYINSTALLER_BUILDS_URL = 'https://api.github.com/repos/yt-dlp/Pyinstaller-Builds/releases/latest'

PYINSTALLER_BUILDS_TMPL = '''\
{}pyinstaller @ {} \\
    --hash={}
'''

PYINSTALLER_VERSION_RE = re.compile(r'pyinstaller-(?P<version>[0-9]+\.[0-9]+\.[0-9]+)-')
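
# Rendered, a requirements file for a PyInstaller build target looks roughly
# like this (URL and hash are placeholders):
#
#   <compiled PyInstaller deps with hashes>
#   pyinstaller @ https://github.com/yt-dlp/Pyinstaller-Builds/releases/download/<tag>/pyinstaller-<x.y.z>-py3-none-win_amd64.whl \
#       --hash=sha256:<hexdigest>
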

def generate_table_lines(
    table_name: str,
    table: dict[str, str | list[str | dict[str, str]]],
) -> collections.abc.Iterator[str]:
    yield f'[{table_name}]\n'
    for name, value in table.items():
        assert isinstance(value, (str, list)), 'only string & array table values are supported'

        if isinstance(value, str):
            yield f'{name} = "{value}"\n'
            continue

        yield f'{name} = ['
        if value:
            yield '\n'
        for element in value:
            yield '    '
            if isinstance(element, dict):
                yield '{ ' + ', '.join(f'{k} = "{v}"' for k, v in element.items()) + ' }'
            else:
                yield f'"{element}"'
            yield ',\n'
        yield ']\n'
    yield '\n'
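
# For example, generate_table_lines('project.optional-dependencies', {'default': ['a', 'b==1.0']})
# yields text equivalent to:
#
#   [project.optional-dependencies]
#   default = [
#       "a",
#       "b==1.0",
#   ]
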

def replace_table_in_pyproject(
    pyproject_text: str,
    table_name: str,
    table: dict[str, str | list[str | dict[str, str]]],
) -> collections.abc.Iterator[str]:
    # Simple state machine: 0 = before the table header,
    # INSIDE = skipping the table's existing lines (replaced by generate_table_lines),
    # BEYOND = past the blank line that terminates the table
    INSIDE = 1
    BEYOND = 2

    state = 0
    for line in pyproject_text.splitlines(True):
        if state == INSIDE:
            if line == '\n':
                state = BEYOND
            continue
        if line != f'[{table_name}]\n' or state == BEYOND:
            yield line
            continue
        yield from generate_table_lines(table_name, table)
        state = INSIDE

def modify_and_write_pyproject(
    pyproject_text: str,
    table_name: str,
    table: dict[str, str | list[str | dict[str, str]]],
) -> None:
    with PYPROJECT_PATH.open(mode='w') as f:
        f.writelines(replace_table_in_pyproject(pyproject_text, table_name, table))

@dataclasses.dataclass
class Dependency:
    name: str
    direct_reference: str | None
    version: str | None
    markers: str | None

def parse_dependency(line: str, comp_op: str = '==') -> Dependency:
    line = line.rstrip().removesuffix('\\')
    before, sep, after = map(str.strip, line.partition('@'))
    name, _, version_and_markers = map(str.strip, before.partition(comp_op))
    assertion_msg = f'unable to parse Dependency from line:\n {line}'
    assert name, assertion_msg

    if sep:
        # Direct reference
        version = version_and_markers
        direct_reference, _, markers = map(str.strip, after.partition(';'))
        assert direct_reference, assertion_msg
    else:
        # No direct reference
        direct_reference = None
        version, _, markers = map(str.strip, version_and_markers.partition(';'))

    return Dependency(
        name=name,
        direct_reference=direct_reference,
        version=version or None,
        markers=markers or None)
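
# Examples of the two shapes parse_dependency() handles:
#   'requests==2.32.0 ; python_version >= "3.9" \'
#     -> Dependency(name='requests', direct_reference=None,
#                   version='2.32.0', markers='python_version >= "3.9"')
#   'pkg @ https://example.invalid/pkg.whl'  (hypothetical URL)
#     -> Dependency(name='pkg', direct_reference='https://example.invalid/pkg.whl',
#                   version=None, markers=None)
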

def run_uv_export(
    *,
    extras: list[str] | None = None,
    groups: list[str] | None = None,
    prune_packages: list[str] | None = None,
    omit_packages: list[str] | None = None,
    bare: bool = False,
    output_file: pathlib.Path | None = None,
) -> str:
    return run_process(
        'uv', 'export',
        '--no-python-downloads',
        '--quiet',
        '--no-progress',
        '--color=never',
        '--format=requirements.txt',
        '--frozen',
        '--refresh',
        '--no-emit-project',
        '--no-default-groups',
        *(f'--extra={extra}' for extra in (extras or [])),
        *(f'--group={group}' for group in (groups or [])),
        *(f'--prune={package}' for package in (prune_packages or [])),
        *(f'--no-emit-package={package}' for package in (omit_packages or [])),
        *(['--no-annotate', '--no-hashes', '--no-header'] if bare else []),
        *([f'--output-file={output_file.relative_to(BASE_PATH)}'] if output_file else []),
    ).stdout
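
# run_uv_export() is equivalent to invoking, e.g.:
#   uv export --frozen --refresh --no-emit-project --no-default-groups \
#       --format=requirements.txt --extra default --group pyinstaller ...
# resolving against the checked-in uv.lock (--frozen).
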

def run_pip_compile(
    *args: str,
    input_line: str,
    output_file: pathlib.Path | None = None,
    env: dict[str, str] | None = None,
) -> str:
    return run_process(
        'uv', 'pip', 'compile',
        '--no-python-downloads',
        '--quiet',
        '--no-progress',
        '--color=never',
        '--format=requirements.txt',
        '--refresh',
        '--generate-hashes',
        '--no-strip-markers',
        f'--custom-compile-command={CUSTOM_COMPILE_COMMAND}',
        '--universal',
        *args,
        *([f'--output-file={output_file.relative_to(BASE_PATH)}'] if output_file else []),
        '-',  # Read from stdin
        input=f'{input_line}\n',
        env=env,
    ).stdout

def makefile_variables(
    prefix: str,
    filetypes: list[str] | None = None,
    *,
    version: str | None = None,
    name: str | None = None,
    digest: str | None = None,
    data: bytes | None = None,
    keys_only: bool = False,
) -> dict[str, str | None]:

    variables = {
        f'{prefix}_VERSION': version,
        f'{prefix}_WHEEL_NAME': name,
        f'{prefix}_WHEEL_HASH': digest,
    }
    for ft in filetypes or []:
        variables.update({
            f'{prefix}_{ft.upper()}_FOLDERS': None,
            f'{prefix}_{ft.upper()}_FILES': None,
        })

    if keys_only:
        return variables

    assert all(arg is not None for arg in (version, name, digest, not filetypes or data))

    if filetypes:
        with io.BytesIO(data) as buf, zipfile.ZipFile(buf) as zipf:
            for ft in filetypes:
                files, folders = zipf_files_and_folders(zipf, f'*.{ft.lower()}')
                variables[f'{prefix}_{ft.upper()}_FOLDERS'] = ' '.join(folders)
                variables[f'{prefix}_{ft.upper()}_FILES'] = ' '.join(files)

    return variables


def ejs_makefile_variables(**kwargs) -> dict[str, str | None]:
    return makefile_variables('EJS', filetypes=['PY', 'JS'], **kwargs)
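
# ejs_makefile_variables() therefore reads/writes these Makefile variables:
#   EJS_VERSION, EJS_WHEEL_NAME, EJS_WHEEL_HASH,
#   EJS_PY_FOLDERS, EJS_PY_FILES, EJS_JS_FOLDERS, EJS_JS_FILES
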

def update_ejs(verify: bool = False):
    PACKAGE_NAME = 'yt-dlp-ejs'
    PREFIX = f'    "{PACKAGE_NAME}=='  # matches the indented dependency line in pyproject.toml
    LIBRARY_NAME = PACKAGE_NAME.replace('-', '_')
    PACKAGE_PATH = BASE_PATH / 'yt_dlp/extractor/youtube/jsc/_builtin/vendor'
    RELEASE_URL = 'https://api.github.com/repos/yt-dlp/ejs/releases/latest'

    # Find the currently pinned yt-dlp-ejs version in pyproject.toml
    current_version = None
    with PYPROJECT_PATH.open() as file:
        for line in file:
            if not line.startswith(PREFIX):
                continue
            current_version, _, _ = line.removeprefix(PREFIX).partition('"')

    if not current_version:
        print(f'{PACKAGE_NAME} dependency line could not be found')
        return

    # Read the current EJS_* variable values from the Makefile
    makefile_info = ejs_makefile_variables(keys_only=True)
    prefixes = tuple(f'{key} = ' for key in makefile_info)
    with MAKEFILE_PATH.open() as file:
        for line in file:
            if not line.startswith(prefixes):
                continue
            key, _, val = line.partition(' = ')
            makefile_info[key] = val.rstrip()

    info = call_github_api(RELEASE_URL)
    version = info['tag_name']
    if version == current_version:
        print(f'{PACKAGE_NAME} is up to date! ({version})')
        return

    print(f'Updating {PACKAGE_NAME} from {current_version} to {version}')
    hashes = []
    wheel_info = {}
    for asset in info['assets']:
        name = asset['name']
        digest = asset['digest']

        is_wheel = name.startswith(f'{LIBRARY_NAME}-') and name.endswith('.whl')
        if not is_wheel and name not in EJS_ASSETS:
            continue

        with request(asset['browser_download_url']) as resp:
            data = resp.read()

        # verify digest from github
        algo, _, expected = digest.partition(':')
        hexdigest = hashlib.new(algo, data).hexdigest()
        assert hexdigest == expected, f'downloaded asset mismatch ({hexdigest!r} != {expected!r})'

        if is_wheel:
            wheel_info = ejs_makefile_variables(version=version, name=name, digest=digest, data=data)
            continue

        # calculate sha3-512 digest
        asset_hash = hashlib.sha3_512(data).hexdigest()
        hashes.append(f'    {name!r}: {asset_hash!r},')

        if EJS_ASSETS[name]:
            (PACKAGE_PATH / name).write_bytes(data)

    hash_mapping = '\n'.join(hashes)
    for asset_name in EJS_ASSETS:
        assert asset_name in hash_mapping, f'{asset_name} not found in release'

    assert all(wheel_info.get(key) for key in makefile_info), 'wheel info not found in release'

    (PACKAGE_PATH / '_info.py').write_text(EJS_TEMPLATE.format(
        version=version,
        hash_mapping=hash_mapping,
    ))

    content = PYPROJECT_PATH.read_text()
    updated = content.replace(PREFIX + current_version, PREFIX + version)
    PYPROJECT_PATH.write_text(updated)

    makefile = MAKEFILE_PATH.read_text()
    for key in wheel_info:
        makefile = makefile.replace(f'{key} = {makefile_info[key]}', f'{key} = {wheel_info[key]}')
    MAKEFILE_PATH.write_text(makefile)

    update_requirements(upgrade_only=PACKAGE_NAME, verify=verify)

def update_requirements(upgrade_only: str | None = None, verify: bool = False):
    # Are we upgrading all packages or only one (e.g. 'yt-dlp-ejs' or 'protobug')?
    upgrade_arg = f'--upgrade-package={upgrade_only}' if upgrade_only else '--upgrade'

    pyproject_text = PYPROJECT_PATH.read_text()
    pyproject_toml = parse_toml(pyproject_text)
    extras = pyproject_toml['project']['optional-dependencies']

    # Remove pinned extras so they don't muck up the lockfile during generation/upgrade
    for pinned_extra_name in PINNED_EXTRAS:
        extras.pop(pinned_extra_name, None)

    # Write an intermediate pyproject.toml to use for generating lockfile and bundle requirements
    modify_and_write_pyproject(pyproject_text, table_name=EXTRAS_TABLE, table=extras)

    # If verifying, set UV_EXCLUDE_NEWER env var with the last timestamp recorded in uv.lock
    env = None
    if verify:
        env = os.environ.copy()
        env['UV_EXCLUDE_NEWER'] = parse_toml(LOCKFILE_PATH.read_text())['options']['exclude-newer']

    # Generate/upgrade lockfile
    run_process('uv', 'lock', upgrade_arg, env=env)
    lockfile = parse_toml(LOCKFILE_PATH.read_text())

    # Generate bundle requirements
    if not upgrade_only or upgrade_only.lower() == 'pyinstaller':
        info = call_github_api(PYINSTALLER_BUILDS_URL)
        for target_suffix, asset_tag in PYINSTALLER_BUILDS_TARGETS.items():
            asset_info = next(asset for asset in info['assets'] if asset_tag in asset['name'])
            pyinstaller_version = PYINSTALLER_VERSION_RE.match(asset_info['name']).group('version')
            pyinstaller_builds_deps = run_pip_compile(
                '--no-emit-package=pyinstaller',
                upgrade_arg,
                input_line=f'pyinstaller=={pyinstaller_version}',
                env=env)
            requirements_path = REQUIREMENTS_PATH / REQS_OUTPUT_TMPL.format(target_suffix)
            requirements_path.write_text(PYINSTALLER_BUILDS_TMPL.format(
                pyinstaller_builds_deps, asset_info['browser_download_url'], asset_info['digest']))

    for target_suffix, target in BUNDLE_TARGETS.items():
        run_uv_export(
            extras=target.extras,
            groups=target.groups,
            prune_packages=target.prune_packages,
            omit_packages=target.omit_packages,
            output_file=REQUIREMENTS_PATH / REQS_OUTPUT_TMPL.format(target_suffix))

    run_uv_export(
        groups=['build'],
        output_file=REQUIREMENTS_PATH / REQS_OUTPUT_TMPL.format('pypi-build'))

    run_pip_compile(
        upgrade_arg,
        input_line='pip',
        output_file=REQUIREMENTS_PATH / REQS_OUTPUT_TMPL.format('pip'),
        env=env)

    # Generate pinned extras
    for pinned_name, extra_name in PINNED_EXTRAS.items():
        pinned_extra = extras[pinned_name] = []
        exported_extra = run_uv_export(extras=[extra_name], bare=True)
        for line in exported_extra.splitlines():
            dep = parse_dependency(line)
            wheels = next((
                pkg.get('wheels') for pkg in lockfile['package']
                if pkg['name'] == dep.name and pkg['version'] == dep.version), None)
            assert wheels, f'no wheels found for {dep.name} in lockfile'
            # If multiple wheels are found, we'll *assume* it's because they're platform-specific.
            # Platform tags can't be used in markers, so the best we can do is pin to exact version
            if len(wheels) > 1:
                pinned_extra.append(line)
                continue
            # If there's only a 'none-any' wheel, then use a direct reference to PyPI URL with hash
            wheel_url = wheels[0]['url']
            algo, _, digest = wheels[0]['hash'].partition(':')
            pinned_line = f'{dep.name} @ {wheel_url}#{algo}={digest}'
            pinned_extra.append(' ; '.join(filter(None, (pinned_line, dep.markers))))

    # Write the finalized pyproject.toml
    modify_and_write_pyproject(pyproject_text, table_name=EXTRAS_TABLE, table=extras)
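
# A pinned extra entry generated above takes one of two forms (illustrative values):
#   websockets @ https://files.pythonhosted.org/packages/<...>/websockets-<x.y>-py3-none-any.whl#sha256=<hexdigest> ; <markers>
# or, when multiple (platform-specific) wheels exist, the exact-version line
# exported by uv is kept as-is, e.g. `curl-cffi==<x.y.z> ; <markers>`.
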

def parse_args():
    import argparse
    parser = argparse.ArgumentParser(description='generate/update lockfile and requirements')
    parser.add_argument(
        'upgrade_only', nargs='?', metavar='PACKAGE',
        help='only upgrade this package (by default, all packages will be upgraded)')
    parser.add_argument(
        '--verify', action='store_true',
        help='only verify the update(s) using the previously recorded cooldown timestamp')
    return parser.parse_args()
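
# Usage (see parse_args above):
#   python -m devscripts.update_requirements             # upgrade all packages
#   python -m devscripts.update_requirements yt-dlp-ejs   # upgrade a single package
#   python -m devscripts.update_requirements --verify     # re-resolve using the timestamp recorded in uv.lock
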

def main():
    args = parse_args()

    if args.upgrade_only in ('ejs', 'yt-dlp-ejs'):
        update_ejs(verify=args.verify)
    else:
        update_requirements(upgrade_only=args.upgrade_only, verify=args.verify)


if __name__ == '__main__':
    main()