mirror of
https://github.com/yt-dlp/yt-dlp.git
synced 2026-04-08 01:47:45 +09:00
Add lockfile and pinned extras (#16421)
* Add `pin`, `pin-curl-cffi`, `pin-secretstorage` and `pin-deno` extras * Check in a `uv.lock` for devs * Add `devscripts/update_requirements.py` for dependency upgrades Authored by: bashonly, Grub4K Co-authored-by: Simon Sawicki <contact@grub4k.dev>
This commit is contained in:
@@ -1,225 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
import contextlib
|
||||
import dataclasses
|
||||
import datetime as dt
|
||||
import itertools
|
||||
import json
|
||||
import pathlib
|
||||
import re
|
||||
import urllib.request
|
||||
|
||||
from devscripts.utils import run_process
|
||||
|
||||
|
||||
# Paths and filename templates for the generated requirements files
REQUIREMENTS_PATH = pathlib.Path(__file__).parent.parent / 'bundle/requirements'
INPUT_TMPL = 'requirements-{}.in'
OUTPUT_TMPL = 'requirements-{}.txt'
# Recorded in the generated files' header so users know how to regenerate them
CUSTOM_COMPILE_COMMAND = 'python -m devscripts.update_bundle_requirements'
# Only resolve versions published at least 7 days ago ("cooldown"),
# except for the packages listed in COOLDOWN_EXCEPTIONS
COOLDOWN_DATE = (dt.date.today() - dt.timedelta(days=7)).isoformat()
FUTURE_DATE = (dt.date.today() + dt.timedelta(days=1)).isoformat()

COOLDOWN_EXCEPTIONS = ('protobug', 'yt-dlp-ejs')

LINUX_GNU_PYTHON_VERSION = '3.13'
LINUX_MUSL_PYTHON_VERSION = '3.14'
# Backward-compatible alias for the original misspelled name ("VERISON"),
# still referenced by INSTALL_DEPS_TARGETS below
LINUX_MUSL_PYTHON_VERISON = LINUX_MUSL_PYTHON_VERSION
WINDOWS_INTEL_PYTHON_VERSION = '3.10'
WINDOWS_ARM64_PYTHON_VERSION = '3.13'
MACOS_PYTHON_VERSION = '3.14'
|
||||
|
||||
|
||||
@dataclasses.dataclass
class Target:
    """A platform/Python combination to resolve pinned bundle requirements for."""
    # Passed to `uv pip compile` as --python-platform (e.g. 'x86_64-manylinux2014')
    platform: str
    # Passed to `uv pip compile` as --python-version (e.g. '3.13')
    version: str
    # Project extras forwarded to devscripts.install_deps via --include-extra
    extras: list[str] = dataclasses.field(default_factory=list)
    # Dependency groups forwarded to devscripts.install_deps via --include-group
    groups: list[str] = dataclasses.field(default_factory=list)
    # Extra arguments appended verbatim to the `uv pip compile` invocation
    compile_args: list[str] = dataclasses.field(default_factory=list)
|
||||
|
||||
|
||||
# Keyed by the suffix used for the generated requirements-<suffix>.in/.txt files
INSTALL_DEPS_TARGETS = {
    'linux-x86_64': Target(
        platform='x86_64-manylinux2014',
        version=LINUX_GNU_PYTHON_VERSION,
        extras=['default', 'curl-cffi', 'secretstorage'],
        groups=['pyinstaller'],
    ),
    'linux-aarch64': Target(
        platform='aarch64-manylinux2014',
        version=LINUX_GNU_PYTHON_VERSION,
        extras=['default', 'curl-cffi', 'secretstorage'],
        groups=['pyinstaller'],
    ),
    'linux-armv7l': Target(
        platform='linux',
        version=LINUX_GNU_PYTHON_VERSION,
        extras=['default', 'curl-cffi', 'secretstorage'],
        groups=['pyinstaller'],
    ),
    'musllinux-x86_64': Target(
        platform='x86_64-unknown-linux-musl',
        version=LINUX_MUSL_PYTHON_VERISON,
        extras=['default', 'curl-cffi', 'secretstorage'],
        groups=['pyinstaller'],
    ),
    'musllinux-aarch64': Target(
        platform='aarch64-unknown-linux-musl',
        version=LINUX_MUSL_PYTHON_VERISON,
        extras=['default', 'secretstorage'],
        # NOTE: curl-cffi comes from a dependency group here, not an extra
        groups=['pyinstaller', 'curl-cffi'],
    ),
    'win-x64': Target(
        platform='x86_64-pc-windows-msvc',
        version=WINDOWS_INTEL_PYTHON_VERSION,
        extras=['default', 'curl-cffi'],
    ),
    'win-x86': Target(
        platform='i686-pc-windows-msvc',
        version=WINDOWS_INTEL_PYTHON_VERSION,
        extras=['default'],
    ),
    'win-arm64': Target(
        platform='aarch64-pc-windows-msvc',
        version=WINDOWS_ARM64_PYTHON_VERSION,
        extras=['default', 'curl-cffi'],
    ),
    'macos': Target(
        platform='macos',
        version=MACOS_PYTHON_VERSION,
        extras=['default', 'curl-cffi'],
        # NB: Resolve delocate and PyInstaller together since they share dependencies
        groups=['delocate', 'pyinstaller'],
        # curl-cffi and cffi don't provide universal2 wheels, so only directly install their deps
        # NB: uv's --no-emit-package option is equivalent to pip-compile's --unsafe-package option
        compile_args=['--no-emit-package', 'curl-cffi', '--no-emit-package', 'cffi'],
    ),
    # We fuse our own universal2 wheels for curl-cffi+cffi, so we need a separate requirements file
    'macos-curl_cffi': Target(
        platform='macos',
        version=MACOS_PYTHON_VERSION,
        extras=['curl-cffi'],
        # Only need curl-cffi+cffi in this requirements file; their deps are installed directly
        compile_args=[
            # XXX: Try to keep this in sync with curl-cffi's and cffi's transitive dependencies
            f'--no-emit-package={package}' for package in (
                'certifi',
                'markdown-it-py',
                'mdurl',
                'pycparser',
                'pygments',
                'rich',
            )
        ],
    ),
}
|
||||
|
||||
|
||||
@dataclasses.dataclass
class PyInstallerTarget:
    """A Windows target to pin a yt-dlp/Pyinstaller-Builds wheel for."""
    # Passed to `uv pip compile` as --python-platform
    platform: str
    # Passed to `uv pip compile` as --python-version
    version: str
    # Substring identifying the matching wheel asset in the GitHub release
    asset_tag: str
|
||||
|
||||
|
||||
# Keyed by the suffix used for the generated requirements-<suffix>.in/.txt files
PYINSTALLER_BUILDS_TARGETS = {
    'win-x64-pyinstaller': PyInstallerTarget(
        platform='x86_64-pc-windows-msvc',
        version=WINDOWS_INTEL_PYTHON_VERSION,
        asset_tag='win_amd64',
    ),
    'win-x86-pyinstaller': PyInstallerTarget(
        platform='i686-pc-windows-msvc',
        version=WINDOWS_INTEL_PYTHON_VERSION,
        asset_tag='win32',
    ),
    'win-arm64-pyinstaller': PyInstallerTarget(
        platform='aarch64-pc-windows-msvc',
        version=WINDOWS_ARM64_PYTHON_VERSION,
        asset_tag='win_arm64',
    ),
}

# Latest release of yt-dlp's own PyInstaller wheel builds for Windows
PYINSTALLER_BUILDS_URL = 'https://api.github.com/repos/yt-dlp/Pyinstaller-Builds/releases/latest'

# Rendered with: (resolved dependency lines, wheel download URL, wheel digest)
PYINSTALLER_BUILDS_TMPL = '''\
{}pyinstaller@{} \\
    --hash={}
'''

# Extracts the version number from a PyInstaller wheel filename
PYINSTALLER_VERSION_RE = re.compile(r'pyinstaller-(?P<version>[0-9]+\.[0-9]+\.[0-9]+)-')
|
||||
|
||||
|
||||
def write_requirements_input(filepath: pathlib.Path, *args: str) -> None:
    """Generate a requirements input (.in) file using devscripts.install_deps."""
    completed = run_process(
        sys.executable, '-m', 'devscripts.install_deps',
        '--omit-default', '--print', *args)
    filepath.write_text(completed.stdout)
|
||||
|
||||
|
||||
def run_pip_compile(python_platform: str, python_version: str, requirements_input_path: pathlib.Path, *args: str):
    """Resolve *requirements_input_path* with `uv pip compile`.

    Returns the completed-process object from run_process; callers read
    its ``stdout`` for the generated requirements text.
    (The previous ``-> str`` annotation was incorrect.)
    """
    return run_process(
        'uv', 'pip', 'compile',
        '--no-config',
        '--quiet',
        '--no-progress',
        '--color=never',
        '--upgrade',
        # Cooldown: ignore versions published in the last week ...
        f'--exclude-newer={COOLDOWN_DATE}',
        # ... except for our own packages, which we always want fresh
        *(f'--exclude-newer-package={package}={FUTURE_DATE}' for package in COOLDOWN_EXCEPTIONS),
        f'--python-platform={python_platform}',
        f'--python-version={python_version}',
        '--generate-hashes',
        '--no-strip-markers',
        f'--custom-compile-command={CUSTOM_COMPILE_COMMAND}',
        str(requirements_input_path),
        '--format=requirements.txt',
        *args)
|
||||
|
||||
|
||||
def main():
    """Regenerate every pinned requirements file under bundle/requirements."""
    with contextlib.closing(urllib.request.urlopen(PYINSTALLER_BUILDS_URL)) as resp:
        info = json.load(resp)

    # 1) Pin the patched PyInstaller wheel (plus its deps) per Windows target
    for target_suffix, target in PYINSTALLER_BUILDS_TARGETS.items():
        asset_info = next(asset for asset in info['assets'] if target.asset_tag in asset['name'])
        pyinstaller_version = PYINSTALLER_VERSION_RE.match(asset_info['name']).group('version')
        base_requirements_path = REQUIREMENTS_PATH / INPUT_TMPL.format(target_suffix)
        base_requirements_path.write_text(f'pyinstaller=={pyinstaller_version}\n')
        # Resolve PyInstaller's dependencies but omit pyinstaller itself;
        # the wheel from PYINSTALLER_BUILDS_URL is emitted via the template instead
        pyinstaller_builds_deps = run_pip_compile(
            target.platform, target.version, base_requirements_path,
            '--no-emit-package=pyinstaller').stdout
        requirements_path = REQUIREMENTS_PATH / OUTPUT_TMPL.format(target_suffix)
        requirements_path.write_text(PYINSTALLER_BUILDS_TMPL.format(
            pyinstaller_builds_deps, asset_info['browser_download_url'], asset_info['digest']))

    # 2) Pin the runtime/bundling requirements per install_deps target
    for target_suffix, target in INSTALL_DEPS_TARGETS.items():
        requirements_input_path = REQUIREMENTS_PATH / INPUT_TMPL.format(target_suffix)
        # e.g. extras=['a', 'b'] -> --include-extra a --include-extra b
        write_requirements_input(
            requirements_input_path,
            *itertools.chain.from_iterable(itertools.product(['--include-extra'], target.extras)),
            *itertools.chain.from_iterable(itertools.product(['--include-group'], target.groups)))
        run_pip_compile(
            target.platform, target.version, requirements_input_path, *target.compile_args,
            f'--output-file={REQUIREMENTS_PATH / OUTPUT_TMPL.format(target_suffix)}')

    # 3) Pin the PyPI build (sdist/wheel) environment
    pypi_input_path = REQUIREMENTS_PATH / INPUT_TMPL.format('pypi-build')
    write_requirements_input(pypi_input_path, '--include-group', 'build')
    run_pip_compile(
        'linux', LINUX_GNU_PYTHON_VERSION, pypi_input_path,
        f'--output-file={REQUIREMENTS_PATH / OUTPUT_TMPL.format("pypi-build")}')

    # 4) Pin only pip itself (cherry-picked from the build group) for Windows
    pip_input_path = REQUIREMENTS_PATH / INPUT_TMPL.format('pip')
    write_requirements_input(pip_input_path, '--include-group', 'build', '--cherry-pick', 'pip')
    run_pip_compile(
        'windows', WINDOWS_INTEL_PYTHON_VERSION, pip_input_path,
        f'--output-file={REQUIREMENTS_PATH / OUTPUT_TMPL.format("pip")}')
|
||||
|
||||
|
||||
# Entry point for direct execution
if __name__ == '__main__':
    main()
|
||||
@@ -1,236 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc
|
||||
import contextlib
|
||||
import io
|
||||
import json
|
||||
import hashlib
|
||||
import pathlib
|
||||
import urllib.request
|
||||
import zipfile
|
||||
|
||||
|
||||
# Template for the generated vendor/_info.py module;
# rendered with the release tag and the per-asset hash mapping
TEMPLATE = '''\
# This file is generated by devscripts/update_ejs.py. DO NOT MODIFY!

VERSION = {version!r}
HASHES = {{
{hash_mapping}
}}
'''
PACKAGE_NAME = 'yt-dlp-ejs'
# Matches the pyproject.toml dependency line for PACKAGE_NAME
PREFIX = f'    "{PACKAGE_NAME}=='
# PyPI normalizes '-' to '_' in artifact (sdist/wheel) filenames
PYPI_ARTIFACT_NAME = PACKAGE_NAME.replace('-', '_')
BASE_PATH = pathlib.Path(__file__).parent.parent
PYPROJECT_PATH = BASE_PATH / 'pyproject.toml'
# Destination directory for vendored solver scripts
PACKAGE_PATH = BASE_PATH / 'yt_dlp/extractor/youtube/jsc/_builtin/vendor'
RELEASE_URL = 'https://api.github.com/repos/yt-dlp/ejs/releases/latest'
# Release assets to track; the value marks whether the file is
# written into PACKAGE_PATH (True) or only hashed (False)
ASSETS = {
    'yt.solver.lib.js': False,
    'yt.solver.lib.min.js': False,
    'yt.solver.deno.lib.js': True,
    'yt.solver.bun.lib.js': True,
    'yt.solver.core.min.js': False,
    'yt.solver.core.js': True,
}
MAKEFILE_PATH = BASE_PATH / 'Makefile'
REQUIREMENTS_PATH = BASE_PATH / 'bundle/requirements'
|
||||
|
||||
|
||||
def requirements_needs_update(
    lines: collections.abc.Iterable[str],
    package: str,
    version: str,
):
    """Return whether *lines* pin *package* to something other than *version*.

    Returns False when no line for *package* is found at all.
    """
    identifier = f'{package}=='
    pinned_line = next((line for line in lines if line.startswith(identifier)), None)
    if pinned_line is None:
        return False
    return not pinned_line.removeprefix(identifier).startswith(version)
|
||||
|
||||
|
||||
def requirements_update(
    lines: collections.abc.Iterable[str],
    package: str,
    new_version: str,
    new_hashes: list[str],
):
    """Yield *lines* with *package*'s pinned entry replaced by *new_version*/*new_hashes*.

    Physical lines continued with a trailing backslash are treated as one
    logical entry; an update note is appended to the leading comment header.
    """
    first_comment = True
    current = []  # accumulates the physical lines of the current logical line
    for line in lines:
        if not line.endswith('\n'):
            line += '\n'

        if first_comment:
            comment_line = line.strip()
            if comment_line.startswith('#'):
                # Still inside the leading comment header; pass through
                yield line
                continue

            # First non-comment line: close the header with an update note
            first_comment = False
            yield '# It was later updated using devscripts/update_ejs.py\n'

        current.append(line)
        if line.endswith('\\\n'):
            # continue logical line
            continue

        if not current[0].startswith(f'{package}=='):
            # Not the entry being updated; emit the logical line unchanged
            yield from current

        else:
            # Replace the whole entry with the new pin and its hashes
            yield f'{package}=={new_version} \\\n'
            for digest in new_hashes[:-1]:
                yield f'    --hash={digest} \\\n'
            yield f'    --hash={new_hashes[-1]}\n'

        current.clear()
|
||||
|
||||
|
||||
def request(url: str):
    """Open *url* for reading, wrapped so that `with` closes the response."""
    response = urllib.request.urlopen(url)
    return contextlib.closing(response)
|
||||
|
||||
|
||||
def makefile_variables(
    version: str | None = None,
    name: str | None = None,
    digest: str | None = None,
    data: bytes | None = None,
    keys_only: bool = False,
) -> dict[str, str | None]:
    """Build the EJS_* Makefile variable mapping for a wheel release.

    With keys_only=True, every known key maps to None (used to discover
    the current values in the Makefile); otherwise all of
    version/name/digest/data must be provided.
    """
    assert keys_only or all(arg is not None for arg in (version, name, digest, data))

    return {
        'EJS_VERSION': None if keys_only else version,
        'EJS_WHEEL_NAME': None if keys_only else name,
        'EJS_WHEEL_HASH': None if keys_only else digest,
        # NB: each call below re-opens the wheel zip; fine for a one-shot script
        'EJS_PY_FOLDERS': None if keys_only else list_wheel_contents(data, 'py', files=False),
        'EJS_PY_FILES': None if keys_only else list_wheel_contents(data, 'py', folders=False),
        'EJS_JS_FOLDERS': None if keys_only else list_wheel_contents(data, 'js', files=False),
        'EJS_JS_FILES': None if keys_only else list_wheel_contents(data, 'js', folders=False),
    }
|
||||
|
||||
|
||||
def list_wheel_contents(
    wheel_data: bytes,
    suffix: str | None = None,
    folders: bool = True,
    files: bool = True,
) -> str:
    """Return a space-separated listing of matching paths inside the wheel.

    Only entries under 'yt_dlp_ejs/' are considered; *suffix* (e.g. 'py')
    filters by file extension. Folders precede files when both are listed.
    """
    assert folders or files, 'at least one of "folders" or "files" must be True'

    with zipfile.ZipFile(io.BytesIO(wheel_data)) as zipf:
        entry_names = [zinfo.filename for zinfo in zipf.infolist()]

    matched = [name for name in entry_names if name.startswith('yt_dlp_ejs/')]
    if suffix:
        matched = [name for name in matched if name.endswith(f'.{suffix}')]

    if not folders:
        return ' '.join(matched)

    # Unique parent directories, preserving first-seen order
    parent_dirs = list(dict.fromkeys(name.rpartition('/')[0] for name in matched))
    if not files:
        return ' '.join(parent_dirs)

    return ' '.join(parent_dirs + matched)
|
||||
|
||||
|
||||
def main():
    """Update the vendored yt-dlp-ejs assets to the latest GitHub release."""
    # Find the currently pinned version in pyproject.toml
    current_version = None
    with PYPROJECT_PATH.open() as file:
        for line in file:
            if not line.startswith(PREFIX):
                continue
            current_version, _, _ = line.removeprefix(PREFIX).partition('"')

    if not current_version:
        print(f'{PACKAGE_NAME} dependency line could not be found')
        return

    # Read the current EJS_* variable values from the Makefile
    makefile_info = makefile_variables(keys_only=True)
    prefixes = tuple(f'{key} = ' for key in makefile_info)
    with MAKEFILE_PATH.open() as file:
        for line in file:
            if not line.startswith(prefixes):
                continue
            key, _, val = line.partition(' = ')
            makefile_info[key] = val.rstrip()

    with request(RELEASE_URL) as resp:
        info = json.load(resp)

    version = info['tag_name']
    if version == current_version:
        print(f'{PACKAGE_NAME} is up to date! ({version})')
        return

    print(f'Updating {PACKAGE_NAME} from {current_version} to {version}')
    hashes = []  # lines for the generated HASHES mapping in _info.py
    requirements_hashes = []  # sdist + wheel digests for the requirements files
    wheel_info = {}
    for asset in info['assets']:
        name = asset['name']
        digest = asset['digest']

        # Is it the source distribution? If so, we only need its hash for the requirements files
        if name == f'{PYPI_ARTIFACT_NAME}-{version}.tar.gz':
            requirements_hashes.append(digest)
            continue

        is_wheel = name.startswith(f'{PYPI_ARTIFACT_NAME}-') and name.endswith('.whl')
        if not is_wheel and name not in ASSETS:
            continue

        with request(asset['browser_download_url']) as resp:
            data = resp.read()

        # verify digest from github
        algo, _, expected = digest.partition(':')
        hexdigest = hashlib.new(algo, data).hexdigest()
        assert hexdigest == expected, f'downloaded attest mismatch ({hexdigest!r} != {expected!r})'

        if is_wheel:
            requirements_hashes.append(digest)
            wheel_info = makefile_variables(version, name, digest, data)
            continue

        # calculate sha3-512 digest
        asset_hash = hashlib.sha3_512(data).hexdigest()
        hashes.append(f'    {name!r}: {asset_hash!r},')

        # Vendor the asset into the package tree when flagged in ASSETS
        if ASSETS[name]:
            (PACKAGE_PATH / name).write_bytes(data)

    # Sanity: every tracked asset and the wheel must be present in the release
    hash_mapping = '\n'.join(hashes)
    for asset_name in ASSETS:
        assert asset_name in hash_mapping, f'{asset_name} not found in release'

    assert all(wheel_info.get(key) for key in makefile_info), 'wheel info not found in release'

    (PACKAGE_PATH / '_info.py').write_text(TEMPLATE.format(
        version=version,
        hash_mapping=hash_mapping,
    ))

    # Bump the pinned version in pyproject.toml
    content = PYPROJECT_PATH.read_text()
    updated = content.replace(PREFIX + current_version, PREFIX + version)
    PYPROJECT_PATH.write_text(updated)

    # Swap the old EJS_* values for the new ones in the Makefile
    makefile = MAKEFILE_PATH.read_text()
    for key in wheel_info:
        makefile = makefile.replace(f'{key} = {makefile_info[key]}', f'{key} = {wheel_info[key]}')
    MAKEFILE_PATH.write_text(makefile)

    # Re-pin the package in every generated requirements file that is stale
    for req in REQUIREMENTS_PATH.glob('requirements-*.txt'):
        lines = req.read_text().splitlines(True)
        if requirements_needs_update(lines, PACKAGE_NAME, version):
            with req.open(mode='w') as f:
                f.writelines(requirements_update(lines, PACKAGE_NAME, version, requirements_hashes))
|
||||
|
||||
|
||||
# Entry point for direct execution
if __name__ == '__main__':
    main()
|
||||
497
devscripts/update_requirements.py
Executable file
497
devscripts/update_requirements.py
Executable file
@@ -0,0 +1,497 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
import collections.abc
|
||||
import dataclasses
|
||||
import hashlib
|
||||
import io
|
||||
import pathlib
|
||||
import re
|
||||
import zipfile
|
||||
|
||||
from devscripts.tomlparse import parse_toml
|
||||
from devscripts.utils import (
|
||||
call_github_api,
|
||||
request,
|
||||
run_process,
|
||||
zipf_files_and_folders,
|
||||
)
|
||||
|
||||
|
||||
# Repository-root-relative paths this script reads and rewrites
BASE_PATH = pathlib.Path(__file__).parent.parent
PYPROJECT_PATH = BASE_PATH / 'pyproject.toml'
MAKEFILE_PATH = BASE_PATH / 'Makefile'
LOCKFILE_PATH = BASE_PATH / 'uv.lock'
REQUIREMENTS_PATH = BASE_PATH / 'bundle/requirements'
REQS_OUTPUT_TMPL = 'requirements-{}.txt'
# Recorded in the generated requirements files' header
CUSTOM_COMPILE_COMMAND = 'python -m devscripts.update_requirements'

# pyproject.toml table names that get rewritten by this script
EXTRAS_TABLE = 'project.optional-dependencies'
GROUPS_TABLE = 'dependency-groups'

# Maps each generated "pin" extra to the extra whose resolution it pins
PINNED_EXTRAS = {
    'pin': 'default',
    'pin-curl-cffi': 'curl-cffi',
    'pin-secretstorage': 'secretstorage',
    'pin-deno': 'deno',
}

# yt-dlp-ejs release assets to track; the value marks whether the file is
# vendored into the package tree (True) or only hashed (False)
EJS_ASSETS = {
    'yt.solver.lib.js': False,
    'yt.solver.lib.min.js': False,
    'yt.solver.deno.lib.js': True,
    'yt.solver.bun.lib.js': True,
    'yt.solver.core.min.js': False,
    'yt.solver.core.js': True,
}

# Template for the generated vendor/_info.py module;
# rendered with the release tag and the per-asset hash mapping
EJS_TEMPLATE = '''\
# This file is generated by devscripts/update_requirements.py. DO NOT MODIFY!

VERSION = {version!r}
HASHES = {{
{hash_mapping}
}}
'''
|
||||
|
||||
|
||||
@dataclasses.dataclass
class Target:
    """Selection of extras/groups/packages for one bundle's requirements file."""
    # Project extras passed to `uv export` via --extra
    extras: list[str] = dataclasses.field(default_factory=list)
    # Dependency groups passed to `uv export` via --group
    groups: list[str] = dataclasses.field(default_factory=list)
    # Packages passed to `uv export` via --prune
    prune_packages: list[str] = dataclasses.field(default_factory=list)
    # Packages passed to `uv export` via --no-emit-package (resolved but not emitted)
    omit_packages: list[str] = dataclasses.field(default_factory=list)
|
||||
|
||||
|
||||
# Shared target configurations reused by several bundle targets below
LINUX_TARGET = Target(
    extras=['default', 'curl-cffi', 'secretstorage'],
    groups=['pyinstaller'],
)
WIN64_TARGET = Target(
    extras=['default', 'curl-cffi'],
)

# Keyed by the suffix of the generated requirements-<suffix>.txt file
BUNDLE_TARGETS = {
    'linux-x86_64': LINUX_TARGET,
    'linux-aarch64': LINUX_TARGET,
    'linux-armv7l': LINUX_TARGET,
    'musllinux-x86_64': LINUX_TARGET,
    'musllinux-aarch64': LINUX_TARGET,
    'win-x64': WIN64_TARGET,
    'win-arm64': WIN64_TARGET,
    'win-x86': Target(extras=['default']),
    'macos': Target(
        extras=['default', 'curl-cffi'],
        # NB: Resolve delocate and PyInstaller together since they share dependencies
        groups=['delocate', 'pyinstaller'],
        # curl-cffi and cffi don't provide universal2 wheels, so only directly install their deps
        omit_packages=['curl-cffi', 'cffi'],
    ),
    # We fuse our own universal2 wheels for curl-cffi+cffi, so we need a separate requirements file
    'macos-curl_cffi': Target(
        extras=['curl-cffi'],
        # Only need curl-cffi+cffi in this requirements file; their deps are installed directly
        # XXX: Try to keep these in sync with curl-cffi's and cffi's transitive dependencies
        prune_packages=['rich'],
        omit_packages=['certifi', 'pycparser'],
    ),
}

# Maps requirements file suffix -> substring identifying the matching wheel asset
PYINSTALLER_BUILDS_TARGETS = {
    'win-x64-pyinstaller': 'win_amd64',
    'win-x86-pyinstaller': 'win32',
    'win-arm64-pyinstaller': 'win_arm64',
}

# Latest release of yt-dlp's own PyInstaller wheel builds for Windows
PYINSTALLER_BUILDS_URL = 'https://api.github.com/repos/yt-dlp/Pyinstaller-Builds/releases/latest'

# Rendered with: (resolved dependency lines, wheel download URL, wheel digest)
PYINSTALLER_BUILDS_TMPL = '''\
{}pyinstaller @ {} \\
    --hash={}
'''

# Extracts the version number from a PyInstaller wheel filename
PYINSTALLER_VERSION_RE = re.compile(r'pyinstaller-(?P<version>[0-9]+\.[0-9]+\.[0-9]+)-')
|
||||
|
||||
|
||||
def generate_table_lines(
    table_name: str,
    table: dict[str, str | list[str | dict[str, str]]],
) -> collections.abc.Iterator[str]:
    """Yield TOML source lines for *table*, terminated by a blank line.

    Supports string values, arrays of strings, and arrays of inline tables.
    """
    yield f'[{table_name}]\n'
    for name, value in table.items():
        assert isinstance(value, (str, list)), 'only string & array table values are supported'

        if isinstance(value, str):
            yield f'{name} = "{value}"\n'
            continue

        yield f'{name} = ['
        if value:
            # Only break onto multiple lines when the array has elements
            yield '\n'
        for element in value:
            yield '    '
            if isinstance(element, dict):
                # Inline table, e.g. { include-group = "default" }
                yield '{ ' + ', '.join(f'{k} = "{v}"' for k, v in element.items()) + ' }'
            else:
                yield f'"{element}"'
            yield ',\n'
        yield ']\n'
    # Trailing blank line marks the end of the table
    # (relied upon by replace_table_in_pyproject)
    yield '\n'
|
||||
|
||||
|
||||
def replace_table_in_pyproject(
    pyproject_text: str,
    table_name: str,
    table: dict[str, str | list[str | dict[str, str]]],
) -> collections.abc.Iterator[str]:
    """Yield *pyproject_text* lines with `[table_name]` replaced by *table*.

    Only the first occurrence is replaced; the old table is assumed to be
    terminated by a blank line.
    """
    # States: 0 = before the table, INSIDE = dropping the old table's
    # lines, BEYOND = past the table (copy-through)
    INSIDE = 1
    BEYOND = 2

    state = 0
    for line in pyproject_text.splitlines(True):
        if state == INSIDE:
            # Drop old table lines; the blank terminator is also dropped since
            # generate_table_lines already emitted a replacement blank line
            if line == '\n':
                state = BEYOND
            continue
        if line != f'[{table_name}]\n' or state == BEYOND:
            yield line
            continue
        yield from generate_table_lines(table_name, table)
        state = INSIDE
|
||||
|
||||
|
||||
def modify_and_write_pyproject(
    pyproject_text: str,
    table_name: str,
    table: dict[str, str | list[str | dict[str, str]]],
) -> None:
    """Rewrite pyproject.toml in place with *table_name* replaced by *table*."""
    updated_lines = replace_table_in_pyproject(pyproject_text, table_name, table)
    with PYPROJECT_PATH.open(mode='w') as f:
        f.writelines(updated_lines)
|
||||
|
||||
|
||||
@dataclasses.dataclass
class Dependency:
    """One parsed requirements.txt dependency line; unset parts are None."""
    name: str
    direct_reference: str | None
    version: str | None
    markers: str | None


def parse_dependency(line: str, comp_op: str = '==') -> Dependency:
    """Parse a single requirements.txt line into a Dependency.

    Handles both the pinned form (`name==version ; markers`) and the
    direct-reference form (`name @ url ; markers`); a trailing
    line-continuation backslash is stripped first.
    """
    cleaned = line.rstrip().removesuffix('\\')
    before_at, at_sep, after_at = (part.strip() for part in cleaned.partition('@'))
    name, _, version_and_markers = (part.strip() for part in before_at.partition(comp_op))
    error_msg = f'unable to parse Dependency from line:\n    {cleaned}'
    assert name, error_msg

    if not at_sep:
        # Plain pinned requirement: everything after comp_op is version + markers
        direct_reference = None
        version, _, markers = (part.strip() for part in version_and_markers.partition(';'))
    else:
        # Direct reference: the URL (and optional markers) follow the '@'
        version = version_and_markers
        direct_reference, _, markers = (part.strip() for part in after_at.partition(';'))
        assert direct_reference, error_msg

    return Dependency(
        name=name,
        direct_reference=direct_reference,
        version=version or None,
        markers=markers or None)
|
||||
|
||||
|
||||
def run_uv_export(
    *,
    extras: list[str] | None = None,
    groups: list[str] | None = None,
    prune_packages: list[str] | None = None,
    omit_packages: list[str] | None = None,
    bare: bool = False,
    output_file: pathlib.Path | None = None,
) -> str:
    """Export pinned requirements from uv.lock via `uv export`; returns stdout.

    With bare=True the output is stripped of annotations, hashes and header.
    """
    return run_process(
        'uv', 'export',
        '--no-python-downloads',
        '--quiet',
        '--no-progress',
        '--color=never',
        '--format=requirements.txt',
        # Use the lockfile as-is; do not update it here
        '--frozen',
        '--refresh',
        '--no-emit-project',
        '--no-default-groups',
        *(f'--extra={extra}' for extra in (extras or [])),
        *(f'--group={group}' for group in (groups or [])),
        *(f'--prune={package}' for package in (prune_packages or [])),
        *(f'--no-emit-package={package}' for package in (omit_packages or [])),
        *(['--no-annotate', '--no-hashes', '--no-header'] if bare else []),
        # Keep the recorded output path repo-relative
        *([f'--output-file={output_file.relative_to(BASE_PATH)}'] if output_file else []),
    ).stdout
|
||||
|
||||
|
||||
def run_pip_compile(
    *args: str,
    input_line: str,
    output_file: pathlib.Path | None = None,
    env: dict[str, str] | None = None,
) -> str:
    """Resolve *input_line* with `uv pip compile`; returns the generated text.

    The requirement line is fed via stdin; *env* can carry variables such as
    UV_EXCLUDE_NEWER (see update_requirements).
    """
    return run_process(
        'uv', 'pip', 'compile',
        '--no-python-downloads',
        '--quiet',
        '--no-progress',
        '--color=never',
        '--format=requirements.txt',
        '--refresh',
        '--generate-hashes',
        '--no-strip-markers',
        f'--custom-compile-command={CUSTOM_COMPILE_COMMAND}',
        # Resolve with environment markers so one file serves multiple platforms
        '--universal',
        *args,
        # Keep the recorded output path repo-relative
        *([f'--output-file={output_file.relative_to(BASE_PATH)}'] if output_file else []),
        '-',  # Read from stdin
        input=f'{input_line}\n',
        env=env,
    ).stdout
|
||||
|
||||
|
||||
def makefile_variables(
    prefix: str,
    filetypes: list[str] | None = None,
    *,
    version: str | None = None,
    name: str | None = None,
    digest: str | None = None,
    data: bytes | None = None,
    keys_only: bool = False,
) -> dict[str, str | None]:
    """Build the {prefix}_* Makefile variable mapping for a wheel release.

    With keys_only=True, only the key set is produced (values default to
    whatever was passed, normally None). Otherwise version/name/digest —
    and *data* when *filetypes* are requested — must all be provided;
    per-filetype FOLDERS/FILES listings are then read from the wheel zip.
    """

    variables = {
        f'{prefix}_VERSION': version,
        f'{prefix}_WHEEL_NAME': name,
        f'{prefix}_WHEEL_HASH': digest,
    }
    for filetype in filetypes or []:
        key_base = f'{prefix}_{filetype.upper()}'
        variables[f'{key_base}_FOLDERS'] = None
        variables[f'{key_base}_FILES'] = None

    if keys_only:
        return variables

    assert all(arg is not None for arg in (version, name, digest, not filetypes or data))

    if filetypes:
        with io.BytesIO(data) as buf, zipfile.ZipFile(buf) as zipf:
            for filetype in filetypes:
                files, folders = zipf_files_and_folders(zipf, f'*.{filetype.lower()}')
                key_base = f'{prefix}_{filetype.upper()}'
                variables[f'{key_base}_FOLDERS'] = ' '.join(folders)
                variables[f'{key_base}_FILES'] = ' '.join(files)

    return variables
|
||||
|
||||
|
||||
def ejs_makefile_variables(**kwargs) -> dict[str, str | None]:
    """Makefile variables for the vendored yt-dlp-ejs wheel (PY and JS listings)."""
    return makefile_variables('EJS', filetypes=['PY', 'JS'], **kwargs)
|
||||
|
||||
|
||||
def update_ejs(verify: bool = False):
    """Update the vendored yt-dlp-ejs assets to the latest GitHub release.

    *verify* is forwarded to update_requirements at the end.
    """
    PACKAGE_NAME = 'yt-dlp-ejs'
    # Matches the pyproject.toml dependency line for PACKAGE_NAME
    PREFIX = f'    "{PACKAGE_NAME}=='
    # PyPI normalizes '-' to '_' in artifact (wheel) filenames
    LIBRARY_NAME = PACKAGE_NAME.replace('-', '_')
    PACKAGE_PATH = BASE_PATH / 'yt_dlp/extractor/youtube/jsc/_builtin/vendor'
    RELEASE_URL = 'https://api.github.com/repos/yt-dlp/ejs/releases/latest'

    # Find the currently pinned version in pyproject.toml
    current_version = None
    with PYPROJECT_PATH.open() as file:
        for line in file:
            if not line.startswith(PREFIX):
                continue
            current_version, _, _ = line.removeprefix(PREFIX).partition('"')

    if not current_version:
        print(f'{PACKAGE_NAME} dependency line could not be found')
        return

    # Read the current EJS_* variable values from the Makefile
    makefile_info = ejs_makefile_variables(keys_only=True)
    prefixes = tuple(f'{key} = ' for key in makefile_info)
    with MAKEFILE_PATH.open() as file:
        for line in file:
            if not line.startswith(prefixes):
                continue
            key, _, val = line.partition(' = ')
            makefile_info[key] = val.rstrip()

    info = call_github_api(RELEASE_URL)
    version = info['tag_name']
    if version == current_version:
        print(f'{PACKAGE_NAME} is up to date! ({version})')
        return

    print(f'Updating {PACKAGE_NAME} from {current_version} to {version}')
    hashes = []  # lines for the generated HASHES mapping in _info.py
    wheel_info = {}
    for asset in info['assets']:
        name = asset['name']
        digest = asset['digest']

        is_wheel = name.startswith(f'{LIBRARY_NAME}-') and name.endswith('.whl')
        if not is_wheel and name not in EJS_ASSETS:
            continue

        with request(asset['browser_download_url']) as resp:
            data = resp.read()

        # verify digest from github
        algo, _, expected = digest.partition(':')
        hexdigest = hashlib.new(algo, data).hexdigest()
        assert hexdigest == expected, f'downloaded attest mismatch ({hexdigest!r} != {expected!r})'

        if is_wheel:
            wheel_info = ejs_makefile_variables(version=version, name=name, digest=digest, data=data)
            continue

        # calculate sha3-512 digest
        asset_hash = hashlib.sha3_512(data).hexdigest()
        hashes.append(f'    {name!r}: {asset_hash!r},')

        # Vendor the asset into the package tree when flagged in EJS_ASSETS
        if EJS_ASSETS[name]:
            (PACKAGE_PATH / name).write_bytes(data)

    # Sanity: every tracked asset and the wheel must be present in the release
    hash_mapping = '\n'.join(hashes)
    for asset_name in EJS_ASSETS:
        assert asset_name in hash_mapping, f'{asset_name} not found in release'

    assert all(wheel_info.get(key) for key in makefile_info), 'wheel info not found in release'

    (PACKAGE_PATH / '_info.py').write_text(EJS_TEMPLATE.format(
        version=version,
        hash_mapping=hash_mapping,
    ))

    # Bump the pinned version in pyproject.toml
    content = PYPROJECT_PATH.read_text()
    updated = content.replace(PREFIX + current_version, PREFIX + version)
    PYPROJECT_PATH.write_text(updated)

    # Swap the old EJS_* values for the new ones in the Makefile
    makefile = MAKEFILE_PATH.read_text()
    for key in wheel_info:
        makefile = makefile.replace(f'{key} = {makefile_info[key]}', f'{key} = {wheel_info[key]}')
    MAKEFILE_PATH.write_text(makefile)

    # Propagate the new pin into the lockfile and generated requirements
    update_requirements(upgrade_only=PACKAGE_NAME, verify=verify)
|
||||
|
||||
|
||||
def update_requirements(upgrade_only: str | None = None, verify: bool = False):
    """Regenerate the uv lockfile, bundle requirements files and pinned extras.

    Args:
        upgrade_only: If given, only this package is upgraded in the lockfile
            (passed to uv as ``--upgrade-package``); otherwise all packages
            are upgraded (``--upgrade``).
        verify: If true, resolution is constrained with ``UV_EXCLUDE_NEWER``
            set to the ``exclude-newer`` timestamp already recorded in
            ``uv.lock``, so the run verifies rather than advances the pins.
    """
    # Are we upgrading all packages or only one (e.g. 'yt-dlp-ejs' or 'protobug')?
    upgrade_arg = f'--upgrade-package={upgrade_only}' if upgrade_only else '--upgrade'

    pyproject_text = PYPROJECT_PATH.read_text()
    pyproject_toml = parse_toml(pyproject_text)
    extras = pyproject_toml['project']['optional-dependencies']

    # Remove pinned extras so they don't muck up the lockfile during generation/upgrade
    for pinned_extra_name in PINNED_EXTRAS:
        extras.pop(pinned_extra_name, None)

    # Write an intermediate pyproject.toml to use for generating lockfile and bundle requirements
    modify_and_write_pyproject(pyproject_text, table_name=EXTRAS_TABLE, table=extras)

    # If verifying, set UV_EXCLUDE_NEWER env var with the last timestamp recorded in uv.lock
    env = None
    if verify:
        env = os.environ.copy()
        env['UV_EXCLUDE_NEWER'] = parse_toml(LOCKFILE_PATH.read_text())['options']['exclude-newer']

    # Generate/upgrade lockfile
    run_process('uv', 'lock', upgrade_arg, env=env)
    lockfile = parse_toml(LOCKFILE_PATH.read_text())

    # Generate bundle requirements
    if not upgrade_only or upgrade_only.lower() == 'pyinstaller':
        # Pin pyinstaller (and its deps) to the version published in the builds release assets
        info = call_github_api(PYINSTALLER_BUILDS_URL)
        for target_suffix, asset_tag in PYINSTALLER_BUILDS_TARGETS.items():
            asset_info = next(asset for asset in info['assets'] if asset_tag in asset['name'])
            pyinstaller_version = PYINSTALLER_VERSION_RE.match(asset_info['name']).group('version')
            pyinstaller_builds_deps = run_pip_compile(
                '--no-emit-package=pyinstaller',
                upgrade_arg,
                input_line=f'pyinstaller=={pyinstaller_version}',
                env=env)
            requirements_path = REQUIREMENTS_PATH / REQS_OUTPUT_TMPL.format(target_suffix)
            requirements_path.write_text(PYINSTALLER_BUILDS_TMPL.format(
                pyinstaller_builds_deps, asset_info['browser_download_url'], asset_info['digest']))

    # One requirements file per bundle target, exported from the freshly written lockfile
    for target_suffix, target in BUNDLE_TARGETS.items():
        run_uv_export(
            extras=target.extras,
            groups=target.groups,
            prune_packages=target.prune_packages,
            omit_packages=target.omit_packages,
            output_file=REQUIREMENTS_PATH / REQS_OUTPUT_TMPL.format(target_suffix))

    run_uv_export(
        groups=['build'],
        output_file=REQUIREMENTS_PATH / REQS_OUTPUT_TMPL.format('pypi-build'))

    run_pip_compile(
        upgrade_arg,
        input_line='pip',
        output_file=REQUIREMENTS_PATH / REQS_OUTPUT_TMPL.format('pip'),
        env=env)

    # Generate pinned extras
    for pinned_name, extra_name in PINNED_EXTRAS.items():
        pinned_extra = extras[pinned_name] = []
        exported_extra = run_uv_export(extras=[extra_name], bare=True)
        for line in exported_extra.splitlines():
            dep = parse_dependency(line)
            # Look up the locked wheel(s) for this exact (name, version) pair
            wheels = next((
                pkg.get('wheels') for pkg in lockfile['package']
                if pkg['name'] == dep.name and pkg['version'] == dep.version), None)
            assert wheels, f'no wheels found for {dep.name} in lockfile'
            # If multiple wheels are found, we'll *assume* it's because they're platform-specific.
            # Platform tags can't be used in markers, so the best we can do is pin to exact version
            if len(wheels) > 1:
                pinned_extra.append(line)
                continue
            # If there's only a 'none-any' wheel, then use a direct reference to PyPI URL with hash
            wheel_url = wheels[0]['url']
            algo, _, digest = wheels[0]['hash'].partition(':')
            pinned_line = f'{dep.name} @ {wheel_url}#{algo}={digest}'
            # Preserve any environment markers from the exported requirement line
            pinned_extra.append(' ; '.join(filter(None, (pinned_line, dep.markers))))

    # Write the finalized pyproject.toml
    modify_and_write_pyproject(pyproject_text, table_name=EXTRAS_TABLE, table=extras)
|
||||
|
||||
|
||||
def parse_args():
    """Build and evaluate the command-line interface for this script."""
    import argparse

    argparser = argparse.ArgumentParser(description='generate/update lockfile and requirements')
    argparser.add_argument(
        'upgrade_only',
        nargs='?',
        metavar='PACKAGE',
        help='only upgrade this package. (by default, all packages will be upgraded)')
    argparser.add_argument(
        '--verify',
        action='store_true',
        help='only verify the update(s) using the previously recorded cooldown timestamp')
    return argparser.parse_args()
|
||||
|
||||
|
||||
def main():
    """Entry point: dispatch to the EJS updater or the generic requirements updater."""
    args = parse_args()
    package = args.upgrade_only
    if package in ('ejs', 'yt-dlp-ejs'):
        # yt-dlp-ejs has its own release-asset-driven update path
        update_ejs(verify=args.verify)
    else:
        update_requirements(upgrade_only=package, verify=args.verify)


if __name__ == '__main__':
    main()
|
||||
@@ -1,8 +1,17 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import contextlib
|
||||
import datetime as dt
|
||||
import functools
|
||||
import itertools
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import zipfile
|
||||
|
||||
|
||||
def read_file(fname):
|
||||
@@ -64,3 +73,46 @@ def run_process(*args, **kwargs):
|
||||
kwargs.setdefault('encoding', 'utf-8')
|
||||
kwargs.setdefault('errors', 'replace')
|
||||
return subprocess.run(args, **kwargs)
|
||||
|
||||
|
||||
def request(url: str, *, headers: dict | None = None):
    """Open *url* (with optional request *headers*) and return the response
    wrapped in ``contextlib.closing`` so it is closed on context-manager exit."""
    return contextlib.closing(
        urllib.request.urlopen(urllib.request.Request(url, headers=headers or {})))
|
||||
|
||||
|
||||
def call_github_api(path: str, *, query: dict | None = None) -> dict | list:
    """Request *path* from the GitHub REST API and return the decoded JSON.

    *path* may be relative to the API base URL or a full API URL; extra
    *query* parameters override any already present in *path*.
    """
    API_BASE_URL = 'https://api.github.com/'
    assert not path.startswith(('https://', 'http://')) or path.startswith(API_BASE_URL)

    parsed = urllib.parse.urlparse(urllib.parse.urljoin(API_BASE_URL, path))
    # Merge existing query-string values with the caller's, caller wins on conflict
    merged_query = dict(urllib.parse.parse_qs(parsed.query))
    merged_query.update(query or {})
    encoded_query = urllib.parse.urlencode(merged_query, True)

    headers = {
        'Accept': 'application/vnd.github+json',
        'User-Agent': 'yt-dlp',
        'X-GitHub-Api-Version': '2026-03-10',
    }
    gh_token = os.getenv('GH_TOKEN')
    if gh_token:
        # Authenticated requests get a much higher rate limit
        headers['Authorization'] = f'Bearer {gh_token}'

    final_url = urllib.parse.urlunparse(parsed._replace(query=encoded_query))
    with request(final_url, headers=headers) as resp:
        return json.load(resp)
|
||||
|
||||
|
||||
def zipf_files_and_folders(zipf: zipfile.ZipFile, glob: str = '*') -> tuple[list[str], list[str]]:
|
||||
files = []
|
||||
folders = []
|
||||
|
||||
path = zipfile.Path(zipf)
|
||||
for f in itertools.chain(path.glob(glob), path.rglob(glob)):
|
||||
if not f.is_file():
|
||||
continue
|
||||
files.append(f.at)
|
||||
folder = f.parent.at.rstrip('/')
|
||||
if folder and folder not in folders:
|
||||
folders.append(folder)
|
||||
|
||||
return files, folders
|
||||
|
||||
Reference in New Issue
Block a user