mirror of
https://github.com/yt-dlp/yt-dlp.git
synced 2026-03-30 01:39:23 +03:00
[build] Harden build/release workflows (#16358)
Authored by: bashonly, Grub4K. Co-authored-by: Simon Sawicki <contact@grub4k.dev>
This commit is contained in:
@@ -25,8 +25,8 @@ def parse_args():
|
||||
'-e', '--exclude-dependency', metavar='DEPENDENCY', action='append',
|
||||
help='exclude a dependency (can be used multiple times)')
|
||||
parser.add_argument(
|
||||
'-i', '--include-extra', metavar='EXTRA', action='append',
|
||||
help='include an extra/optional-dependencies list (can be used multiple times)')
|
||||
'-i', '--include', '--include-extra', '--include-group', metavar='EXTRA/GROUP', action='append', dest='includes',
|
||||
help='include an extra/group (can be used multiple times)')
|
||||
parser.add_argument(
|
||||
'-c', '--cherry-pick', metavar='DEPENDENCY', action='append',
|
||||
help=(
|
||||
@@ -50,29 +50,41 @@ def uniq(arg) -> dict[str, None]:
|
||||
|
||||
def main():
|
||||
args = parse_args()
|
||||
project_table = parse_toml(read_file(args.input))['project']
|
||||
toml_data = parse_toml(read_file(args.input))
|
||||
project_table = toml_data['project']
|
||||
recursive_pattern = re.compile(rf'{project_table["name"]}\[(?P<extra_name>[\w-]+)\]')
|
||||
extras = project_table['optional-dependencies']
|
||||
groups = toml_data['dependency-groups']
|
||||
|
||||
excludes = uniq(args.exclude_dependency)
|
||||
only_includes = uniq(args.cherry_pick)
|
||||
include_extras = uniq(args.include_extra)
|
||||
includes = uniq(args.includes)
|
||||
|
||||
def yield_deps(extra):
|
||||
def yield_deps_from_extra(extra):
|
||||
for dep in extra:
|
||||
if mobj := recursive_pattern.fullmatch(dep):
|
||||
yield from extras.get(mobj.group('extra_name'), ())
|
||||
else:
|
||||
yield dep
|
||||
|
||||
def yield_deps_from_group(group):
|
||||
for dep in group:
|
||||
if isinstance(dep, dict):
|
||||
yield from yield_deps_from_group(groups[dep['include-group']])
|
||||
else:
|
||||
yield dep
|
||||
|
||||
targets = {}
|
||||
if not args.omit_default:
|
||||
# legacy: 'dependencies' is empty now
|
||||
targets.update(dict.fromkeys(project_table['dependencies']))
|
||||
targets.update(dict.fromkeys(yield_deps(extras['default'])))
|
||||
targets.update(dict.fromkeys(yield_deps_from_extra(extras['default'])))
|
||||
|
||||
for include in filter(None, map(extras.get, include_extras)):
|
||||
targets.update(dict.fromkeys(yield_deps(include)))
|
||||
for include in filter(None, map(extras.get, includes)):
|
||||
targets.update(dict.fromkeys(yield_deps_from_extra(include)))
|
||||
|
||||
for include in filter(None, map(groups.get, includes)):
|
||||
targets.update(dict.fromkeys(yield_deps_from_group(include)))
|
||||
|
||||
def target_filter(target):
|
||||
name = re.match(r'[\w-]+', target).group(0).lower()
|
||||
|
||||
206
devscripts/update_bundle_requirements.py
Executable file
206
devscripts/update_bundle_requirements.py
Executable file
@@ -0,0 +1,206 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
import contextlib
import dataclasses
import datetime as dt
import itertools
import json
import pathlib
import re
import subprocess
import urllib.request

from devscripts.utils import run_process
|
||||
|
||||
|
||||
# Directory that holds the generated requirements input/output files
REQUIREMENTS_PATH = pathlib.Path(__file__).parent.parent / 'bundle/requirements'
INPUT_TMPL = 'requirements-{}.in'
OUTPUT_TMPL = 'requirements-{}.txt'
# Passed to uv's --exclude-newer so resolution ignores releases newer than ~5 days
COOLDOWN_DATE = (dt.datetime.today() - dt.timedelta(days=5)).strftime('%Y-%m-%d')
# Passed to uv's --custom-compile-command (the command users should re-run)
CUSTOM_COMPILE_COMMAND = 'python -m devscripts.update_bundle_requirements'

# Python versions used when resolving each platform's lockfile
LINUX_GNU_PYTHON_VERSION = '3.13'
LINUX_MUSL_PYTHON_VERSION = '3.14'
# Backward-compatible alias for the original misspelled name; prefer the
# correctly spelled LINUX_MUSL_PYTHON_VERSION above
LINUX_MUSL_PYTHON_VERISON = LINUX_MUSL_PYTHON_VERSION
WINDOWS_INTEL_PYTHON_VERSION = '3.10'
WINDOWS_ARM64_PYTHON_VERSION = '3.13'
MACOS_PYTHON_VERSION = '3.14'
||||
@dataclasses.dataclass
class Target:
    """Parameters for resolving one platform-specific requirements lockfile."""
    platform: str  # value for uv's --python-platform (e.g. 'x86_64-manylinux2014')
    version: str  # value for uv's --python-version (e.g. '3.13')
    extras: list[str] = dataclasses.field(default_factory=list)  # extras passed via --include-extra
    groups: list[str] = dataclasses.field(default_factory=list)  # dependency groups passed via --include-group
    compile_args: list[str] = dataclasses.field(default_factory=list)  # extra args for 'uv pip compile'
||||
# Lockfile targets whose input files are generated via devscripts.install_deps;
# each key becomes the '{}' part of the requirements-{}.in/.txt filenames
INSTALL_DEPS_TARGETS = {
    'linux-x86_64': Target(
        platform='x86_64-manylinux2014',
        version=LINUX_GNU_PYTHON_VERSION,
        extras=['default', 'curl-cffi-compat', 'secretstorage'],
        groups=['pyinstaller'],
    ),
    'linux-aarch64': Target(
        platform='aarch64-manylinux2014',
        version=LINUX_GNU_PYTHON_VERSION,
        extras=['default', 'curl-cffi-compat', 'secretstorage'],
        groups=['pyinstaller'],
    ),
    'linux-armv7l': Target(
        platform='linux',
        version=LINUX_GNU_PYTHON_VERSION,
        extras=['default', 'curl-cffi', 'secretstorage'],
        groups=['pyinstaller'],
    ),
    'musllinux-x86_64': Target(
        platform='x86_64-unknown-linux-musl',
        version=LINUX_MUSL_PYTHON_VERISON,
        extras=['default', 'curl-cffi', 'secretstorage'],
        groups=['pyinstaller'],
    ),
    'musllinux-aarch64': Target(
        platform='aarch64-unknown-linux-musl',
        version=LINUX_MUSL_PYTHON_VERISON,
        extras=['default', 'secretstorage'],
        groups=['pyinstaller'],
    ),
    'win-x64': Target(
        platform='x86_64-pc-windows-msvc',
        version=WINDOWS_INTEL_PYTHON_VERSION,
        extras=['default', 'curl-cffi'],
    ),
    'win-x86': Target(
        platform='i686-pc-windows-msvc',
        version=WINDOWS_INTEL_PYTHON_VERSION,
        extras=['default'],
    ),
    'win-arm64': Target(
        platform='aarch64-pc-windows-msvc',
        version=WINDOWS_ARM64_PYTHON_VERSION,
        extras=['default', 'curl-cffi'],
    ),
    'macos': Target(
        platform='macos',
        version=MACOS_PYTHON_VERSION,
        extras=['default', 'curl-cffi-compat'],
        # NB: Resolve delocate and PyInstaller together since they share dependencies
        groups=['delocate', 'pyinstaller'],
        # curl-cffi and cffi don't provide universal2 wheels, so only directly install their deps
        # NB: uv's --no-emit-package option is equivalent to pip-compile's --unsafe-package option
        compile_args=['--no-emit-package', 'curl-cffi', '--no-emit-package', 'cffi'],
    ),
    # We fuse our own universal2 wheels for curl-cffi+cffi, so we need a separate requirements file
    'macos-curl_cffi': Target(
        platform='macos',
        version=MACOS_PYTHON_VERSION,
        extras=['curl-cffi-compat'],
        # Only need curl-cffi+cffi in this requirements file; their deps are installed directly
        compile_args=['--no-emit-package', 'certifi', '--no-emit-package', 'pycparser'],
    ),
}
||||
@dataclasses.dataclass
class PyInstallerTarget:
    """Parameters for pinning a prebuilt PyInstaller wheel for one Windows target."""
    platform: str  # value for uv's --python-platform
    version: str  # value for uv's --python-version
    asset_tag: str  # substring matched against release asset filenames (e.g. 'win_amd64')
||||
# Windows targets that pin a prebuilt PyInstaller wheel from yt-dlp/Pyinstaller-Builds
PYINSTALLER_BUILDS_TARGETS = {
    'win-x64-pyinstaller': PyInstallerTarget(
        platform='x86_64-pc-windows-msvc',
        version=WINDOWS_INTEL_PYTHON_VERSION,
        asset_tag='win_amd64',
    ),
    'win-x86-pyinstaller': PyInstallerTarget(
        platform='i686-pc-windows-msvc',
        version=WINDOWS_INTEL_PYTHON_VERSION,
        asset_tag='win32',
    ),
    'win-arm64-pyinstaller': PyInstallerTarget(
        platform='aarch64-pc-windows-msvc',
        version=WINDOWS_ARM64_PYTHON_VERSION,
        asset_tag='win_arm64',
    ),
}

# GitHub API endpoint for the latest Pyinstaller-Builds release metadata
PYINSTALLER_BUILDS_URL = 'https://api.github.com/repos/yt-dlp/Pyinstaller-Builds/releases/latest'

# Output-file template: resolved dependencies, then PyInstaller pinned to a
# release-asset download URL with its hash
PYINSTALLER_BUILDS_TMPL = '''\
{}pyinstaller@{} \\
    --hash={}
'''

# Extracts the version number from a PyInstaller wheel asset filename
PYINSTALLER_VERSION_RE = re.compile(r'pyinstaller-(?P<version>[0-9]+\.[0-9]+\.[0-9]+)-')
||||
def write_requirements_input(filepath: pathlib.Path, *args: str) -> None:
    """Generate a requirements input (.in) file via devscripts.install_deps.

    *args* are forwarded to install_deps (e.g. --include-extra/--include-group
    flags); the printed dependency list is written to *filepath*.
    """
    # install_deps prints the selected dependency specifiers to stdout
    result = run_process(
        sys.executable, '-m', 'devscripts.install_deps',
        '--omit-default', '--print', *args)
    filepath.write_text(result.stdout)
||||
def run_pip_compile(python_platform: str, python_version: str, requirements_input_path: pathlib.Path, *args: str) -> subprocess.CompletedProcess[str]:
    """Resolve and pin *requirements_input_path* with 'uv pip compile'.

    Extra *args* are appended to the uv command line (e.g. --output-file=...).
    Returns the completed process; callers read the lockfile text from its
    ``stdout`` attribute when no --output-file is given.
    """
    # NOTE: the original annotated the return as 'str', but callers use
    # '.stdout' on the result, so this returns the completed process object
    return run_process(
        'uv', 'pip', 'compile',
        '--upgrade',
        # Ignore releases newer than the cooldown date
        f'--exclude-newer={COOLDOWN_DATE}',
        f'--python-platform={python_platform}',
        f'--python-version={python_version}',
        '--generate-hashes',
        '--no-strip-markers',
        # Command recorded in the generated file's header
        f'--custom-compile-command={CUSTOM_COMPILE_COMMAND}',
        str(requirements_input_path),
        *args)
||||
def main():
    """Refresh every pinned requirements file under bundle/requirements."""
    # Fetch metadata for the latest yt-dlp/Pyinstaller-Builds release
    with contextlib.closing(urllib.request.urlopen(PYINSTALLER_BUILDS_URL)) as resp:
        release_info = json.load(resp)

    # Pin the prebuilt PyInstaller wheel (plus its resolved deps) per Windows target
    for suffix, target in PYINSTALLER_BUILDS_TARGETS.items():
        asset = next(a for a in release_info['assets'] if target.asset_tag in a['name'])
        version = PYINSTALLER_VERSION_RE.match(asset['name']).group('version')
        input_path = REQUIREMENTS_PATH / INPUT_TMPL.format(suffix)
        input_path.write_text(f'pyinstaller=={version}\n')
        # Resolve PyInstaller's dependencies but omit PyInstaller itself;
        # it gets pinned to the release asset URL + hash via the template below
        deps = run_pip_compile(
            target.platform, target.version, input_path,
            '--color=never', '--no-emit-package=pyinstaller').stdout
        output_path = REQUIREMENTS_PATH / OUTPUT_TMPL.format(suffix)
        output_path.write_text(PYINSTALLER_BUILDS_TMPL.format(
            deps, asset['browser_download_url'], asset['digest']))

    # Resolve the per-platform bundle lockfiles from install_deps-generated inputs
    for suffix, target in INSTALL_DEPS_TARGETS.items():
        input_path = REQUIREMENTS_PATH / INPUT_TMPL.format(suffix)
        write_requirements_input(
            input_path,
            *(arg for extra in target.extras for arg in ('--include-extra', extra)),
            *(arg for group in target.groups for arg in ('--include-group', group)))
        run_pip_compile(
            target.platform, target.version, input_path, *target.compile_args,
            f'--output-file={REQUIREMENTS_PATH / OUTPUT_TMPL.format(suffix)}')

    # Lockfile for the PyPI build dependency group (resolved for linux)
    build_input_path = REQUIREMENTS_PATH / INPUT_TMPL.format('pypi-build')
    write_requirements_input(build_input_path, '--include-group', 'build')
    run_pip_compile(
        'linux', LINUX_GNU_PYTHON_VERSION, build_input_path,
        f'--output-file={REQUIREMENTS_PATH / OUTPUT_TMPL.format("pypi-build")}')

    # Lockfile pinning pip itself (resolved for windows)
    pip_input_path = REQUIREMENTS_PATH / INPUT_TMPL.format('pip')
    write_requirements_input(pip_input_path, '--include-group', 'build', '--cherry-pick', 'pip')
    run_pip_compile(
        'windows', WINDOWS_INTEL_PYTHON_VERSION, pip_input_path,
        f'--output-file={REQUIREMENTS_PATH / OUTPUT_TMPL.format("pip")}')
||||
# Script entry point: python -m devscripts.update_bundle_requirements
if __name__ == '__main__':
    main()
|
||||
Reference in New Issue
Block a user