mirror of
https://github.com/yt-dlp/yt-dlp.git
synced 2026-03-30 09:48:33 +03:00
237 lines
7.2 KiB
Python
Executable File
237 lines
7.2 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
from __future__ import annotations
|
|
|
|
import collections.abc
|
|
import contextlib
|
|
import io
|
|
import json
|
|
import hashlib
|
|
import pathlib
|
|
import urllib.request
|
|
import zipfile
|
|
|
|
|
|
# Template for the generated vendor metadata module (_info.py).
# Doubled braces survive str.format() as literal dict braces.
TEMPLATE = '''\
# This file is generated by devscripts/update_ejs.py. DO NOT MODIFY!

VERSION = {version!r}
HASHES = {{
{hash_mapping}
}}
'''

# PyPI project name of the vendored solver package
PACKAGE_NAME = 'yt-dlp-ejs'
# Start of the dependency line inside pyproject.toml (up to the version)
PREFIX = f' "{PACKAGE_NAME}=='
# PyPI artifact (sdist/wheel) names use underscores instead of dashes
PYPI_ARTIFACT_NAME = PACKAGE_NAME.replace('-', '_')
# Repository root (this script lives one level below it)
BASE_PATH = pathlib.Path(__file__).parent.parent
PYPROJECT_PATH = BASE_PATH / 'pyproject.toml'
# Destination directory for the vendored solver JS files
PACKAGE_PATH = BASE_PATH / 'yt_dlp/extractor/youtube/jsc/_builtin/vendor'
# GitHub API endpoint returning metadata for the latest yt-dlp/ejs release
RELEASE_URL = 'https://api.github.com/repos/yt-dlp/ejs/releases/latest'
# Release assets to hash into _info.py; a True value means the file is
# also written into PACKAGE_PATH, False means it is only hashed
ASSETS = {
    'yt.solver.lib.js': False,
    'yt.solver.lib.min.js': False,
    'yt.solver.deno.lib.js': True,
    'yt.solver.bun.lib.js': True,
    'yt.solver.core.min.js': False,
    'yt.solver.core.js': True,
}
MAKEFILE_PATH = BASE_PATH / 'Makefile'
# Directory holding the hash-pinned requirements-*.txt files
REQUIREMENTS_PATH = BASE_PATH / 'bundle/requirements'
|
|
|
|
|
|
def requirements_needs_update(
    lines: collections.abc.Iterable[str],
    package: str,
    version: str,
):
    """Return whether the pinned version of *package* in *lines* is outdated.

    False is returned both when the pin already starts with *version* and
    when no requirement line for *package* exists at all.
    """
    pin = f'{package}=='
    entry = next((line for line in lines if line.startswith(pin)), None)
    if entry is None:
        return False
    # The remainder after the pin is "<version> \..." or "<version>\n"
    return not entry.removeprefix(pin).startswith(version)
|
|
|
|
|
|
def requirements_update(
    lines: collections.abc.Iterable[str],
    package: str,
    new_version: str,
    new_hashes: list[str],
):
    """Yield *lines* with the requirement entry for *package* rewritten.

    The leading comment block is passed through untouched and a note naming
    this script is appended to it.  Each logical requirement line (including
    ``\\``-continued hash lines) is then emitted verbatim, except the one for
    *package*, which is replaced by a pin on *new_version* followed by one
    ``--hash`` continuation line per digest in *new_hashes*.
    """
    in_header = True
    logical = []
    for raw in lines:
        # normalize so every physical line ends with a newline
        entry = raw if raw.endswith('\n') else raw + '\n'

        if in_header:
            if entry.strip().startswith('#'):
                yield entry
                continue
            # first non-comment line ends the header block
            in_header = False
            yield '# It was later updated using devscripts/update_ejs.py\n'

        logical.append(entry)
        if entry.endswith('\\\n'):
            # the logical line continues on the next physical line
            continue

        if logical[0].startswith(f'{package}=='):
            # replace the whole logical entry with the new pin + hashes
            yield f'{package}=={new_version} \\\n'
            for digest in new_hashes[:-1]:
                yield f' --hash={digest} \\\n'
            yield f' --hash={new_hashes[-1]}\n'
        else:
            yield from logical
        logical = []
|
|
|
|
|
|
def request(url: str):
    """Open *url* and return the response wrapped as a closing context manager."""
    response = urllib.request.urlopen(url)
    return contextlib.closing(response)
|
|
|
|
|
|
def makefile_variables(
    version: str | None = None,
    name: str | None = None,
    digest: str | None = None,
    data: bytes | None = None,
    keys_only: bool = False,
) -> dict[str, str | None]:
    """Build the mapping of ``EJS_*`` Makefile variables.

    With ``keys_only=True`` every value is ``None`` (used only to discover
    the variable names); otherwise all other arguments must be provided and
    the wheel *data* is inspected for the folder/file listings.
    """
    assert keys_only or all(arg is not None for arg in (version, name, digest, data))

    if keys_only:
        # values default to None; only the key order matters here
        return dict.fromkeys((
            'EJS_VERSION',
            'EJS_WHEEL_NAME',
            'EJS_WHEEL_HASH',
            'EJS_PY_FOLDERS',
            'EJS_PY_FILES',
            'EJS_JS_FOLDERS',
            'EJS_JS_FILES',
        ))

    return {
        'EJS_VERSION': version,
        'EJS_WHEEL_NAME': name,
        'EJS_WHEEL_HASH': digest,
        'EJS_PY_FOLDERS': list_wheel_contents(data, 'py', files=False),
        'EJS_PY_FILES': list_wheel_contents(data, 'py', folders=False),
        'EJS_JS_FOLDERS': list_wheel_contents(data, 'js', files=False),
        'EJS_JS_FILES': list_wheel_contents(data, 'js', folders=False),
    }
|
|
|
|
|
|
def list_wheel_contents(
    wheel_data: bytes,
    suffix: str | None = None,
    folders: bool = True,
    files: bool = True,
) -> str:
    """Return a space-joined listing of matching wheel members.

    Only entries under ``yt_dlp_ejs/`` are considered; *suffix* (e.g. ``'py'``)
    optionally restricts by file extension.  Depending on the flags, the
    result contains the containing folders, the files, or folders followed
    by files.
    """
    assert folders or files, 'at least one of "folders" or "files" must be True'

    with zipfile.ZipFile(io.BytesIO(wheel_data)) as archive:
        names = [info.filename for info in archive.infolist()]

    matched = [name for name in names if name.startswith('yt_dlp_ejs/')]
    if suffix:
        matched = [name for name in matched if name.endswith(f'.{suffix}')]

    if not folders:
        return ' '.join(matched)

    # de-duplicated parent folders, preserving first-seen order
    parents = list(dict.fromkeys(name.rpartition('/')[0] for name in matched))
    if not files:
        return ' '.join(parents)

    return ' '.join(parents + matched)
|
|
|
|
|
|
def main():
    """Update the vendored yt-dlp-ejs package to the latest GitHub release.

    Rewrites pyproject.toml, the Makefile EJS_* variables, the vendored
    solver JS files plus their _info.py hash table, and the hash-pinned
    bundle requirements files.
    """
    # Locate the currently pinned version in pyproject.toml
    current_version = None
    with PYPROJECT_PATH.open() as file:
        for line in file:
            if not line.startswith(PREFIX):
                continue
            # the version sits between PREFIX and the closing quote
            current_version, _, _ = line.removeprefix(PREFIX).partition('"')

    if not current_version:
        print(f'{PACKAGE_NAME} dependency line could not be found')
        return

    # Collect the current EJS_* variable values from the Makefile
    makefile_info = makefile_variables(keys_only=True)
    prefixes = tuple(f'{key} = ' for key in makefile_info)
    with MAKEFILE_PATH.open() as file:
        for line in file:
            if not line.startswith(prefixes):
                continue
            key, _, val = line.partition(' = ')
            makefile_info[key] = val.rstrip()

    # Fetch the latest release metadata from the GitHub API
    with request(RELEASE_URL) as resp:
        info = json.load(resp)

    version = info['tag_name']
    if version == current_version:
        print(f'{PACKAGE_NAME} is up to date! ({version})')
        return

    print(f'Updating {PACKAGE_NAME} from {current_version} to {version}')
    hashes = []                # formatted entries for the HASHES mapping in _info.py
    requirements_hashes = []   # sdist + wheel digests for the requirements files
    wheel_info = {}            # Makefile variable values derived from the wheel
    for asset in info['assets']:
        name = asset['name']
        digest = asset['digest']

        # Is it the source distribution? If so, we only need its hash for the requirements files
        if name == f'{PYPI_ARTIFACT_NAME}-{version}.tar.gz':
            requirements_hashes.append(digest)
            continue

        is_wheel = name.startswith(f'{PYPI_ARTIFACT_NAME}-') and name.endswith('.whl')
        if not is_wheel and name not in ASSETS:
            # skip unrelated release assets
            continue

        with request(asset['browser_download_url']) as resp:
            data = resp.read()

        # verify digest from github
        algo, _, expected = digest.partition(':')
        hexdigest = hashlib.new(algo, data).hexdigest()
        assert hexdigest == expected, f'downloaded attest mismatch ({hexdigest!r} != {expected!r})'

        if is_wheel:
            requirements_hashes.append(digest)
            wheel_info = makefile_variables(version, name, digest, data)
            continue

        # calculate sha3-512 digest
        asset_hash = hashlib.sha3_512(data).hexdigest()
        hashes.append(f' {name!r}: {asset_hash!r},')

        # only some assets are vendored into the package tree (see ASSETS)
        if ASSETS[name]:
            (PACKAGE_PATH / name).write_bytes(data)

    # Every expected asset must have produced a hash entry
    hash_mapping = '\n'.join(hashes)
    for asset_name in ASSETS:
        assert asset_name in hash_mapping, f'{asset_name} not found in release'

    assert all(wheel_info.get(key) for key in makefile_info), 'wheel info not found in release'

    # Regenerate the vendored metadata module
    (PACKAGE_PATH / '_info.py').write_text(TEMPLATE.format(
        version=version,
        hash_mapping=hash_mapping,
    ))

    # Bump the pin in pyproject.toml
    content = PYPROJECT_PATH.read_text()
    updated = content.replace(PREFIX + current_version, PREFIX + version)
    PYPROJECT_PATH.write_text(updated)

    # Swap old Makefile variable values for the new ones
    makefile = MAKEFILE_PATH.read_text()
    for key in wheel_info:
        makefile = makefile.replace(f'{key} = {makefile_info[key]}', f'{key} = {wheel_info[key]}')
    MAKEFILE_PATH.write_text(makefile)

    # Rewrite each requirements file whose pin is out of date
    for req in REQUIREMENTS_PATH.glob('requirements-*.txt'):
        lines = req.read_text().splitlines(True)
        if requirements_needs_update(lines, PACKAGE_NAME, version):
            with req.open(mode='w') as f:
                f.writelines(requirements_update(lines, PACKAGE_NAME, version, requirements_hashes))
|
|
|
|
|
|
# Run the updater only when executed as a script, not on import.
if __name__ == '__main__':
    main()
|