[cleanup] Misc

pukkandan 2023-03-04 22:40:08 +05:30
parent eb8fd6d044
commit 392389b7df
8 changed files with 46 additions and 38 deletions

View file

@@ -192,9 +192,8 @@ ## UPDATE
<a id="update-channels"/>
There are currently two release channels for binaries, `stable` and `nightly`.
-`stable` releases are what the program will update to by default, and have had many of their changes tested by users of the master branch.
-`nightly` releases are built after each push to the master branch, and will have the most recent fixes and additions, but also have the potential for bugs.
-The latest `nightly` is available as a [pre-release from this repository](https://github.com/yt-dlp/yt-dlp/releases/tag/nightly), and all `nightly` releases are [archived in their own repo](https://github.com/yt-dlp/yt-dlp-nightly-builds/releases).
+`stable` is the default channel, and many of its changes have been tested by users of the nightly channel.
+The `nightly` channel has releases built after each push to the master branch, and will have the most recent fixes and additions, but also have more risk of regressions. They are available in [their own repo](https://github.com/yt-dlp/yt-dlp-nightly-builds/releases).
When using `--update`/`-U`, a release binary will only update to its current channel.
This release channel can be changed by using the `--update-to` option. `--update-to` can also be used to upgrade or downgrade to specific tags from a channel.
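For example, using the options described above, a binary could switch channels or pin a release (the tag shown is only illustrative):

yt-dlp --update-to nightly
yt-dlp --update-to stable@2023.03.04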

View file

@@ -1,19 +1,26 @@
+from __future__ import annotations
+# Allow direct execution
+import os
+import sys
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import enum
import itertools
import json
import logging
import re
-import subprocess
-import sys
from collections import defaultdict
from dataclasses import dataclass
from functools import lru_cache
from pathlib import Path
+from devscripts.utils import read_file, run_process, write_file
BASE_URL = 'https://github.com'
LOCATION_PATH = Path(__file__).parent
+HASH_LENGTH = 7
logger = logging.getLogger(__name__)
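The newly added "Allow direct execution" block is what lets the `from devscripts.utils import ...` line resolve when the script is run as a plain file rather than as a module. A minimal sketch of the same pattern, mirroring the lines added above and assuming the script lives one directory below the repository root:

import os
import sys

# Prepend the repository root (the parent of this script's directory) to
# sys.path so that absolute imports such as `devscripts.utils` work when the
# file is executed directly, e.g. `python devscripts/make_changelog.py`.
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from devscripts.utils import read_file, run_process, write_file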
@@ -82,7 +89,7 @@ def __str__(self):
result = f'{self.short!r}'
if self.hash:
-result += f' ({self.hash[:7]})'
+result += f' ({self.hash[:HASH_LENGTH]})'
if self.authors:
authors = ', '.join(self.authors)
@@ -208,7 +215,7 @@ def format_single_change(self, info):
def _format_message_link(self, message, hash):
assert message or hash, 'Improperly defined commit message or override'
-message = message if message else hash[:7]
+message = message if message else hash[:HASH_LENGTH]
return f'[{message}]({self.repo_url}/commit/{hash})' if hash else message
def _format_issues(self, issues):
@@ -242,9 +249,8 @@ class CommitRange:
FIXES_RE = re.compile(r'(?i:Fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Revert)\s+([\da-f]{40})')
UPSTREAM_MERGE_RE = re.compile(r'Update to ytdl-commit-([\da-f]+)')
-def __init__(self, start, end, default_author=None) -> None:
-self._start = start
-self._end = end
+def __init__(self, start, end, default_author=None):
+self._start, self._end = start, end
self._commits, self._fixes = self._get_commits_and_fixes(default_author)
self._commits_added = []
@@ -262,14 +268,10 @@ def __contains__(self, commit):
return commit in self._commits
-def _is_ancestor(self, commitish):
-return bool(subprocess.call(
-[self.COMMAND, 'merge-base', '--is-ancestor', commitish, self._start]))
def _get_commits_and_fixes(self, default_author):
-result = subprocess.check_output([
+result = run_process(
self.COMMAND, 'log', f'--format=%H%n%s%n%b%n{self.COMMIT_SEPARATOR}',
-f'{self._start}..{self._end}' if self._start else self._end], text=True)
+f'{self._start}..{self._end}' if self._start else self._end).stdout
commits = {}
fixes = defaultdict(list)
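`run_process` is the helper added to devscripts/utils.py later in this commit: it runs the command in text mode, captures stdout/stderr and raises on a non-zero exit status, so `.stdout` is a drop-in for the old `subprocess.check_output(..., text=True)` return value. A rough equivalence, using a shortened git command purely for illustration and assuming the helper is importable from the repository root:

import subprocess

from devscripts.utils import run_process

# Old style: check_output returns decoded stdout and raises CalledProcessError
# on failure.
old = subprocess.check_output(['git', 'log', '-1', '--format=%H'], text=True)

# New style: same text/check behaviour, with stderr captured as well.
new = run_process('git', 'log', '-1', '--format=%H').stdout

assert old == new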
@@ -301,12 +303,12 @@ def _get_commits_and_fixes(self, default_author):
for commitish, fix_commits in fixes.items():
if commitish in commits:
-hashes = ', '.join(commit.hash[:7] for commit in fix_commits)
-logger.info(f'Found fix(es) for {commitish[:7]}: {hashes}')
+hashes = ', '.join(commit.hash[:HASH_LENGTH] for commit in fix_commits)
+logger.info(f'Found fix(es) for {commitish[:HASH_LENGTH]}: {hashes}')
for fix_commit in fix_commits:
del commits[fix_commit.hash]
else:
-logger.debug(f'Commit with fixes not in changes: {commitish[:7]}')
+logger.debug(f'Commit with fixes not in changes: {commitish[:HASH_LENGTH]}')
return commits, fixes
@@ -397,11 +399,10 @@ def groups(self):
def get_new_contributors(contributors_path, commits):
contributors = set()
if contributors_path.exists():
-with contributors_path.open() as file:
-for line in filter(None, map(str.strip, file)):
-author, _, _ = line.partition(' (')
-authors = author.split('/')
-contributors.update(map(str.casefold, authors))
+for line in read_file(contributors_path).splitlines():
+author, _, _ = line.strip().partition(' (')
+authors = author.split('/')
+contributors.update(map(str.casefold, authors))
new_contributors = set()
for commit in commits:
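For context, the rewritten loop reduces each line of the contributors file to case-folded author handles; `read_file` simply replaces the explicit open()/filter() dance. An illustration with a made-up input line:

line = 'SomeAuthor/CoAuthor (extractor fixes)'

author, _, _ = line.strip().partition(' (')   # 'SomeAuthor/CoAuthor'
authors = author.split('/')                   # ['SomeAuthor', 'CoAuthor']
print(set(map(str.casefold, authors)))        # {'someauthor', 'coauthor'} (order may vary)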
@@ -453,8 +454,7 @@ def get_new_contributors(contributors_path, commits):
if not args.no_override:
if args.override_path.exists():
-with args.override_path.open() as file:
-overrides = json.load(file)
+overrides = json.loads(read_file(args.override_path))
commits.apply_overrides(overrides)
else:
logger.warning(f'File {args.override_path.as_posix()} does not exist')
@@ -464,8 +464,7 @@ def get_new_contributors(contributors_path, commits):
new_contributors = get_new_contributors(args.contributors_path, commits)
if new_contributors:
if args.contributors:
-with args.contributors_path.open('a') as file:
-file.writelines(f'{contributor}\n' for contributor in new_contributors)
+write_file(args.contributors_path, '\n'.join(new_contributors) + '\n', mode='a')
logger.info(f'New contributors: {", ".join(new_contributors)}')
print(Changelog(commits.groups(), args.repo))

View file

@@ -9,11 +9,10 @@
import argparse
import contextlib
-import subprocess
import sys
from datetime import datetime
-from devscripts.utils import read_version, write_file
+from devscripts.utils import read_version, run_process, write_file
def get_new_version(version, revision):
@@ -32,7 +31,7 @@ def get_new_version(version, revision):
def get_git_head():
with contextlib.suppress(Exception):
-return subprocess.check_output(['git', 'rev-parse', 'HEAD'], text=True).strip() or None
+return run_process('git', 'rev-parse', 'HEAD').stdout.strip()
VERSION_TEMPLATE = '''\
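Because the body is wrapped in contextlib.suppress(Exception), any failure (git not installed, not a git checkout, or a non-zero exit from the now check-enabled run_process call) makes get_git_head() fall through and return None, which is presumably why the explicit `or None` fallback could be dropped. An illustrative call, not code from the commit:

head = get_git_head()
print(f'HEAD is {head}' if head else 'git metadata unavailable')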

View file

@@ -1,5 +1,6 @@
import argparse
import functools
+import subprocess
def read_file(fname):
@@ -12,8 +13,8 @@ def write_file(fname, content, mode='w'):
return f.write(content)
-# Get the version without importing the package
def read_version(fname='yt_dlp/version.py'):
+"""Get the version without importing the package"""
exec(compile(read_file(fname), fname, 'exec'))
return locals()['__version__']
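read_version deliberately avoids `import yt_dlp`, which would pull in the whole package at build time; it compiles and executes only version.py and reads `__version__` back out of the resulting namespace. A sketch of the same idea with an inline stand-in for the file contents (the helper above relies on exec() writing into the function's locals(); an explicit namespace dict is used here only to keep the sketch self-contained):

source = "__version__ = '2023.03.04'\n"   # stand-in for yt_dlp/version.py

namespace = {}
exec(compile(source, 'version.py', 'exec'), namespace)
print(namespace['__version__'])           # -> 2023.03.04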
@@ -33,3 +34,13 @@ def get_filename_args(has_infile=False, default_outfile=None):
def compose_functions(*functions):
return lambda x: functools.reduce(lambda y, f: f(y), functions, x)
+def run_process(*args, **kwargs):
+kwargs.setdefault('text', True)
+kwargs.setdefault('check', True)
+kwargs.setdefault('capture_output', True)
+if kwargs['text']:
+kwargs.setdefault('encoding', 'utf-8')
+kwargs.setdefault('errors', 'replace')
+return subprocess.run(args, **kwargs)
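This new run_process helper is what the other devscripts changes in this commit switch to: by default it runs in text mode with UTF-8 decoding ('replace' for undecodable bytes), captures both output streams, and raises CalledProcessError on a non-zero exit. Because setdefault leaves explicit keyword arguments alone, any of those defaults can still be overridden per call. Typical usage, mirroring update-version.py above (the ref name in the second call is only illustrative):

from devscripts.utils import run_process

head = run_process('git', 'rev-parse', 'HEAD').stdout.strip()

# Opt out of a default when needed, e.g. to inspect the return code yourself.
result = run_process('git', 'rev-parse', '--verify', 'some-ref', check=False)
print(result.returncode, result.stderr.strip())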

View file

@@ -3784,7 +3784,7 @@ def get_encoding(stream):
klass = type(self)
write_debug(join_nonempty(
f'{"yt-dlp" if REPOSITORY == "yt-dlp/yt-dlp" else REPOSITORY} version',
-__version__ + {'stable': '', 'nightly': '*'}.get(CHANNEL, f' <{CHANNEL}>'),
+f'{CHANNEL}@{__version__}',
f'[{RELEASE_GIT_HEAD[:9]}]' if RELEASE_GIT_HEAD else '',
'' if source == 'unknown' else f'({source})',
'' if _IN_CLI else 'API' if klass == YoutubeDL else f'API:{self.__module__}.{klass.__qualname__}',
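The debug header therefore now always names the channel explicitly instead of encoding it as a suffix. A small illustration of how the two formats differ, with made-up values:

CHANNEL, __version__ = 'nightly', '2023.03.04'

old = __version__ + {'stable': '', 'nightly': '*'}.get(CHANNEL, f' <{CHANNEL}>')
new = f'{CHANNEL}@{__version__}'

print(old)  # 2023.03.04*
print(new)  # nightly@2023.03.04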

View file

@@ -130,7 +130,7 @@ def _real_extract(self, url):
'channel_url': 'channel_url', 'was_live': 'was_live'}),
**self._process_video(info['id'], video),
} for i, video in enumerate(parts, 1)]
-print(entries[0]['duration'])
return {
'_type': 'multi_video',
**info,

View file

@@ -3717,10 +3717,10 @@ def _extract_formats_and_subtitles(self, streaming_data, video_id, player_url, l
'filesize': int_or_none(fmt.get('contentLength')),
'format_id': f'{itag}{"-drc" if fmt.get("isDrc") else ""}',
'format_note': join_nonempty(
-'%s%s' % (audio_track.get('displayName') or '',
-' (default)' if language_preference > 0 else ''),
+join_nonempty(audio_track.get('displayName'),
+language_preference > 0 and ' (default)', delim=''),
fmt.get('qualityLabel') or quality.replace('audio_quality_', ''),
-'DRC' if fmt.get('isDrc') else None,
+fmt.get('isDrc') and 'DRC',
try_get(fmt, lambda x: x['projectionType'].replace('RECTANGULAR', '').lower()),
try_get(fmt, lambda x: x['spatialAudioType'].replace('SPATIAL_AUDIO_TYPE_', '').lower()),
throttled and 'THROTTLED', is_damaged and 'DAMAGED', delim=', '),
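Both simplifications lean on the fact that join_nonempty (from yt_dlp.utils) drops falsy members before joining, so `condition and 'LABEL'` contributes the label only when the condition holds. A much-simplified stand-in for the helper, only to show that behaviour; the real implementation lives in yt_dlp.utils and supports more options:

def join_nonempty(*values, delim='-'):
    # Falsy entries (None, False, '') are skipped entirely.
    return delim.join(str(v) for v in values if v)

is_drc, throttled = False, True
print(join_nonempty('medium', is_drc and 'DRC', throttled and 'THROTTLED', delim=', '))
# -> medium, THROTTLED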

View file

@@ -29,13 +29,13 @@
'stable': 'yt-dlp/yt-dlp',
'nightly': 'yt-dlp/yt-dlp-nightly-builds',
}
-REPOSITORY = UPDATE_SOURCES['stable']
_VERSION_RE = re.compile(r'(\d+\.)*\d+')
API_BASE_URL = 'https://api.github.com/repos'
+# Backwards compatibility variables for the current channel
+REPOSITORY = UPDATE_SOURCES[CHANNEL]
API_URL = f'{API_BASE_URL}/{REPOSITORY}/releases'
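With REPOSITORY now derived from CHANNEL, the release metadata a binary consults follows the channel it was built for rather than always pointing at the stable repo, and the added comment marks these module-level names as backwards-compatibility aliases for the current channel. The effect, illustrated with the mapping shown in the hunk above:

UPDATE_SOURCES = {
    'stable': 'yt-dlp/yt-dlp',
    'nightly': 'yt-dlp/yt-dlp-nightly-builds',
}
API_BASE_URL = 'https://api.github.com/repos'

for channel, repository in UPDATE_SOURCES.items():
    # A `stable` build queries yt-dlp/yt-dlp; a `nightly` build queries its own repo.
    print(channel, f'{API_BASE_URL}/{repository}/releases')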