2023-03-03 18:01:41 +01:00
|
|
|
from __future__ import annotations
|
|
|
|
|
2023-03-04 18:10:08 +01:00
|
|
|
# Allow direct execution
|
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
|
|
|
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
|
|
|
2023-03-03 18:01:41 +01:00
|
|
|
import enum
|
|
|
|
import itertools
|
|
|
|
import json
|
|
|
|
import logging
|
|
|
|
import re
|
|
|
|
from collections import defaultdict
|
|
|
|
from dataclasses import dataclass
|
|
|
|
from functools import lru_cache
|
|
|
|
from pathlib import Path
|
|
|
|
|
2023-03-04 18:10:08 +01:00
|
|
|
from devscripts.utils import read_file, run_process, write_file
|
|
|
|
|
2023-03-03 18:01:41 +01:00
|
|
|
# Base GitHub URL used to build commit/issue/author links in the changelog
BASE_URL = 'https://github.com'
# Directory containing this script (the devscripts/ folder)
LOCATION_PATH = Path(__file__).parent
# Number of hex digits shown for abbreviated commit hashes
HASH_LENGTH = 7

logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
class CommitGroup(enum.Enum):
    """Changelog section a commit belongs to; the value is the section heading."""

    PRIORITY = 'Important'
    CORE = 'Core'
    EXTRACTOR = 'Extractor'
    DOWNLOADER = 'Downloader'
    POSTPROCESSOR = 'Postprocessor'
    NETWORKING = 'Networking'
    MISC = 'Misc.'

    @classmethod
    @lru_cache
    def subgroup_lookup(cls):
        """Return a mapping of known subgroup names to their owning group."""
        subgroups_by_group = {
            cls.MISC: (
                'build',
                'ci',
                'cleanup',
                'devscripts',
                'docs',
                'test',
            ),
            cls.NETWORKING: (
                'rh',
            ),
        }
        lookup = {}
        for group, names in subgroups_by_group.items():
            for name in names:
                lookup[name] = group
        return lookup

    @classmethod
    @lru_cache
    def group_lookup(cls):
        """Return a mapping of prefix aliases and member names to members."""
        aliases = {
            'fd': cls.DOWNLOADER,
            'ie': cls.EXTRACTOR,
            'pp': cls.POSTPROCESSOR,
            'upstream': cls.CORE,
        }
        members = {member.name.lower(): member for member in cls}
        return {**aliases, **members}

    @classmethod
    def get(cls, value: str) -> tuple[CommitGroup | None, str | None]:
        """Resolve a commit prefix like 'ie/foo' to ``(group, subgroup or None)``."""
        group, _, subgroup = (part.strip().lower() for part in value.partition('/'))

        result = cls.group_lookup().get(group)
        if result is None:
            if subgroup:
                # Unknown group with a nested part: hand back the raw value
                return None, value
            # The whole prefix may itself be a known subgroup (e.g. 'cleanup')
            subgroup = group
            result = cls.subgroup_lookup().get(subgroup)

        return result, subgroup or None
|
|
|
|
|
2023-03-03 18:01:41 +01:00
|
|
|
|
|
|
|
@dataclass
class Commit:
    """A single git commit: full hash (may be absent for synthetic entries),
    subject line and list of author names."""

    hash: str | None
    short: str
    authors: list[str]

    def __str__(self):
        """Render as ``'subject' (abbrev-hash) by author1, author2``."""
        parts = [f'{self.short!r}']

        if self.hash:
            parts.append(f'({self.hash[:HASH_LENGTH]})')

        if self.authors:
            parts.append(f'by {", ".join(self.authors)}')

        return ' '.join(parts)
|
|
|
|
|
|
|
|
|
|
|
|
@dataclass
class CommitInfo:
    """A parsed changelog entry derived from a commit message."""

    details: str | None
    sub_details: tuple[str, ...]
    message: str
    issues: list[str]
    commit: Commit
    fixes: list[Commit]

    def key(self):
        """Sort key: case-insensitive details, then sub-details, then message."""
        normalized_details = (self.details or '').lower()
        return (normalized_details, self.sub_details, self.message)
|
|
|
|
|
|
|
|
|
2023-04-03 07:22:11 +02:00
|
|
|
def unique(items):
    """Deduplicate *items* ignoring case and surrounding whitespace (later
    duplicates win), dropping falsy entries; return the survivors sorted."""
    deduped = {}
    for item in items:
        if item:
            deduped[item.strip().lower()] = item
    return sorted(deduped.values())
|
|
|
|
|
|
|
|
|
2023-03-03 18:01:41 +01:00
|
|
|
class Changelog:
    """Renders grouped CommitInfo entries as a markdown changelog document."""

    # Subject lines matching this are treated as generic cleanup commits and
    # merged into a single "Miscellaneous" entry per author set
    MISC_RE = re.compile(r'(?:^|\b)(?:lint(?:ing)?|misc|format(?:ting)?|fixes)(?:\b|$)', re.IGNORECASE)
    # Groups emitted before the collapsible <details> section begins
    ALWAYS_SHOWN = (CommitGroup.PRIORITY,)

    def __init__(self, groups, repo, collapsible=False):
        # groups: mapping of CommitGroup -> list[CommitInfo]
        # repo: 'owner/name' GitHub repository slug used for links
        # collapsible: wrap everything after ALWAYS_SHOWN in <details>
        self._groups = groups
        self._repo = repo
        self._collapsible = collapsible

    def __str__(self):
        """Render the full changelog; tabs used for nesting become spaces."""
        return '\n'.join(self._format_groups(self._groups)).replace('\t', ' ')

    def _format_groups(self, groups):
        """Yield one markdown section per CommitGroup, in enum order."""
        first = True
        for item in CommitGroup:
            # Open the collapsible section right before the first group that
            # is not in ALWAYS_SHOWN
            if self._collapsible and item not in self.ALWAYS_SHOWN and first:
                first = False
                yield '\n<details><summary><h3>Changelog</h3></summary>\n'

            group = groups[item]
            if group:
                yield self.format_module(item.value, group)

        if self._collapsible:
            yield '\n</details>'

    def format_module(self, name, group):
        """Render one group as a '#### <name> changes' section."""
        result = f'\n#### {name} changes\n' if name else '\n'
        return result + '\n'.join(self._format_group(group))

    def _format_group(self, group):
        """Yield markdown bullet lines for one group, clustered by details.

        Entries are sorted first so that itertools.groupby sees contiguous
        runs sharing the same (case-folded) details key.
        """
        sorted_group = sorted(group, key=CommitInfo.key)
        detail_groups = itertools.groupby(sorted_group, lambda item: (item.details or '').lower())
        for _, items in detail_groups:
            items = list(items)
            details = items[0].details

            if details == 'cleanup':
                # Collapse generic misc/cleanup commits into combined entries
                items = self._prepare_cleanup_misc_items(items)

            prefix = '-'
            if details:
                if len(items) == 1:
                    # Single entry: inline the bold details on the same bullet
                    prefix = f'- **{details}**:'
                else:
                    # Multiple entries: emit a parent bullet, indent children
                    yield f'- **{details}**'
                    prefix = '\t-'

            # Second-level clustering on the (case-folded) sub_details tuple
            sub_detail_groups = itertools.groupby(items, lambda item: tuple(map(str.lower, item.sub_details)))
            for sub_details, entries in sub_detail_groups:
                if not sub_details:
                    for entry in entries:
                        yield f'{prefix} {self.format_single_change(entry)}'
                    continue

                entries = list(entries)
                sub_prefix = f'{prefix} {", ".join(entries[0].sub_details)}'
                if len(entries) == 1:
                    # Single entry: keep everything on one bullet line
                    yield f'{sub_prefix}: {self.format_single_change(entries[0])}'
                    continue

                yield sub_prefix
                for entry in entries:
                    yield f'\t{prefix} {self.format_single_change(entry)}'

    def _prepare_cleanup_misc_items(self, items):
        """Merge MISC_RE-matching cleanup entries into one combined
        "Miscellaneous" CommitInfo per distinct author tuple; pass other
        items through unchanged."""
        cleanup_misc_items = defaultdict(list)
        sorted_items = []
        for item in items:
            if self.MISC_RE.search(item.message):
                cleanup_misc_items[tuple(item.commit.authors)].append(item)
            else:
                sorted_items.append(item)

        for commit_infos in cleanup_misc_items.values():
            # Synthetic entry: the message is a comma-joined list of commit
            # links, carried by a hash-less Commit holding the shared authors
            sorted_items.append(CommitInfo(
                'cleanup', ('Miscellaneous',), ', '.join(
                    self._format_message_link(None, info.commit.hash)
                    for info in sorted(commit_infos, key=lambda item: item.commit.hash or '')),
                [], Commit(None, '', commit_infos[0].commit.authors), []))

        return sorted_items

    def format_single_change(self, info: CommitInfo):
        """Render one CommitInfo as a markdown line with link/issue/author
        annotations; any continuation lines after the first are re-appended."""
        message, sep, rest = info.message.partition('\n')
        if '[' not in message:
            # If the message doesn't already contain markdown links, try to add a link to the commit
            message = self._format_message_link(message, info.commit.hash)

        if info.issues:
            message = f'{message} ({self._format_issues(info.issues)})'

        if info.commit.authors:
            message = f'{message} by {self._format_authors(info.commit.authors)}'

        if info.fixes:
            fix_message = ', '.join(f'{self._format_message_link(None, fix.hash)}' for fix in info.fixes)

            authors = sorted({author for fix in info.fixes for author in fix.authors}, key=str.casefold)
            # Only credit fix authors separately when they differ from the
            # original commit's author list
            if authors != info.commit.authors:
                fix_message = f'{fix_message} by {self._format_authors(authors)}'

            message = f'{message} (With fixes in {fix_message})'

        return message if not sep else f'{message}{sep}{rest}'

    def _format_message_link(self, message, hash):
        """Wrap *message* (or the abbreviated hash) in a commit link, if a
        hash is available; otherwise return the message unchanged."""
        assert message or hash, 'Improperly defined commit message or override'
        message = message if message else hash[:HASH_LENGTH]
        return f'[{message}]({self.repo_url}/commit/{hash})' if hash else message

    def _format_issues(self, issues):
        """Render issue numbers as comma-separated markdown issue links."""
        return ', '.join(f'[#{issue}]({self.repo_url}/issues/{issue})' for issue in issues)

    @staticmethod
    def _format_authors(authors):
        """Render author names as comma-separated GitHub profile links."""
        return ', '.join(f'[{author}]({BASE_URL}/{author})' for author in authors)

    @property
    def repo_url(self):
        """Full GitHub URL of the configured repository."""
        return f'{BASE_URL}/{self._repo}'
|
|
|
|
|
|
|
|
|
|
|
|
class CommitRange:
    """Parses `git log` output for a commit range and exposes the commits,
    with revert/fix bookkeeping and support for JSON-defined overrides."""

    COMMAND = 'git'
    # Sentinel written between commits via the --format string below
    COMMIT_SEPARATOR = '-----'

    # Matches an 'Authored by:' line in a commit body
    AUTHOR_INDICATOR_RE = re.compile(r'Authored by:? ', re.IGNORECASE)
    # Splits a subject line into optional [prefix], optional sub_details,
    # the message itself, and an optional trailing (#123, #456) issue list
    MESSAGE_RE = re.compile(r'''
        (?:\[(?P<prefix>[^\]]+)\]\ )?
        (?:(?P<sub_details>`?[\w.-]+`?): )?
        (?P<message>.+?)
        (?:\ \((?P<issues>\#\d+(?:,\ \#\d+)*)\))?
        ''', re.VERBOSE | re.DOTALL)
    # Heuristic for prefix-less commits that are probably extractor changes
    EXTRACTOR_INDICATOR_RE = re.compile(r'(?:Fix|Add)\s+Extractors?', re.IGNORECASE)
    # 'Revert <full-40-char-hash>' subjects (optionally after a [prefix])
    REVERT_RE = re.compile(r'(?:\[[^\]]+\]\s+)?(?i:Revert)\s+([\da-f]{40})')
    # 'Fix/Fixes ... <full-40-char-hash>' subjects referencing an earlier commit
    FIXES_RE = re.compile(r'(?i:Fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Revert)\s+([\da-f]{40})')
    # youtube-dl upstream merge commits
    UPSTREAM_MERGE_RE = re.compile(r'Update to ytdl-commit-([\da-f]+)')

    def __init__(self, start, end, default_author=None):
        # start may be None, meaning "everything up to the last release commit"
        self._start, self._end = start, end
        self._commits, self._fixes = self._get_commits_and_fixes(default_author)
        # Commits injected later via 'add' overrides
        self._commits_added = []

    def __iter__(self):
        # Parsed commits first, then override-added ones
        return iter(itertools.chain(self._commits.values(), self._commits_added))

    def __len__(self):
        return len(self._commits) + len(self._commits_added)

    def __contains__(self, commit):
        """Membership by Commit object (via its hash) or by hash string."""
        if isinstance(commit, Commit):
            if not commit.hash:
                return False
            commit = commit.hash

        return commit in self._commits

    def _get_commits_and_fixes(self, default_author):
        """Run `git log` over the range and return
        ``(commits_by_hash, fixes_by_fixed_hash)``."""
        # Format: hash, subject, body, then the separator sentinel per commit
        result = run_process(
            self.COMMAND, 'log', f'--format=%H%n%s%n%b%n{self.COMMIT_SEPARATOR}',
            f'{self._start}..{self._end}' if self._start else self._end).stdout

        commits, reverts = {}, {}
        fixes = defaultdict(list)
        # One shared iterator: the loop header consumes the hash line, the
        # body below consumes the subject and body lines of the same commit
        lines = iter(result.splitlines(False))
        for i, commit_hash in enumerate(lines):
            short = next(lines)
            # Release/version-bump commits delimit the range
            skip = short.startswith('Release ') or short == '[version] update'

            authors = [default_author] if default_author else []
            # Consume body lines up to the separator; the last 'Authored by'
            # line found wins
            for line in iter(lambda: next(lines), self.COMMIT_SEPARATOR):
                match = self.AUTHOR_INDICATOR_RE.match(line)
                if match:
                    authors = sorted(map(str.strip, line[match.end():].split(',')), key=str.casefold)

            commit = Commit(commit_hash, short, authors)
            # With an explicit start (or at the very first commit), release
            # commits are merely skipped; otherwise one ends the range
            if skip and (self._start or not i):
                logger.debug(f'Skipped commit: {commit}')
                continue
            elif skip:
                logger.debug(f'Reached Release commit, breaking: {commit}')
                break

            revert_match = self.REVERT_RE.fullmatch(commit.short)
            if revert_match:
                # Remember the revert; resolved after the scan below
                reverts[revert_match.group(1)] = commit
                continue

            fix_match = self.FIXES_RE.search(commit.short)
            if fix_match:
                commitish = fix_match.group(1)
                fixes[commitish].append(commit)

            commits[commit.hash] = commit

        for commitish, revert_commit in reverts.items():
            reverted = commits.pop(commitish, None)
            if reverted:
                # Both the revert and its target are inside the range:
                # drop both
                logger.debug(f'{commitish} fully reverted {reverted}')
            else:
                # Target outside the range: keep the revert as a normal commit
                commits[revert_commit.hash] = revert_commit

        for commitish, fix_commits in fixes.items():
            if commitish in commits:
                # Fixes for an in-range commit are folded into its entry, so
                # remove their standalone commits
                hashes = ', '.join(commit.hash[:HASH_LENGTH] for commit in fix_commits)
                logger.info(f'Found fix(es) for {commitish[:HASH_LENGTH]}: {hashes}')
                for fix_commit in fix_commits:
                    del commits[fix_commit.hash]
            else:
                logger.debug(f'Commit with fixes not in changes: {commitish[:HASH_LENGTH]}')

        return commits, fixes

    def apply_overrides(self, overrides):
        """Apply 'add'/'remove'/'change' override dicts (from the override
        JSON file) to the parsed commits."""
        for override in overrides:
            when = override.get('when')
            # 'when' gates an override to ranges containing that commit
            if when and when not in self and when != self._start:
                logger.debug(f'Ignored {when!r} override')
                continue

            override_hash = override.get('hash') or when
            if override['action'] == 'add':
                commit = Commit(override.get('hash'), override['short'], override.get('authors') or [])
                logger.info(f'ADD {commit}')
                self._commits_added.append(commit)

            elif override['action'] == 'remove':
                if override_hash in self._commits:
                    logger.info(f'REMOVE {self._commits[override_hash]}')
                    del self._commits[override_hash]

            elif override['action'] == 'change':
                if override_hash not in self._commits:
                    continue
                commit = Commit(override_hash, override['short'], override.get('authors') or [])
                logger.info(f'CHANGE {self._commits[commit.hash]} -> {commit}')
                self._commits[commit.hash] = commit

        # git log yields newest-first; reverse into chronological order
        self._commits = {key: value for key, value in reversed(self._commits.items())}

    def groups(self):
        """Parse every commit subject and bucket the resulting CommitInfos
        by CommitGroup; returns a defaultdict(list)."""
        group_dict = defaultdict(list)
        for commit in self:
            upstream_re = self.UPSTREAM_MERGE_RE.search(commit.short)
            if upstream_re:
                # Normalize upstream merge commits to a canonical subject
                commit.short = f'[upstream] Merged with youtube-dl {upstream_re.group(1)}'

            match = self.MESSAGE_RE.fullmatch(commit.short)
            if not match:
                logger.error(f'Error parsing short commit message: {commit.short!r}')
                continue

            prefix, sub_details_alt, message, issues = match.groups()
            # '#123' -> '123'
            issues = [issue.strip()[1:] for issue in issues.split(',')] if issues else []

            if prefix:
                # A prefix may contain several comma-separated parts; the
                # first resolvable group wins, details are merged
                groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')))
                group = next(iter(filter(None, groups)), None)
                details = ', '.join(unique(details))
                sub_details = list(itertools.chain.from_iterable(sub_details))
            else:
                group = CommitGroup.CORE
                details = None
                sub_details = []

            if sub_details_alt:
                sub_details.append(sub_details_alt)
            sub_details = tuple(unique(sub_details))

            if not group:
                # Unresolvable prefix: guess extractor vs core from the subject
                if self.EXTRACTOR_INDICATOR_RE.search(commit.short):
                    group = CommitGroup.EXTRACTOR
                    logger.error(f'Assuming [ie] group for {commit.short!r}')
                else:
                    group = CommitGroup.CORE

            commit_info = CommitInfo(
                details, sub_details, message.strip(),
                issues, commit, self._fixes[commit.hash])

            logger.debug(f'Resolved {commit.short!r} to {commit_info!r}')
            group_dict[group].append(commit_info)

        return group_dict

    @staticmethod
    def details_from_prefix(prefix):
        """Split one prefix part into ``(group, details, sub_details)``."""
        if not prefix:
            return CommitGroup.CORE, None, ()

        # 'ie/site:sub1:sub2' -> prefix='ie/site', sub_details=['sub1', 'sub2']
        prefix, *sub_details = prefix.split(':')

        group, details = CommitGroup.get(prefix)
        if group is CommitGroup.PRIORITY and details:
            # Strip the 'priority/' part, keeping what follows the slash
            details = details.partition('/')[2].strip()

        if details and '/' in details:
            logger.error(f'Prefix is overnested, using first part: {prefix}')
            details = details.partition('/')[0].strip()

        if details == 'common':
            details = None
        elif group is CommitGroup.NETWORKING and details == 'rh':
            details = 'Request Handler'

        return group, details, sub_details
|
2023-03-03 18:01:41 +01:00
|
|
|
|
|
|
|
|
|
|
|
def get_new_contributors(contributors_path, commits):
    """Return authors from *commits* that are not yet listed in the
    CONTRIBUTORS file at *contributors_path*, sorted case-insensitively."""
    known = set()
    if contributors_path.exists():
        # Lines look like 'name (extra info)' or 'name1/name2'; compare
        # case-insensitively via casefold
        for entry in read_file(contributors_path).splitlines():
            name = entry.strip().partition(' (')[0]
            known.update(part.casefold() for part in name.split('/'))

    newcomers = set()
    for commit in commits:
        for author in commit.authors:
            folded = author.casefold()
            if folded in known:
                continue
            # Track the folded name too, so later case-variants of the same
            # author aren't reported twice
            known.add(folded)
            newcomers.add(author)

    return sorted(newcomers, key=str.casefold)
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(
        description='Create a changelog markdown from a git commit range')
    parser.add_argument(
        'commitish', default='HEAD', nargs='?',
        help='The commitish to create the range from (default: %(default)s)')
    parser.add_argument(
        '-v', '--verbosity', action='count', default=0,
        help='increase verbosity (can be used twice)')
    parser.add_argument(
        '-c', '--contributors', action='store_true',
        help='update CONTRIBUTORS file (default: %(default)s)')
    parser.add_argument(
        '--contributors-path', type=Path, default=LOCATION_PATH.parent / 'CONTRIBUTORS',
        help='path to the CONTRIBUTORS file')
    parser.add_argument(
        '--no-override', action='store_true',
        help='skip override json in commit generation (default: %(default)s)')
    parser.add_argument(
        '--override-path', type=Path, default=LOCATION_PATH / 'changelog_override.json',
        help='path to the changelog_override.json file')
    parser.add_argument(
        '--default-author', default='pukkandan',
        help='the author to use without a author indicator (default: %(default)s)')
    parser.add_argument(
        '--repo', default='yt-dlp/yt-dlp',
        help='the github repository to use for the operations (default: %(default)s)')
    parser.add_argument(
        '--collapsible', action='store_true',
        help='make changelog collapsible (default: %(default)s)')
    args = parser.parse_args()

    # Each -v lowers the threshold by one level (WARNING -> INFO -> DEBUG)
    logging.basicConfig(
        datefmt='%Y-%m-%d %H-%M-%S', format='{asctime} | {levelname:<8} | {message}',
        level=logging.WARNING - 10 * args.verbosity, style='{', stream=sys.stderr)

    # start=None: collect commits back to the most recent release commit
    commits = CommitRange(None, args.commitish, args.default_author)

    if not args.no_override:
        if args.override_path.exists():
            overrides = json.loads(read_file(args.override_path))
            commits.apply_overrides(overrides)
        else:
            logger.warning(f'File {args.override_path.as_posix()} does not exist')

    logger.info(f'Loaded {len(commits)} commits')

    new_contributors = get_new_contributors(args.contributors_path, commits)
    if new_contributors:
        if args.contributors:
            # Append (mode='a') so existing CONTRIBUTORS entries are kept
            write_file(args.contributors_path, '\n'.join(new_contributors) + '\n', mode='a')
        logger.info(f'New contributors: {", ".join(new_contributors)}')

    # The rendered changelog goes to stdout; all logging goes to stderr
    print(Changelog(commits.groups(), args.repo, args.collapsible))
|