diff --git a/.github/workflows/report_404_packages.yml b/.github/workflows/report_404_packages.yml new file mode 100644 index 00000000000..84cec8e693b --- /dev/null +++ b/.github/workflows/report_404_packages.yml @@ -0,0 +1,147 @@ +name: Report unreachable packages + +on: + workflow_dispatch: + schedule: + - cron: "31 7 * * *" + +permissions: + contents: write + pull-requests: write + +concurrency: + group: report-404-packages + cancel-in-progress: false + +jobs: + report_404_packages: + runs-on: ubuntu-latest + env: + GH_TOKEN: ${{ github.token }} + steps: + - uses: actions/checkout@v5 + with: + fetch-depth: 0 + + # Always roll the cache, GitHub will evict it after 7 days of inactivity. + - name: Restore reported URLs cache + id: reported_urls_cache + uses: actions/cache@v5 + with: + path: ./reported_urls.txt + key: reported-urls-cache-${{ github.run_id }} + restore-keys: | + reported-urls-cache- + + - name: Require cache for scheduled runs + run: | + # cache-hit semantics: + # true => exact key match + # false => restore-key match + # "" => true miss (nothing restored) + if [ "${{ github.event_name }}" != "workflow_dispatch" ] && [ "${{ steps.reported_urls_cache.outputs.cache-hit }}" = "" ]; then + echo "::error::No reported_urls cache found. Run workflow_dispatch once to bootstrap." + exit 1 + fi + + - name: Ensure reported_urls.txt exists + run: touch ./reported_urls.txt + + - name: Decide run cadence + id: cadence + run: | + if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then + echo "run_report=true" >> "$GITHUB_OUTPUT" + exit 0 + fi + + # Daily schedule, but only report on first Saturday of the month. + if [ "$(date -u +%u)" -eq 6 ] && [ "$(date -u +%d)" -le 7 ]; then + echo "run_report=true" >> "$GITHUB_OUTPUT" + else + echo "run_report=false" >> "$GITHUB_OUTPUT" + echo "::notice::Skipping report run: not the first Saturday of the month." 
+ fi + + - name: Set up Python + if: steps.cadence.outputs.run_report == 'true' + uses: actions/setup-python@v5 + with: + python-version: "3.13" + + - name: Set up uv + if: steps.cadence.outputs.run_report == 'true' + uses: astral-sh/setup-uv@v5 + + - name: Configure git + if: steps.cadence.outputs.run_report == 'true' + run: | + git config user.name "thecrawl bot" + git config user.email "noreply@packagecontrol.io" + + - name: Run 404 package report + id: report + if: steps.cadence.outputs.run_report == 'true' + run: | + uv run -m tools.report_404_packages \ + --commit \ + --build-pr-message \ + -z \ + --ignore-file ./reported_urls.txt > ./reported_records.txt + + if [ -s ./reported_records.txt ]; then + echo "has_results=true" >> "$GITHUB_OUTPUT" + else + echo "has_results=false" >> "$GITHUB_OUTPUT" + fi + + - name: No packages to report + if: steps.cadence.outputs.run_report == 'true' && steps.report.outputs.has_results != 'true' + run: echo "No unreachable packages to report." + + - name: Prepare branch + id: branch + if: steps.cadence.outputs.run_report == 'true' && steps.report.outputs.has_results == 'true' + run: | + report_hash="$(sha256sum ./reported_records.txt | awk '{print substr($1,1,12)}')" + branch_name="bot/report-404-${report_hash}-${GITHUB_RUN_ID}-${GITHUB_RUN_ATTEMPT:-1}" + + git switch -c "$branch_name" + git push --set-upstream origin "$branch_name" + echo "name=$branch_name" >> "$GITHUB_OUTPUT" + + - name: Open pull request + if: steps.cadence.outputs.run_report == 'true' && steps.report.outputs.has_results == 'true' + run: | + gh pr create \ + --base "${{ github.ref_name }}" \ + --head "${{ steps.branch.outputs.name }}" \ + --title "$(cat ./pr_title.txt)" \ + --body-file ./pr_body.md + + - name: Update reported URL list for cache + if: steps.cadence.outputs.run_report == 'true' + run: | + # Append URLs from this run (name\0details\0timestamp records). 
+ awk -v RS='\n' -v FS='\0' 'NF >= 2 && $2 != "" { print $2 }' \ + ./reported_records.txt >> ./reported_urls.txt + + # Keep only URLs still present in workspace.json. + if [ ! -f ./workspace.json ]; then + echo "::error::workspace.json missing; cannot prune reported URLs." + exit 1 + fi + + tmp_file="$(mktemp)" + while IFS= read -r url; do + [ -z "$url" ] && continue + if grep -Fq "\"$url\"" ./workspace.json; then + echo "$url" >> "$tmp_file" + fi + done < ./reported_urls.txt + + sort -u "$tmp_file" > ./reported_urls.txt + rm -f "$tmp_file" + + echo "Reported URLs:" + cat ./reported_urls.txt diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000000..7b08236cd2f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,17 @@ +[project] +name = "package-control-channel" +version = "0.1.0" +description = "Utilities for maintaining package_control_channel" +readme = "readme.md" +requires-python = ">=3.13" +dependencies = [] + +[dependency-groups] +dev = [ + "pytest>=9.0.0", + "mockito>=2.0.4", + "pytest-mockito>=0.0.6.post1", +] + +[tool.pytest.ini_options] +testpaths = ["tools"] diff --git a/tools/README.md b/tools/README.md index b20cbf80d79..c30456273ac 100644 --- a/tools/README.md +++ b/tools/README.md @@ -21,8 +21,67 @@ Check-only mode (non-zero exit code if any file would be changed): uv run -m tools.format_package_control_channel ./repository --check ``` +## `report_404_packages.py` + +Finds packages in a crawler `workspace.json` that fail with `fatal: 404` +for at least --min-age days (default: 21) and report them. + +If --commit is set, it actually removes the packages from the repository and +creates a commit. + +If you don't specify a --workspace, it will download one for you from +`packagecontrol/thecrawl`. (Requires `gh`.) + +The default sources are derived from `git origin`; use `--allowed-source` +to override. 
+ +Use `--ignore` (name or details URL) and/or `--ignore-file` to skip already +known packages so recurring scheduled runs don't re-report them. + +### Usage + +Report only (`-z` for machine friendly output): + +```bash +uv run -m tools.report_404_packages +uv run -m tools.report_404_packages -z +``` + +Use a specific workspace file: + +```bash +uv run -m tools.report_404_packages --workspace ./workspace.json +``` + +Ignore specific package details URLs (or names): + +```bash +uv run -m tools.report_404_packages \ + --ignore "https://github.com/axsuul/sublime-0x0" \ + --ignore "SublimeLinter,AnotherPackage" +``` + +Ignore via file: + +```bash +uv run -m tools.report_404_packages --ignore-file ./tools/known-404s.txt +``` + +Apply removals and commit: + +```bash +uv run -m tools.report_404_packages --commit +``` + +Build PR message files (`pr_title.txt`, `pr_body.md`) from the report. This +is for the CI. + +```bash +uv run -m tools.report_404_packages --build-pr-message +``` + ### Tests ```bash -uvx pytest tools/test_channel_json_format.py +uvx pytest ``` diff --git a/tools/report_404_packages.py b/tools/report_404_packages.py new file mode 100644 index 00000000000..cbc183f2e68 --- /dev/null +++ b/tools/report_404_packages.py @@ -0,0 +1,748 @@ +from __future__ import annotations + +import argparse +import json +import subprocess +import sys +from collections.abc import Container +from dataclasses import dataclass +from datetime import UTC, datetime, timedelta +from pathlib import Path +from typing import Any, Iterable, Iterator, Mapping +from urllib.parse import urlparse + +from ._channel_json_format import format_channel_json + + +@dataclass(frozen=True) +class UnreachablePackage: + name: str + details: str | None + failing_since: datetime + age_days: int + source: str + + +def main(argv: list[str] | None = None) -> int: + args = parse_args(argv) + now = datetime.now(UTC) + + workspace_path = resolve_workspace_path(args.workspace) + if args.workspace is None: + 
refresh_workspace_if_stale(workspace_path, now=now) + + workspace = load_workspace(workspace_path) + print_workspace_age_note_if_needed(workspace_path, workspace=workspace, now=now) + + allowed_sources = resolve_allowed_sources(args.allowed_source) + ignored_identifiers = resolve_ignored_identifiers( + ignore_values=args.ignore, + ignore_files=args.ignore_file, + ) + unreachable_packages = collect_unreachable_packages( + workspace, + allowed_sources=allowed_sources, + min_age_days=args.min_age, + ignored_identifiers=ignored_identifiers, + now=now, + ) + + planned_files, removed_names = remove_packages_from_repository( + repository_root=Path("."), + unreachable_packages=unreachable_packages, + apply_changes=False, + ) + packages_to_report = [ + package + for package in unreachable_packages + if package.name in removed_names + ] + + if args.build_pr_message and packages_to_report: + write_pr_message_files(packages_to_report, root=Path(".")) + + if args.commit and packages_to_report: + ensure_paths_are_clean(planned_files) + + changed_files, _ = remove_packages_from_repository( + repository_root=Path("."), + unreachable_packages=unreachable_packages, + apply_changes=True, + ) + + commit_message = render_commit_message(packages_to_report) + create_git_commit(changed_files=changed_files, commit_message=commit_message) + + if args.z: + sys.stdout.write(render_machine_report(packages_to_report)) + else: + print_last_commit_patch() + return 0 + + if args.z: + sys.stdout.write(render_machine_report(packages_to_report)) + else: + sys.stdout.write(render_human_report(packages_to_report)) + return 0 + + +def parse_args(argv: list[str] | None = None) -> argparse.Namespace: + parser = argparse.ArgumentParser( + description=( + "Report packages that fail with fatal 404 errors for at least " + "a configurable number of days." 
+ ) + ) + parser.add_argument( + "--commit", + action="store_true", + help="Apply the removals to the repository and create a commit.", + ) + parser.add_argument( + "-z", + action="store_true", + help=( + "Machine-readable output as newline-delimited records with " + "NUL-separated fields: \\0\\0" + ), + ) + parser.add_argument( + "--min-age", + type=int, + default=21, + metavar="DAYS", + help="Minimum failing age in full days (default: 21).", + ) + parser.add_argument( + "--allowed-source", + action="append", + default=None, + help=( + "Allowed source URL prefix. Can be passed multiple times. " + "By default this is computed from git origin." + ), + ) + parser.add_argument( + "--workspace", + nargs="?", + const="workspace.json", + default=None, + help=( + "Use a workspace file. Without a value, defaults to workspace.json. " + "If omitted, workspace.json is auto-refreshed via gh when older " + "than 1 hour." + ), + ) + parser.add_argument( + "--ignore", + action="append", + default=None, + help=( + "Ignore package identifiers (name or details URL). Can be passed " + "multiple times and supports comma-separated values." + ), + ) + parser.add_argument( + "--ignore-file", + action="append", + default=None, + metavar="PATH", + help=( + "Read ignored package identifiers (name or details URL) from file. " + "One value per line, with optional comma-separated values. " + "Blank lines and lines starting with # are ignored." 
+ ), + ) + parser.add_argument( + "--build-pr-message", + action="store_true", + help="Write pr_title.txt and pr_body.md for the current report.", + ) + return parser.parse_args(argv) + + +def resolve_workspace_path(workspace_arg: str | None) -> Path: + if workspace_arg is None: + return Path("workspace.json") + return Path(workspace_arg) + + +def refresh_workspace_if_stale(workspace_path: Path, *, now: datetime) -> None: + if workspace_path.exists() and workspace_is_fresh(workspace_path, now=now): + return + + workspace_path.parent.mkdir(parents=True, exist_ok=True) + run([ + "gh", + "-R", + "packagecontrol/thecrawl", + "release", + "download", + "crawler-status", + "--pattern", + "workspace.json", + "--output", + str(workspace_path), + "--clobber", + ]) + + +def workspace_is_fresh(workspace_path: Path, *, now: datetime) -> bool: + modified_at = datetime.fromtimestamp(workspace_path.stat().st_mtime, tz=UTC) + return now - modified_at < timedelta(hours=1) + + +def load_workspace(workspace_path: Path) -> dict[str, Any]: + if not workspace_path.exists(): + raise SystemExit(f"Workspace file not found: {workspace_path}") + + try: + return json.loads(workspace_path.read_text(encoding="utf-8")) + except json.JSONDecodeError as error: + raise SystemExit(f"Failed to parse workspace JSON: {workspace_path}: {error}") from error + + +def print_workspace_age_note_if_needed( + workspace_path: Path, + *, + workspace: dict[str, Any], + now: datetime, +) -> None: + newest_last_seen = newest_last_seen_timestamp(workspace) + if newest_last_seen is None: + return + + parsed_newest_last_seen = parse_timestamp(newest_last_seen) + if parsed_newest_last_seen is None: + return + + if now - parsed_newest_last_seen < timedelta(days=1): + return + + print( + ( + f"Note: your {workspace_path} file is rather old, consider downloading " + "a fresh one using\n" + "gh -R packagecontrol/thecrawl release download crawler-status " + "--pattern workspace.json --output workspace.json --clobber" + ), + 
file=sys.stderr, + ) + + +def resolve_allowed_sources(override_sources: list[str] | None) -> list[str]: + if override_sources is not None: + return override_sources + return compute_allowed_sources_from_origin() + + +def resolve_ignored_identifiers( + *, + ignore_values: list[str] | None, + ignore_files: list[str] | None, +) -> set[str]: + identifiers: set[str] = set() + + for ignore_value in ignore_values or []: + identifiers.update(split_ignored_values(ignore_value)) + + for ignore_file in ignore_files or []: + identifiers.update(load_ignored_values_file(Path(ignore_file))) + + return identifiers + + +def split_ignored_values(raw_value: str) -> set[str]: + return {item.strip() for item in raw_value.split(",") if item.strip()} + + +def load_ignored_values_file(path: Path) -> set[str]: + if not path.exists(): + raise SystemExit(f"Ignore file not found: {path}") + + return { + value + for raw_line in path.read_text(encoding="utf-8").splitlines() + if (line := raw_line.strip()) + if not line.startswith("#") + for value in split_ignored_values(line) + } + + +def compute_allowed_sources_from_origin() -> list[str]: + origin_url = run_output(["git", "config", "--get", "remote.origin.url"]).strip() + if not origin_url: + raise SystemExit( + "Failed to determine git origin. Provide --allowed-source explicitly." + ) + + try: + owner, repo = parse_github_origin_owner_repo(origin_url) + except ValueError as error: + raise SystemExit( + f"Unsupported git origin URL: {origin_url}. " + "Provide --allowed-source explicitly." 
+ ) from error + + owner_repo_pairs = equivalent_origin_repositories(owner, repo) + return [ + f"https://raw.githubusercontent.com/{source_owner}/{source_repo}/" + for source_owner, source_repo in owner_repo_pairs + ] + + +def parse_github_origin_owner_repo(origin_url: str) -> tuple[str, str]: + http_url = remote_to_url(origin_url) + if "://github.com/" not in http_url: + raise ValueError("Origin is not github.com") + return parse_owner_repo(http_url) + + +def equivalent_origin_repositories(owner: str, repo: str) -> list[tuple[str, str]]: + if ( + repo == "package_control_channel" + and owner in ("wbond", "sublimehq") + ): + return [("sublimehq", repo), ("wbond", repo)] + + return [(owner, repo)] + + +def newest_last_seen_timestamp(workspace: dict[str, Any]) -> str | None: + last_seens = ( + last_seen + for package in workspace.get("packages").values() + if (last_seen := package.get("last_seen")) + ) + return max(last_seens, default=None) + + +def collect_unreachable_packages( + workspace: dict[str, Any], + *, + allowed_sources: list[str], + min_age_days: int, + ignored_identifiers: Container[str] = (), + now: datetime, +) -> list[UnreachablePackage]: + unreachable: list[UnreachablePackage] = [] + for package in workspace.get("packages").values(): + source = package.get("source") + if not isinstance(source, str): + continue + if not any(source.startswith(allowed_source) for allowed_source in allowed_sources): + continue + + name = package["name"] + if name in ignored_identifiers: + continue + + details = package.get("details") + if details and details in ignored_identifiers: + continue + + fail_reason = package.get("fail_reason", "") + if "fatal: 404" not in fail_reason.lower(): + continue + + raw_failing_since = package.get("failing_since") + if not raw_failing_since: + continue + + failing_since = parse_timestamp(raw_failing_since) + if failing_since is None: + continue + + age_days = (now - failing_since).days + if age_days < min_age_days: + continue + + 
unreachable.append( + UnreachablePackage( + name=name, + details=details, + failing_since=failing_since, + age_days=age_days, + source=source, + ) + ) + + return sorted( + unreachable, + key=lambda package: (package.failing_since, package.name.casefold()), + ) + + +def remove_packages_from_repository( + *, + repository_root: Path, + unreachable_packages: list[UnreachablePackage], + apply_changes: bool = True, +) -> tuple[list[Path], set[str]]: + changed_files: list[Path] = [] + removed_names: set[str] = set() + unreachable_names = {package.name for package in unreachable_packages} + root = repository_root.resolve() + + source_urls = unique(package.source for package in unreachable_packages) + package_files = ( + package_file + for source_url in source_urls + for package_file in iter_channel_package_files(source_url, root=root) + ) + for json_file in package_files: + payload = json.loads(json_file.read_text(encoding="utf-8")) + packages = payload.get("packages", []) + + kept_packages: list[Any] = [] + file_changed = False + for package in packages: + package_name = extract_package_name(package) + if package_name is None or package_name not in unreachable_names: + kept_packages.append(package) + continue + + removed_names.add(package_name) + file_changed = True + + if not file_changed: + continue + + changed_files.append(json_file) + if apply_changes: + payload["packages"] = kept_packages + json_file.write_text( + format_channel_json(payload), + encoding="utf-8", + ) + + return changed_files, removed_names + + +def iter_channel_package_files( + source_url: str, *, root: Path +) -> Iterator[Path]: + source_file = resolve_source_file_path(source_url, root=root) + main_file = (root / source_file).resolve() + yield main_file + + payload = json.loads(main_file.read_text(encoding="utf-8")) + for include_entry in payload.get("includes", []): + channel_file = (main_file.parent / include_entry).resolve() + + try: + channel_file.relative_to(root) + except ValueError as error: 
+ raise SystemExit( + "Channel file escapes repository root: " + f"{channel_file} (source: {source_url})" + ) from error + + if not channel_file.is_file(): + raise SystemExit( + "Channel file listed by source/includes is missing: " + f"{channel_file} (source: {source_url})" + ) + + yield channel_file + + +def resolve_source_file_path(source_url: str, *, root: Path) -> Path: + """Map a crawler source URL to a channel file path in this checkout.""" + + for candidate in unique(candidate_source_file_paths(source_url)): + if (root / candidate).is_file(): + return candidate + + raise SystemExit( + f"Failed to map source URL to local file: {source_url}" + ) + + +def candidate_source_file_paths(source_url: str) -> Iterator[Path]: + parsed = urlparse(source_url) + path_parts = [part for part in parsed.path.strip("/").split("/") if part] + if not path_parts: + return [] + + # Known GitHub URL layout is translated directly first. If that does not + # resolve to an existing file, fall back to trying progressively shorter URL + # path suffixes. That fallback keeps custom/self-hosted source URLs working + # without having to model every possible URL layout. 
+ + if github_raw_path := resolve_github_source_url(parsed.netloc, path_parts): + yield github_raw_path + + yield from ( + Path(*path_parts[split_index:]) + for split_index in range(len(path_parts)) + ) + + +def resolve_github_source_url( + netloc: str, + path_parts: list[str], +) -> Path | None: + if netloc != "raw.githubusercontent.com" or len(path_parts) < 4: + return None + + if ( + len(path_parts) >= 6 + and path_parts[2] == "refs" + and path_parts[3] in {"heads", "tags"} + ): + return Path(*path_parts[5:]) + + return Path(*path_parts[3:]) + + +def unique(paths: Iterable[Path]) -> Iterator[Path]: + seen: set[Path] = set() + for path in paths: + if path in seen: + continue + seen.add(path) + yield path + + +def ensure_paths_are_clean(paths: list[Path]) -> None: + if not paths: + return + + status = run_output(["git", "diff", "--name-only", "--", *[str(path) for path in paths]]) + if not status.strip(): + return + + raise SystemExit( + "Refusing to commit because target files are dirty:\n" + f"{status.rstrip()}" + ) + + +def render_commit_message(packages: list[UnreachablePackage]) -> str: + singular = len(packages) == 1 + if singular: + package = packages[0] + subject = f"Remove unreachable package {package.name}" + intro = ( + f"Remove {package.name} which responds with a 404 since " + f"{format_date(package.failing_since)}." + ) + body_lines = [intro] + else: + subject = "Remove unreachable packages" + intro = "Remove the following packages which respond with 404s." 
+ bullets = [ + f"- {package.name} [since {format_date(package.failing_since)}]" + for package in packages + ] + body_lines = [ + intro, + "", + *bullets, + ] + + return f"{subject}\n\n{'\n'.join(body_lines)}\n" + + +def render_human_report(packages: list[UnreachablePackage]) -> str: + if not packages: + return "\n" + + max_name_width = max(len(package.name) for package in packages) + lines = [ + f"{package.name.ljust(max_name_width)} " + f"[since {format_date(package.failing_since)}; {format_age(package.age_days)}]" + for package in packages + ] + return "\n".join(lines) + "\n" + + +def render_machine_report(packages: list[UnreachablePackage]) -> str: + if not packages: + return "" + + return "\n".join( + f"{package.name}\0{package.details or ''}\0{format_timestamp(package.failing_since)}" + for package in packages + ) + + +def write_pr_message_files( + packages: list[UnreachablePackage], + *, + root: Path, +) -> None: + (root / "pr_title.txt").write_text(render_pr_title(packages) + "\n", encoding="utf-8") + (root / "pr_body.md").write_text(render_pr_body(packages), encoding="utf-8") + + +def render_pr_title(packages: list[UnreachablePackage]) -> str: + if len(packages) == 1: + return f"Remove unreachable {packages[0].name}" + return "Remove unreachable packages" + + +def render_pr_body(packages: list[UnreachablePackage]) -> str: + if len(packages) == 1: + subject = "The following package responds with a 404:" + status_line = "You can check the current [status](https://packages.sublimetext.io/status)." + outro = "This PR removes the package from the registry." + else: + subject = "The following packages respond with 404s:" + status_line = "You can check their current [status](https://packages.sublimetext.io/status)." + outro = "This PR removes the packages from the registry." 
+ + bullets = [ + f"- **{package.name}** [since {format_date(package.failing_since)}; {format_age(package.age_days)}]" + for package in packages + ] + + return "\n".join([ + "Hi, thecrawl bot here! 👋", + "", + subject, + "", + *bullets, + "", + status_line, + "", + outro, + "", + ]) + + +def format_date(value: datetime) -> str: + return value.astimezone(UTC).date().isoformat() + + +def format_timestamp(value: datetime) -> str: + return value.astimezone(UTC).strftime("%Y-%m-%dT%H:%M:%SZ") + + +def format_age(days: int) -> str: + if days > 7: + weeks = days // 7 + return f"{weeks} {pluralize(weeks, 'week')}" + return f"{days} {pluralize(days, 'day')}" + + +def pluralize(count: int, singular: str) -> str: + if count == 1: + return singular + return f"{singular}s" + + +def parse_timestamp(value: str) -> datetime | None: + try: + parsed = datetime.fromisoformat(value) + except ValueError: + return None + + if parsed.tzinfo is None: + return parsed.replace(tzinfo=UTC) + return parsed.astimezone(UTC) + + +def extract_package_name(package: Mapping[str, Any]) -> str | None: + """ + Extract the package name from a package entry. + Tries 'name' key first, then parses the repo name from 'details' if it's a *Hub URL. + """ + if name := package.get("name"): + return name + + if details := package.get("details"): + try: + _, repo = parse_owner_repo(details) + except ValueError: + return None + else: + return repo + return None + + +def parse_owner_repo(url: str) -> tuple[str, str]: + """ + Extract owner and repo name from a *Hub URL. 
+    Example: https://github.com/timbrel/GitSavvy -> ("timbrel", "GitSavvy")
+             https://github.com/timbrel/GitSavvy/tree/dev -> ("timbrel", "GitSavvy")
+             https://github.com/timbrel/GitSavvy/releases/tag/2.50.0 -> ("timbrel", "GitSavvy")
+             https://gitlab.com/jiehong/sublime_jq -> ("jiehong", "sublime_jq")
+             https://bitbucket.org/hmml/jsonlint -> ("hmml", "jsonlint")
+             https://codeberg.org/TobyGiacometti/SublimeDirectorySettings
+             -> ("TobyGiacometti", "SublimeDirectorySettings")
+    """
+    parts = urlparse(url)
+    path_parts = parts.path.strip("/").split("/")
+    if len(path_parts) < 2:
+        raise ValueError("Invalid *Hub repo URL")
+    return path_parts[0], path_parts[1]
+
+
+def remote_to_url(remote_url: str) -> str:
+    """
+    Parse out a Github HTTP URL from a remote URI:
+
+    r1 = remote_to_url("git://github.com/timbrel/GitSavvy.git")
+    assert r1 == "https://github.com/timbrel/GitSavvy"
+
+    r2 = remote_to_url("git@github.com:divmain/GitSavvy.git")
+    assert r2 == "https://github.com/divmain/GitSavvy"
+
+    r3 = remote_to_url("https://github.com/timbrel/GitSavvy.git")
+    assert r3 == "https://github.com/timbrel/GitSavvy"
+    """
+
+    if remote_url.endswith(".git"):
+        remote_url = remote_url[:-4]
+
+    if remote_url.startswith("git@"):
+        return remote_url.replace(":", "/").replace("git@", "https://")
+    elif remote_url.startswith("git://"):
+        return remote_url.replace("git://", "https://")
+    elif remote_url.startswith("http"):
+        return remote_url
+    else:
+        raise ValueError('Cannot parse remote "{}" and transform to url'.format(remote_url))
+
+
+def create_git_commit(*, changed_files: list[Path], commit_message: str) -> None:
+    if not changed_files:
+        return
+
+    file_args = [str(file) for file in changed_files]
+    run(["git", "add", "--", *file_args])
+    run(
+        ["git", "commit", "--quiet", "-F", "-", "--only", "--", *file_args],
+        input_text=commit_message
+    )
+
+
+def print_last_commit_patch() -> None:
+    patch = run_output(["git", "show", "--no-color", "--format=fuller", "--stat", "--patch"])
+ sys.stdout.write(patch) + + +def run(command: list[str], *, input_text: str | None = None) -> None: + subprocess.run( + command, + check=True, + text=True, + input=input_text, + ) + + +def run_output(command: list[str], *, input_text: str | None = None) -> str: + completed = subprocess.run( + command, + check=True, + text=True, + input=input_text, + capture_output=True, + ) + return completed.stdout + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tools/test_report_404_packages.py b/tools/test_report_404_packages.py new file mode 100644 index 00000000000..08d9969c069 --- /dev/null +++ b/tools/test_report_404_packages.py @@ -0,0 +1,632 @@ +from __future__ import annotations + +import json +from datetime import UTC, datetime +from pathlib import Path +from textwrap import dedent + +import pytest +from mockito import expect, mock, unstub, when + +from . import report_404_packages as script + + +@pytest.fixture +def lenient_unstub(): + """Same as the built in unstub fixture but doesn't check usage. + Typical to avoid. 
+ """ + try: + yield + finally: + unstub() + + +def test_collect_unreachable_packages_filters_by_source_reason_and_age(): + now = datetime(2026, 4, 22, tzinfo=UTC) + workspace = { + "packages": { + "NoName": { + "name": "NoName", + "details": "https://github.com/someone/NoName", + "source": "https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + "fail_reason": "fatal: 404 not found", + "failing_since": "2026-03-10T00:00:00Z", + }, + "Alpha": { + "name": "Alpha", + "source": "https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + "fail_reason": "fatal: 404 Could not resolve repository", + "failing_since": "2026-03-20T00:00:00Z", + }, + "TooYoung": { + "name": "TooYoung", + "source": "https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + "fail_reason": "fatal: 404 not found", + "failing_since": "2026-04-20T00:00:00Z", + }, + "WrongSource": { + "name": "WrongSource", + "source": "https://raw.githubusercontent.com/other/channel/master/repository.json", + "fail_reason": "fatal: 404 not found", + "failing_since": "2026-03-01T00:00:00Z", + }, + "WrongReason": { + "name": "WrongReason", + "source": "https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + "fail_reason": "403 forbidden", + "failing_since": "2026-03-01T00:00:00Z", + }, + } + } + + result = script.collect_unreachable_packages( + workspace, + allowed_sources=[ + "https://raw.githubusercontent.com/wbond/package_control_channel/", + ], + min_age_days=21, + now=now, + ) + + assert [item.name for item in result] == ["NoName", "Alpha"] + assert result[1].age_days == 33 + + +def test_collect_unreachable_packages_ignores_by_name_or_details(): + now = datetime(2026, 4, 22, tzinfo=UTC) + workspace = { + "packages": { + "Alpha": { + "name": "Alpha", + "details": "https://github.com/example/Alpha", + "source": 
"https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + "fail_reason": "fatal: 404 not found", + "failing_since": "2026-03-10T00:00:00Z", + }, + "Beta": { + "name": "Beta", + "details": "https://github.com/example/Beta", + "source": "https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + "fail_reason": "fatal: 404 not found", + "failing_since": "2026-03-11T00:00:00Z", + }, + "Gamma": { + "name": "Gamma", + "details": "https://github.com/example/Gamma", + "source": "https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + "fail_reason": "fatal: 404 not found", + "failing_since": "2026-03-12T00:00:00Z", + }, + } + } + + result = script.collect_unreachable_packages( + workspace, + allowed_sources=[ + "https://raw.githubusercontent.com/wbond/package_control_channel/", + ], + min_age_days=21, + ignored_identifiers={ + "Alpha", + "https://github.com/example/Beta", + }, + now=now, + ) + + assert [item.name for item in result] == ["Gamma"] + + +def test_resolve_ignored_identifiers_merges_cli_and_file(tmp_path): + ignore_file = tmp_path / "ignore.txt" + ignore_file.write_text( + "\n".join( + [ + "# ignore known packages", + "https://github.com/example/Alpha", + "Gamma, Delta", + "", + ] + ), + encoding="utf-8", + ) + + result = script.resolve_ignored_identifiers( + ignore_values=["Alpha, Beta"], + ignore_files=[str(ignore_file)], + ) + + assert result == { + "Alpha", + "Beta", + "Gamma", + "Delta", + "https://github.com/example/Alpha", + } + + +def test_resolve_ignored_identifiers_fails_for_missing_file(tmp_path): + missing = tmp_path / "missing-ignore.txt" + + with pytest.raises(SystemExit, match="Ignore file not found"): + script.resolve_ignored_identifiers( + ignore_values=None, + ignore_files=[str(missing)], + ) + + +@pytest.mark.parametrize( + ("origin_url", "expected"), + [ + ( + 
"https://github.com/wbond/package_control_channel.git\n", + [ + "https://raw.githubusercontent.com/sublimehq/package_control_channel/", + "https://raw.githubusercontent.com/wbond/package_control_channel/", + ], + ), + ( + "https://github.com/sublimehq/package_control_channel.git\n", + [ + "https://raw.githubusercontent.com/sublimehq/package_control_channel/", + "https://raw.githubusercontent.com/wbond/package_control_channel/", + ], + ), + ( + "https://github.com/SublimeLinter/package_control_channel.git\n", + [ + "https://raw.githubusercontent.com/SublimeLinter/package_control_channel/", + ], + ), + ], +) +def test_computes_allowed_sources_from_origin( + unstub, + origin_url: str, + expected: list[str], +): + when(script).run_output( + ["git", "config", "--get", "remote.origin.url"] + ).thenReturn(origin_url) + + assert script.compute_allowed_sources_from_origin() == expected + + +@pytest.mark.parametrize( + ("origin_url", "expected"), + [ + ( + "https://github.com/wbond/package_control_channel.git", + ("wbond", "package_control_channel"), + ), + ( + "https://github.com/SublimeLinter/package_control_channel", + ("SublimeLinter", "package_control_channel"), + ), + ( + "git@github.com:wbond/package_control_channel.git", + ("wbond", "package_control_channel"), + ), + ], +) +def test_parse_github_origin_owner_repo_parses_supported_forms( + origin_url: str, + expected: tuple[str, str], +): + assert script.parse_github_origin_owner_repo(origin_url) == expected + + +def test_parse_github_origin_owner_repo_rejects_unsupported_host(): + with pytest.raises(ValueError, match="Origin is not github.com"): + script.parse_github_origin_owner_repo( + "https://gitlab.com/wbond/package_control_channel.git" + ) + + +def test_parse_github_origin_owner_repo_rejects_missing_repo_segment(): + with pytest.raises(ValueError): + script.parse_github_origin_owner_repo("https://github.com/wbond") + + +def test_extract_package_name_prefers_name_then_details_repo(): + assert 
script.extract_package_name({"name": "DirectName"}) == "DirectName" + assert script.extract_package_name({"details": "https://github.com/user/RepoName"}) == "RepoName" + assert script.extract_package_name({"details": "https://example.invalid/no-name"}) is None + + +@pytest.mark.parametrize( + ("source_url", "expected"), + [ + ( + "https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + Path("repository.json"), + ), + ( + "https://packages.monokai.pro/packages.json", + Path("packages.json"), + ), + ( + "https://example.com/sublime/channel/packages.json", + Path("sublime/channel/packages.json"), + ), + ], +) +def test_resolve_source_file_path_supports_github_raw_and_custom_hosts( + unstub, + source_url: str, + expected: Path, +): + root = mock() + expect(root, between=(0,)).__truediv__(...).is_file().thenReturn(False) + when(root).__truediv__(expected).is_file().thenReturn(True) + assert script.resolve_source_file_path(source_url, root=root) == expected + + +def test_resolve_source_file_path_prefers_github_raw_layout(tmp_path): + (tmp_path / "repository.json").write_text("{}", encoding="utf-8") + fallback_file = ( + tmp_path + / "wbond" + / "package_control_channel" + / "refs" + / "heads" + / "master" + / "repository.json" + ) + fallback_file.parent.mkdir(parents=True) + fallback_file.write_text("{}", encoding="utf-8") + + result = script.resolve_source_file_path( + "https://raw.githubusercontent.com/wbond/package_control_channel/" + "refs/heads/master/repository.json", + root=tmp_path, + ) + + assert result == Path("repository.json") + + +@pytest.mark.parametrize( + ("source_url", "expected"), + [ + ( + "https://raw.githubusercontent.com/user/repo/main/packages.json", + Path("packages.json"), + ), + ( + "https://raw.githubusercontent.com/user/repo/refs/tags/v1.0.0/packages.json", + Path("packages.json"), + ), + ], +) +def test_resolve_source_file_path_maps_github_raw_layouts( + tmp_path, + source_url: str, + expected: 
Path, +): + (tmp_path / expected).write_text("{}", encoding="utf-8") + + assert script.resolve_source_file_path(source_url, root=tmp_path) == expected + + +def test_collect_channel_package_files_fails_when_source_url_cannot_be_mapped(tmp_path): + with pytest.raises(SystemExit, match="Failed to map source URL"): + list(script.iter_channel_package_files( + "https://example.com/not/in/checkout/packages.json", + root=tmp_path, + )) + + +def test_collect_channel_package_files_fails_when_include_is_missing(tmp_path): + (tmp_path / "repository.json").write_text( + json.dumps( + { + "schema_version": "3.0.0", + "packages": [], + "includes": ["./repository/missing.json"], + } + ), + encoding="utf-8", + ) + + with pytest.raises(SystemExit, match="is missing"): + list(script.iter_channel_package_files( + "https://raw.githubusercontent.com/wbond/package_control_channel/" + "refs/heads/master/repository.json", + root=tmp_path, + )) + + +def test_collect_channel_package_files_fails_when_include_escapes_repo_root(tmp_path): + (tmp_path / "repository.json").write_text( + json.dumps( + { + "schema_version": "3.0.0", + "packages": [], + "includes": ["../evil.json"], + } + ), + encoding="utf-8", + ) + (tmp_path.parent / "evil.json").write_text("{}", encoding="utf-8") + + with pytest.raises(SystemExit, match="escapes repository root"): + list(script.iter_channel_package_files( + "https://raw.githubusercontent.com/wbond/package_control_channel/" + "refs/heads/master/repository.json", + root=tmp_path, + )) + + +def test_render_commit_message_supports_singular_and_plural(): + single = [ + script.UnreachablePackage( + name="KarmaRunner", + details="https://github.com/knee-cola/KarmaRunner", + failing_since=datetime(2026, 3, 21, tzinfo=UTC), + age_days=31, + source="https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + ) + ] + plural = [ + *single, + script.UnreachablePackage( + name="LazyTimeTracker", + 
details="https://github.com/Bwata/LazyTimeTracker", + failing_since=datetime(2026, 3, 28, tzinfo=UTC), + age_days=24, + source="https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + ), + ] + + single_commit = script.render_commit_message(single) + assert single_commit == dedent( + """\ + Remove unreachable package KarmaRunner + + Remove KarmaRunner which responds with a 404 since 2026-03-21. + """ + ) + + plural_commit = script.render_commit_message(plural) + assert plural_commit == dedent( + """\ + Remove unreachable packages + + Remove the following packages which respond with 404s. + + - KarmaRunner [since 2026-03-21] + - LazyTimeTracker [since 2026-03-28] + """ + ) + + +def test_render_human_report_and_machine_report(): + packages = [ + script.UnreachablePackage( + name="KarmaRunner", + details="https://github.com/knee-cola/KarmaRunner", + failing_since=datetime(2026, 3, 21, tzinfo=UTC), + age_days=31, + source="https://raw.githubusercontent.com/wbond/package_control_channel/", + ), + script.UnreachablePackage( + name="LazyTimeTracker", + details="https://github.com/Bwata/LazyTimeTracker", + failing_since=datetime(2026, 3, 28, tzinfo=UTC), + age_days=24, + source="https://raw.githubusercontent.com/wbond/package_control_channel/", + ), + ] + + assert script.render_human_report(packages) == ( + "KarmaRunner [since 2026-03-21; 4 weeks]\n" + "LazyTimeTracker [since 2026-03-28; 3 weeks]\n" + ) + assert script.render_human_report([]) == "\n" + + assert script.render_machine_report(packages) == ( + "KarmaRunner\x00https://github.com/knee-cola/KarmaRunner\x002026-03-21T00:00:00Z\n" + "LazyTimeTracker\x00https://github.com/Bwata/LazyTimeTracker\x002026-03-28T00:00:00Z" + ) + assert script.render_machine_report([]) == "" + + +def test_render_pr_title_and_body_support_singular_and_plural(): + single = [ + script.UnreachablePackage( + name="testify", + details="https://github.com/example/testify", + 
failing_since=datetime(2026, 2, 28, tzinfo=UTC), + age_days=49, + source="https://raw.githubusercontent.com/wbond/package_control_channel/", + ) + ] + plural = [ + *single, + script.UnreachablePackage( + name="KarmaRunner", + details="https://github.com/example/KarmaRunner", + failing_since=datetime(2026, 3, 21, tzinfo=UTC), + age_days=31, + source="https://raw.githubusercontent.com/wbond/package_control_channel/", + ), + ] + + assert script.render_pr_title(single) == "Remove unreachable testify" + assert script.render_pr_title(plural) == "Remove unreachable packages" + + assert script.render_pr_body(single) == dedent( + """\ + Hi, thecrawl bot here! 👋 + + The following package responds with a 404: + + - **testify** [since 2026-02-28; 7 weeks] + + You can check the current [status](https://packages.sublimetext.io/status). + + This PR removes the package from the registry. + """ + ) + + assert script.render_pr_body(plural) == dedent( + """\ + Hi, thecrawl bot here! 👋 + + The following packages respond with 404s: + + - **testify** [since 2026-02-28; 7 weeks] + - **KarmaRunner** [since 2026-03-21; 4 weeks] + + You can check their current [status](https://packages.sublimetext.io/status). + + This PR removes the packages from the registry. + """ + ) + + +def test_write_pr_message_files_writes_expected_files(tmp_path): + packages = [ + script.UnreachablePackage( + name="testify", + details="https://github.com/example/testify", + failing_since=datetime(2026, 2, 28, tzinfo=UTC), + age_days=49, + source="https://raw.githubusercontent.com/wbond/package_control_channel/", + ) + ] + + script.write_pr_message_files(packages, root=tmp_path) + + assert (tmp_path / "pr_title.txt").read_text(encoding="utf-8") == "Remove unreachable testify\n" + assert (tmp_path / "pr_body.md").read_text(encoding="utf-8") == dedent( + """\ + Hi, thecrawl bot here! 
👋 + + The following package responds with a 404: + + - **testify** [since 2026-02-28; 7 weeks] + + You can check the current [status](https://packages.sublimetext.io/status). + + This PR removes the package from the registry. + """ + ) + + +def test_remove_packages_from_repository_uses_source_and_includes_once(tmp_path): + repository_root = tmp_path + (repository_root / "repository").mkdir() + + (repository_root / "repository.json").write_text( + json.dumps( + { + "schema_version": "3.0.0", + "packages": [ + {"name": "RootPackage", "details": "https://example.invalid/root"}, + {"name": "KeepRoot", "details": "https://example.invalid/keep-root"}, + ], + "includes": ["./repository/a.json"], + } + ), + encoding="utf-8", + ) + (repository_root / "repository" / "a.json").write_text( + json.dumps( + { + "schema_version": "3.0.0", + "packages": [ + {"name": "KarmaRunner", "details": "https://example.invalid/karma"}, + {"name": "KeepA", "details": "https://example.invalid/keep-a"}, + ], + "includes": ["./c.json"], + } + ), + encoding="utf-8", + ) + (repository_root / "repository" / "c.json").write_text( + json.dumps( + { + "schema_version": "3.0.0", + "packages": [ + {"name": "LazyTimeTracker", "details": "https://example.invalid/lazy"}, + ], + } + ), + encoding="utf-8", + ) + + unreachable_packages = [ + script.UnreachablePackage( + name="RootPackage", + details="https://example.invalid/root", + failing_since=datetime(2026, 3, 21, tzinfo=UTC), + age_days=31, + source="https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + ), + script.UnreachablePackage( + name="KarmaRunner", + details="https://example.invalid/karma", + failing_since=datetime(2026, 3, 21, tzinfo=UTC), + age_days=31, + source="https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + ), + script.UnreachablePackage( + name="LazyTimeTracker", + details="https://example.invalid/lazy", + failing_since=datetime(2026, 3, 
21, tzinfo=UTC), + age_days=31, + source="https://raw.githubusercontent.com/wbond/package_control_channel/refs/heads/master/repository.json", + ), + ] + + planned_files, removed_names = script.remove_packages_from_repository( + repository_root=repository_root, + unreachable_packages=unreachable_packages, + apply_changes=False, + ) + + assert planned_files == [ + (repository_root / "repository.json").resolve(), + (repository_root / "repository" / "a.json").resolve(), + ] + assert removed_names == {"RootPackage", "KarmaRunner"} + + changed_files, removed_names = script.remove_packages_from_repository( + repository_root=repository_root, + unreachable_packages=unreachable_packages, + apply_changes=True, + ) + + assert changed_files == [ + (repository_root / "repository.json").resolve(), + (repository_root / "repository" / "a.json").resolve(), + ] + assert removed_names == {"RootPackage", "KarmaRunner"} + + root_payload = json.loads((repository_root / "repository.json").read_text(encoding="utf-8")) + assert root_payload["packages"] == [ + {"name": "KeepRoot", "details": "https://example.invalid/keep-root"}, + ] + + a_payload = json.loads((repository_root / "repository" / "a.json").read_text(encoding="utf-8")) + assert a_payload["packages"] == [ + {"name": "KeepA", "details": "https://example.invalid/keep-a"}, + ] + + c_payload = json.loads((repository_root / "repository" / "c.json").read_text(encoding="utf-8")) + assert c_payload["packages"] == [ + {"name": "LazyTimeTracker", "details": "https://example.invalid/lazy"}, + ] + + +def test_ensure_paths_are_clean_fails_when_target_files_are_dirty(monkeypatch): + def fake_run_output(command: list[str], *, input_text: str | None = None) -> str: + assert command[:3] == ["git", "diff", "--name-only"] + assert input_text is None + return "repository/a.json\n" + + monkeypatch.setattr(script, "run_output", fake_run_output) + + with pytest.raises(SystemExit, match="target files are dirty"): + 
script.ensure_paths_are_clean([Path("repository/a.json")]) diff --git a/uv.lock b/uv.lock new file mode 100644 index 00000000000..2ade5c862cf --- /dev/null +++ b/uv.lock @@ -0,0 +1,107 @@ +version = 1 +revision = 3 +requires-python = ">=3.13" + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "mockito" +version = "2.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/8e/eff6dfa2c597ddc09d2c83a922f5154f00fa8bda90f68eda984a6d7506bd/mockito-2.0.4.tar.gz", hash = "sha256:d341f228c5b5ccf3c473751f6c6f09d8d83c5da49846a6ee31eb60b65607e0ef", size = 218582, 
upload-time = "2026-04-15T09:23:47.927Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/70/625563b5925ee5393a971f4790aab4b0059deda3bf9196c57ba7172e9ab3/mockito-2.0.4-py3-none-any.whl", hash = "sha256:04d7e6e9b9b7288e76235b894f9b3cbfdc9cfc631e548ed607933146ae9db1f5", size = 51921, upload-time = "2026-04-15T09:23:46.68Z" }, +] + +[[package]] +name = "package-control-channel" +version = "0.1.0" +source = { virtual = "." } + +[package.dev-dependencies] +dev = [ + { name = "mockito" }, + { name = "pytest" }, + { name = "pytest-mockito" }, +] + +[package.metadata] + +[package.metadata.requires-dev] +dev = [ + { name = "mockito", specifier = ">=2.0.4" }, + { name = "pytest", specifier = ">=9.0.0" }, + { name = "pytest-mockito", specifier = ">=0.0.6.post1" }, +] + +[[package]] +name = "packaging" +version = "26.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/de/0d2b39fb4af88a0258f3bac87dfcbb48e73fbdea4a2ed0e2213f9a4c2f9a/packaging-26.1.tar.gz", hash = "sha256:f042152b681c4bfac5cae2742a55e103d27ab2ec0f3d88037136b6bfe7c9c5de", size = 215519, upload-time = "2026-04-14T21:12:49.362Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/c2/920ef838e2f0028c8262f16101ec09ebd5969864e5a64c4c05fad0617c56/packaging-26.1-py3-none-any.whl", hash = "sha256:5d9c0669c6285e491e0ced2eee587eaf67b670d94a19e94e3984a481aba6802f", size = 95831, upload-time = "2026-04-14T21:12:47.56Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pygments" +version = "2.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/549bd94f1a0a402dc8cf64563a117c0f3765662e2e668477624baeec44d5/pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c", size = 1572165, upload-time = "2026-04-07T17:16:18.027Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249, upload-time = "2026-04-07T17:16:16.13Z" }, +] + +[[package]] +name = "pytest-mockito" +version = "0.0.6.post1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ 
+ { name = "mockito" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d2/13/56507ae9b513d4bac9ac5e412e7ff66d34827fc5c23e44fccde37a5c82ef/pytest_mockito-0.0.6.post1.tar.gz", hash = "sha256:67e26a69942de4c50001268c779700f8d722384d20385313a88e81b6d23fb71c", size = 3552, upload-time = "2026-02-10T09:36:18.784Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/9a/3bb38852013dbb5f03191f35f75823a38cc9e01d2a087c5dfe735dacae94/pytest_mockito-0.0.6.post1-py3-none-any.whl", hash = "sha256:b8384e1ac3e92828475fb6d24d6888e97acef52d9a77328d1333d48e9174ddbf", size = 3939, upload-time = "2026-02-10T09:36:17.552Z" }, +]