Add docstrings and auto-download todo.json if missing
- Add module-level and function docstrings throughout
- Download todo.json from UDD automatically when running the 'list' command if the file doesn't exist locally

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
65789b0e5b
commit
e80f511155
1 changed file with 75 additions and 2 deletions
77
todo
77
todo
|
|
@ -1,4 +1,10 @@
|
|||
#!/usr/bin/python3
|
||||
"""CLI tool for tracking Debian packages with new upstream versions.
|
||||
|
||||
Fetches TODO items from the Debian UDD (Ultimate Debian Database) and displays
|
||||
packages where a new upstream version is available. Filters out pre-release
|
||||
versions and shows team/uploader metadata.
|
||||
"""
|
||||
|
||||
import json
|
||||
import re
|
||||
|
|
@ -31,6 +37,15 @@ HIDE_UPLOADER = "Edward Betts <edward@4angle.com>"
|
|||
|
||||
|
||||
def parse_details(details: str) -> tuple[str, Optional[str]]:
|
||||
"""Parse version details string into new and current versions.
|
||||
|
||||
Args:
|
||||
details: String like "1.2.3 (currently in unstable: 1.2.2-1)"
|
||||
|
||||
Returns:
|
||||
Tuple of (new_version, current_version). current_version is None
|
||||
if not present in the input.
|
||||
"""
|
||||
match = CURRENTLY_RE.match(details)
|
||||
if match:
|
||||
return match.group("new").strip(), match.group("current").strip()
|
||||
|
|
@ -38,11 +53,20 @@ def parse_details(details: str) -> tuple[str, Optional[str]]:
|
|||
|
||||
|
||||
def is_prerelease_version(details: str) -> bool:
    """Return True when the new version in *details* looks like a pre-release.

    A pre-release is any version carrying an alpha, beta, rc, a, or b suffix.
    """
    candidate, _current = parse_details(details)
    return PRERELEASE_RE.search(candidate) is not None
|
||||
|
||||
|
||||
def vcs_git_to_team(vcs_git: Optional[str]) -> Optional[str]:
|
||||
"""Extract team name from a Vcs-Git URL.
|
||||
|
||||
For salsa.debian.org URLs, extracts the group/team name.
|
||||
Returns the full URL for non-Salsa repositories.
|
||||
"""
|
||||
if not vcs_git:
|
||||
return None
|
||||
match = re.search(r"salsa\.debian\.org/([^/]+)/", vcs_git)
|
||||
|
|
@ -52,12 +76,22 @@ def vcs_git_to_team(vcs_git: Optional[str]) -> Optional[str]:
|
|||
|
||||
|
||||
def normalize_uploaders(uploaders: str) -> str:
    """Tidy a comma-separated uploaders field for display.

    Each entry is stripped of surrounding whitespace and stray commas,
    the configured HIDE_UPLOADER entry is dropped, and the remaining
    entries are joined one per line.
    """
    kept: list[str] = []
    for raw in uploaders.split(","):
        entry = raw.strip().strip(",")
        if entry and entry != HIDE_UPLOADER:
            kept.append(entry)
    return "\n".join(kept)
|
||||
|
||||
|
||||
def load_cache(source_paths: list[str]) -> Optional[dict[str, SourceInfo]]:
|
||||
"""Load cached Vcs-Git and uploader info if still valid.
|
||||
|
||||
Returns None if cache is missing, corrupted, or stale (based on
|
||||
Sources file mtimes or cache version mismatch).
|
||||
"""
|
||||
try:
|
||||
with CACHE_PATH.open("r", encoding="utf-8") as handle:
|
||||
data = json.load(handle)
|
||||
|
|
@ -100,6 +134,10 @@ def load_cache(source_paths: list[str]) -> Optional[dict[str, SourceInfo]]:
|
|||
|
||||
|
||||
def save_cache(source_paths: list[str], vcs_by_source: dict[str, SourceInfo]) -> None:
|
||||
"""Save Vcs-Git and uploader info to cache file.
|
||||
|
||||
Stores current mtimes of Sources files for cache invalidation.
|
||||
"""
|
||||
sources_mtimes: dict[str, float] = {}
|
||||
for path in source_paths:
|
||||
try:
|
||||
|
|
@ -119,6 +157,11 @@ def save_cache(source_paths: list[str], vcs_by_source: dict[str, SourceInfo]) ->
|
|||
|
||||
|
||||
def load_source_info_map() -> dict[str, SourceInfo]:
|
||||
"""Load Vcs-Git and uploader info for all source packages.
|
||||
|
||||
Parses APT Sources files from /var/lib/apt/lists/ and extracts
|
||||
Vcs-Git URLs and Uploaders fields. Results are cached to disk.
|
||||
"""
|
||||
source_paths = sorted(glob.glob("/var/lib/apt/lists/*Sources"))
|
||||
cached = load_cache(source_paths)
|
||||
if cached is not None:
|
||||
|
|
@ -151,18 +194,21 @@ def load_source_info_map() -> dict[str, SourceInfo]:
|
|||
|
||||
|
||||
def fetch_todo_list() -> TodoList:
    """Download and decode the UDD TODO list.

    Performs a blocking HTTP fetch of TODO_URL and parses the UTF-8
    JSON payload into a TodoList.
    """
    with urlopen(TODO_URL) as response:
        raw_bytes = response.read()
    return cast(TodoList, json.loads(raw_bytes.decode("utf-8")))
|
||||
|
||||
|
||||
def save_todo_list(todo_list: TodoList) -> None:
    """Write the TODO list to TODO_PATH as pretty-printed JSON.

    Output is ASCII-safe, indented by two spaces, and terminated with a
    trailing newline.
    """
    serialized = json.dumps(todo_list, indent=2, ensure_ascii=True) + "\n"
    with TODO_PATH.open("w", encoding="utf-8") as fh:
        fh.write(serialized)
|
||||
|
||||
|
||||
def summarize_sources(todo_list: TodoList) -> set[str]:
|
||||
"""Extract set of source package names from TODO list."""
|
||||
sources: set[str] = set()
|
||||
for item in todo_list:
|
||||
source = item.get(":source")
|
||||
|
|
@ -172,6 +218,11 @@ def summarize_sources(todo_list: TodoList) -> set[str]:
|
|||
|
||||
|
||||
def load_notes() -> dict[str, str]:
|
||||
"""Load per-package notes from the notes file.
|
||||
|
||||
Each line should be: <source-package> <note text>
|
||||
Multiple notes for the same package are joined with semicolons.
|
||||
"""
|
||||
if not NOTES_PATH.exists():
|
||||
return {}
|
||||
notes_by_source: dict[str, list[str]] = {}
|
||||
|
|
@ -193,6 +244,11 @@ def load_notes() -> dict[str, str]:
|
|||
|
||||
|
||||
def filter_todo_list(todo_list: TodoList, include_prerelease: bool = False) -> TodoList:
|
||||
"""Filter TODO list to only new upstream version items.
|
||||
|
||||
Removes non-upstream items, pre-releases (unless include_prerelease=True),
|
||||
and items where normalized versions already match.
|
||||
"""
|
||||
filtered: TodoList = []
|
||||
for item in todo_list:
|
||||
shortname = item.get(":shortname")
|
||||
|
|
@ -214,6 +270,10 @@ def filter_todo_list(todo_list: TodoList, include_prerelease: bool = False) -> T
|
|||
|
||||
|
||||
def normalize_upstream_version(version: str) -> str:
|
||||
"""Strip epoch and Debian revision from version string.
|
||||
|
||||
"1:2.3.4-5" -> "2.3.4"
|
||||
"""
|
||||
if ":" in version:
|
||||
version = version.split(":", 1)[1]
|
||||
if "-" in version:
|
||||
|
|
@ -222,6 +282,8 @@ def normalize_upstream_version(version: str) -> str:
|
|||
|
||||
|
||||
def print_changes(old_list: TodoList, new_list: TodoList) -> None:
|
||||
"""Print added and removed packages between two TODO lists."""
|
||||
|
||||
def format_details(details: str) -> str:
|
||||
new_version, current_version = parse_details(details)
|
||||
display_new = new_version
|
||||
|
|
@ -258,8 +320,17 @@ def print_changes(old_list: TodoList, new_list: TodoList) -> None:
|
|||
|
||||
|
||||
def list_todos(include_prerelease: bool) -> None:
|
||||
with TODO_PATH.open("r", encoding="utf-8") as handle:
|
||||
todo_list = cast(TodoList, json.load(handle))
|
||||
"""Display filtered TODO items in a table.
|
||||
|
||||
Downloads todo.json from UDD if not present locally.
|
||||
"""
|
||||
if not TODO_PATH.exists():
|
||||
print("Downloading todo.json...")
|
||||
todo_list = fetch_todo_list()
|
||||
save_todo_list(todo_list)
|
||||
else:
|
||||
with TODO_PATH.open("r", encoding="utf-8") as handle:
|
||||
todo_list = cast(TodoList, json.load(handle))
|
||||
|
||||
source_info_map = load_source_info_map()
|
||||
notes_by_source = load_notes()
|
||||
|
|
@ -315,6 +386,7 @@ def list_todos(include_prerelease: bool) -> None:
|
|||
console.print(f"Packages: {len(filtered)}")
|
||||
|
||||
def update_todos() -> None:
|
||||
"""Fetch latest TODO list from UDD and show changes."""
|
||||
old_list: TodoList = []
|
||||
if TODO_PATH.exists():
|
||||
with TODO_PATH.open("r", encoding="utf-8") as handle:
|
||||
|
|
@ -326,6 +398,7 @@ def update_todos() -> None:
|
|||
@click.group(invoke_without_command=True)
@click.pass_context
def cli(context: click.Context) -> None:
    """Track Debian packages with new upstream versions."""
    # Bare invocation with no subcommand defaults to listing TODOs
    # without pre-release versions.
    if context.invoked_subcommand is not None:
        return
    list_todos(include_prerelease=False)
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue