Refactor update_package with comprehensive error handling and integrate update scripts.

Add custom exception hierarchy (PackageUpdateError, PackageNotFoundError,
RepositoryStateError, ExternalCommandError, MissingToolError) for specific
error handling throughout package update operations.

Extract helper functions from 90-line update_package function:
- validate_package_info: Package validation
- resolve_package_directories: Path resolution
- ensure_package_checkout: Salsa checkout with error handling
- validate_repository_state: Check for uncommitted changes
- run_gbp_pq_workflow: Run gbp pq import/switch with error checks
- import_upstream_version: Import upstream using gbp
- run_command: Centralized subprocess execution with consistent errors
- check_required_tools: Validate required tools upfront

Incorporate update scripts as Python functions (replaces external shell scripts):
- update_debian_control: Update Standards-Version, remove obsolete fields
- update_debian_copyright_year: Update copyright years to current
- update_debian_watch: Upgrade watch files from version 4 to 5
- add_salsa_ci: Add debian/salsa-ci.yml if missing
- run_package_updates: Orchestrator for all update operations

Enhance network error handling in fetch_todo_list:
- Handle HTTP errors, network errors, timeouts, invalid JSON
- Add timeout parameter (default 30s)
- Graceful error messages in list_todos and update_todos

Add comprehensive test coverage:
- 67 tests for update-pkg functionality
- Tests for all helper functions and update operations
- Network error handling tests
- 125 total tests, all passing

Benefits:
- No external script dependencies
- Consistent error handling throughout
- Better error messages with actionable guidance
- All update logic in one maintainable codebase

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
Edward Betts 2026-02-01 16:55:50 +00:00
parent 822dcedde7
commit 3b7a07e1d9
2 changed files with 1580 additions and 47 deletions

View file

@ -5,12 +5,15 @@ packages where a new upstream version is available. Filters out pre-release
versions and shows team/uploader metadata.
"""
import datetime
import json
import re
import glob
import os
import shutil
import subprocess
import sys
import urllib.error
from pathlib import Path
from typing import Any, Optional, cast
from urllib.request import urlopen
@ -38,6 +41,97 @@ HIDE_UPLOADER = "Edward Betts <edward@4angle.com>"
DEBIAN_SRC_BASE = Path.home() / "src" / "debian"
class PackageUpdateError(Exception):
    """Base exception for package update operations.

    All more specific update errors derive from this class so callers can
    catch the whole family with a single except clause.
    """
class PackageNotFoundError(PackageUpdateError):
    """Raised when a package is missing from the source info map."""
class RepositoryStateError(PackageUpdateError):
    """Raised when a repository is in an invalid state (uncommitted changes, etc)."""
class ExternalCommandError(PackageUpdateError):
    """Raised when an external command fails (salsa, gbp, dch).

    Attributes:
        command: The command line that failed, as a single string.
        returncode: The command's exit status.
    """

    def __init__(self, command: str, returncode: int, message: str):
        super().__init__(message)
        self.command = command
        self.returncode = returncode
class MissingToolError(PackageUpdateError):
    """Raised when a required external tool is not available."""
def run_command(
    cmd: list[str],
    cwd: Path,
    description: str,
    capture_output: bool = False,
    check: bool = True,
) -> subprocess.CompletedProcess:
    """Run command with consistent error handling.

    Args:
        cmd: Command and arguments to run
        cwd: Working directory for the command
        description: Human-readable description of what the command does
        capture_output: Whether to capture stdout/stderr
        check: Whether to raise exception on non-zero exit

    Returns:
        CompletedProcess instance

    Raises:
        ExternalCommandError: If command fails and check=True
        MissingToolError: If command not found
    """
    # Keep the try body minimal: only subprocess.run can raise
    # FileNotFoundError here (executable not on PATH).
    try:
        result = subprocess.run(
            cmd,
            cwd=cwd,
            capture_output=capture_output,
            text=True,
        )
    except FileNotFoundError as e:
        raise MissingToolError(
            f"Command not found: {cmd[0]}. Please install it to continue."
        ) from e
    if check and result.returncode != 0:
        cmd_str = " ".join(cmd)
        message = f"{description} failed: {cmd_str} (exit code {result.returncode})"
        # When output was captured, surface the tool's stderr so the user
        # can see *why* it failed, not just that it did.
        if capture_output and result.stderr:
            message += f"\n{result.stderr.strip()}"
        raise ExternalCommandError(
            command=cmd_str,
            returncode=result.returncode,
            message=message,
        )
    return result
def check_required_tools() -> None:
    """Check that required external tools are available.

    Looks up each tool on PATH via shutil.which and reports every missing
    one in a single error.

    Raises:
        MissingToolError: If a required tool is not found
    """
    missing_tools = [
        tool for tool in ("salsa", "gbp", "dch") if shutil.which(tool) is None
    ]
    if missing_tools:
        tools_str = ", ".join(missing_tools)
        raise MissingToolError(
            f"Required tools not found: {tools_str}. Please install them to continue."
        )
def parse_details(details: str) -> tuple[str, Optional[str]]:
"""Parse version details string into new and current versions.
@ -195,11 +289,40 @@ def load_source_info_map() -> dict[str, SourceInfo]:
return vcs_by_source
def fetch_todo_list() -> TodoList:
"""Fetch the TODO list from UDD as JSON."""
with urlopen(TODO_URL) as response:
payload = response.read().decode("utf-8")
return cast(TodoList, json.loads(payload))
def fetch_todo_list(timeout: int = 30) -> TodoList:
    """Fetch the TODO list from UDD as JSON.

    Args:
        timeout: Request timeout in seconds (default: 30)

    Returns:
        List of TODO items

    Raises:
        PackageUpdateError: If network request fails or JSON is invalid
    """
    # Fetch and parse in one try: the urllib exceptions can only come from
    # urlopen/read, and JSONDecodeError only from json.loads, so each
    # handler still maps to exactly one failure mode.
    try:
        with urlopen(TODO_URL, timeout=timeout) as response:
            raw = response.read().decode("utf-8")
        return cast(TodoList, json.loads(raw))
    except urllib.error.HTTPError as err:
        raise PackageUpdateError(
            f"HTTP error {err.code} while fetching TODO list from {TODO_URL}: {err.reason}"
        ) from err
    except urllib.error.URLError as err:
        raise PackageUpdateError(
            f"Network error while fetching TODO list from {TODO_URL}: {err.reason}"
        ) from err
    except TimeoutError as err:
        raise PackageUpdateError(
            f"Timeout after {timeout}s while fetching TODO list from {TODO_URL}"
        ) from err
    except json.JSONDecodeError as err:
        raise PackageUpdateError(
            f"Invalid JSON in TODO list response: {err}"
        ) from err
def save_todo_list(todo_list: TodoList) -> None:
@ -328,8 +451,13 @@ def list_todos(include_prerelease: bool) -> None:
"""
if not TODO_PATH.exists():
print("Downloading todo.json...")
todo_list = fetch_todo_list()
save_todo_list(todo_list)
try:
todo_list = fetch_todo_list()
save_todo_list(todo_list)
except PackageUpdateError as e:
print(f"Error: {e}", file=sys.stderr)
print("Please try again later or check your network connection.", file=sys.stderr)
sys.exit(1)
else:
with TODO_PATH.open("r", encoding="utf-8") as handle:
todo_list = cast(TodoList, json.load(handle))
@ -394,7 +522,14 @@ def update_todos() -> None:
if TODO_PATH.exists():
with TODO_PATH.open("r", encoding="utf-8") as handle:
old_list = cast(TodoList, json.load(handle))
todo_list = fetch_todo_list()
try:
todo_list = fetch_todo_list()
except PackageUpdateError as e:
print(f"Error: {e}", file=sys.stderr)
print("Failed to fetch updated TODO list.", file=sys.stderr)
sys.exit(1)
save_todo_list(todo_list)
print_changes(filter_todo_list(old_list), filter_todo_list(todo_list))
@ -464,21 +599,39 @@ def add_changelog_entry(pkg_dir: Path) -> bool:
return result.returncode == 0
def validate_package_info(package: str) -> SourceInfo:
    """Validate package exists in source info map.

    Args:
        package: Package name to validate

    Returns:
        Source info dictionary containing vcs_git and uploaders

    Raises:
        PackageNotFoundError: If package not found or has no vcs_git
    """
    source_info = load_source_info_map().get(package)
    # A package without a Vcs-Git entry cannot be checked out, so treat it
    # the same as an unknown package.
    if not source_info or not source_info.get("vcs_git"):
        raise PackageNotFoundError(
            f"Could not find team info for package '{package}'"
        )
    return source_info
def resolve_package_directories(package: str, source_info: SourceInfo) -> tuple[Path, Path, Path]:
"""Resolve team_dir, pkg_dir, and repo_dir paths.
Args:
package: Package name
source_info: Source info dictionary with vcs_git
Returns:
Tuple of (team_dir, pkg_dir, repo_dir)
"""
team_slug = source_info["vcs_git"]
display_team = team_slug_to_display_name(team_slug).lower()
@ -486,46 +639,508 @@ def update_package(package: str) -> None:
pkg_dir = team_dir / package
repo_dir = pkg_dir / package
# Create package directory if it doesn't exist
pkg_dir.mkdir(parents=True, exist_ok=True)
return team_dir, pkg_dir, repo_dir
if not repo_dir.exists():
# Package not checked out, do salsa checkout
print(f"Checking out {package} into {pkg_dir}...")
salsa_path = f"{team_slug}/deps/{package}"
result = subprocess.run(
["salsa", "checkout", salsa_path],
cwd=pkg_dir,
def ensure_package_checkout(package: str, source_info: SourceInfo, pkg_dir: Path) -> None:
    """Checkout package from salsa.

    Creates pkg_dir (including parents) and runs `salsa checkout` in it.

    Args:
        package: Package name
        source_info: Source info dictionary with vcs_git
        pkg_dir: Directory to check out into

    Raises:
        ExternalCommandError: If salsa checkout fails
        RepositoryStateError: If directory creation fails
    """
    try:
        pkg_dir.mkdir(parents=True, exist_ok=True)
    except OSError as err:
        raise RepositoryStateError(
            f"Failed to create directory {pkg_dir}: {err}"
        ) from err
    print(f"Checking out {package} into {pkg_dir}...")
    salsa_path = f"{source_info['vcs_git']}/deps/{package}"
    run_command(["salsa", "checkout", salsa_path], pkg_dir, f"Checkout {salsa_path}")
def validate_repository_state(repo_dir: Path) -> None:
    """Validate repository has no uncommitted changes.

    Args:
        repo_dir: Repository directory to check

    Raises:
        RepositoryStateError: If there are uncommitted changes
    """
    if not has_uncommitted_changes(repo_dir):
        return
    raise RepositoryStateError(
        f"{repo_dir} has uncommitted changes. Please commit or stash them first."
    )
if result.returncode != 0:
print(f"Error: salsa checkout failed for {salsa_path}", file=sys.stderr)
sys.exit(1)
else:
# Package already checked out, check for uncommitted changes
if has_uncommitted_changes(repo_dir):
print(
f"Error: {repo_dir} has uncommitted changes. "
"Please commit or stash them first.",
file=sys.stderr,
def run_gbp_pq_workflow(repo_dir: Path) -> None:
    """Run gbp pq import and switch.

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If gbp pq commands fail
    """
    # Import must run before switch; keep the pair in this exact order.
    steps = (
        (["gbp", "pq", "import"], "Import patch queue"),
        (["gbp", "pq", "switch"], "Switch to patch branch"),
    )
    for cmd, description in steps:
        run_command(cmd, repo_dir, description)
def import_upstream_version(repo_dir: Path) -> None:
    """Import new upstream version using gbp.

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If gbp import-orig fails
    """
    print("Importing new upstream version...")
    import_cmd = ["gbp", "import-orig", "--uscan", "--pristine-tar", "--no-interactive"]
    run_command(import_cmd, repo_dir, "Import upstream version")
def update_debian_control(repo_dir: Path) -> None:
    """Update debian/control file.

    - Removes obsolete 'Priority: optional' (now default)
    - Removes obsolete 'Rules-Requires-Root: no' (now default)
    - Updates Standards-Version to current (4.7.3)

    Each change also records a changelog entry via dch.

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If dch command fails
    """
    control_path = repo_dir / "debian" / "control"
    if not control_path.exists():
        return
    current_standards_version = "4.7.3"
    original = control_path.read_text()
    new_lines = []
    for line in original.splitlines():
        if line == "Priority: optional":
            # Drop the field entirely; dch records why.
            run_command(
                ["dch", "-a", "Remove 'Priority: optional', now the default."],
                repo_dir,
                "Add changelog entry for Priority removal",
            )
            continue
        if line == "Rules-Requires-Root: no":
            run_command(
                ["dch", "-a", "Remove 'Rules-Requires-Root: no', now the default."],
                repo_dir,
                "Add changelog entry for Rules-Requires-Root removal",
            )
            continue
        if line.startswith("Standards-Version: "):
            standards_version = line[len("Standards-Version: "):]
            if standards_version != current_standards_version:
                line = "Standards-Version: " + current_standards_version
                run_command(
                    ["dch", "-a", "Update Standards-Version."],
                    repo_dir,
                    "Add changelog entry for Standards-Version update",
                )
        new_lines.append(line)
    updated = "\n".join(new_lines) + "\n"
    # Only rewrite the file when something actually changed, so an
    # already-current control file is left untouched.
    if updated != original:
        control_path.write_text(updated)
def update_debian_copyright_year(repo_dir: Path) -> None:
    """Update Edward Betts' copyright years in debian/copyright.

    Transforms:
    - "Copyright: 2025 Edward Betts <...>" -> "Copyright: 2025-2026 Edward Betts <...>"
    - "Copyright: 2022-2024 Edward Betts <...>" -> "Copyright: 2022-2026 Edward Betts <...>"

    The target year is the current calendar year.  If any line changed, the
    file is rewritten and a changelog entry is added via dch.

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If dch command fails
    """
    copyright_path = repo_dir / "debian" / "copyright"
    if not copyright_path.exists():
        return
    full_name_email = "Edward Betts <edward@4angle.com>"
    year = datetime.date.today().year
    original = copyright_path.read_text(encoding="utf-8")
    holder_re = re.escape(full_name_email)
    # Matches a whole line of the form "[indent][Copyright: ]YYYY[-YYYY] <holder>".
    # The "Copyright:" label is optional so continuation lines inside a
    # Copyright field also match (label group is then None).
    copyright_line_re = re.compile(
        rf"^(?P<indent>\s*)"
        rf"(?:(?P<label>Copyright:\s+))?"
        rf"(?P<years>\d{{4}}(?:-\d{{4}})?)"
        rf"(?P<suffix>\s+{holder_re}\s*)$"
    )
    copyright_field_start_re = re.compile(r"^\s*Copyright:\s+")
    # Any "Field:" start (DEP-5 style) — used to detect when we leave a
    # Copyright field.
    field_start_re = re.compile(r"^(?:\S[^:]*):\s*")
    continuation_re = re.compile(r"^\s+")

    def update_years_token(years: str) -> Optional[str]:
        """Return an updated years token, or None if no change is needed."""
        if "-" in years:
            start_s, end_s = years.split("-", 1)
            start = int(start_s)
            end = int(end_s)
        else:
            start = end = int(years)
        # Already current (or in the future): leave untouched.
        if end >= year:
            return None
        return f"{start}-{year}" if start != year else str(year)

    lines = original.splitlines(keepends=True)
    out: list[str] = []
    # Tracks whether the current line is (a continuation) inside a
    # "Copyright:" field, so unlabelled holder lines are only rewritten there.
    in_copyright_field = False
    for line in lines:
        stripped = line.rstrip("\n")
        if field_start_re.match(stripped) and not copyright_field_start_re.match(stripped):
            in_copyright_field = False
        if copyright_field_start_re.match(stripped):
            in_copyright_field = True
        m = copyright_line_re.match(stripped)
        should_consider = False
        if m and m.group("label"):
            # Explicit "Copyright: ..." line — always a candidate.
            should_consider = True
        elif m and in_copyright_field and continuation_re.match(stripped):
            # Indented continuation line within a Copyright field.
            should_consider = True
        if should_consider and m:
            new_years = update_years_token(m.group("years"))
            if new_years is not None:
                indent = m.group("indent")
                label = m.group("label") or ""
                suffix = m.group("suffix")
                # NOTE(review): the rebuilt line always ends with "\n"; a
                # final line without a trailing newline would gain one.
                line = f"{indent}{label}{new_years}{suffix}\n"
        out.append(line)
    updated = "".join(out)
    if updated != original:
        copyright_path.write_text(updated, encoding="utf-8")
        run_command(
            ["dch", "-a", "Update copyright year."],
            repo_dir,
            "Add changelog entry for copyright year update",
        )
def update_debian_watch(repo_dir: Path) -> None:
    """Upgrade debian/watch from version 4 to version 5.

    Supports GitHub and PyPI watch entries.  Files that already declare
    version 5, or whose URLs match neither service, are left untouched.
    When the file is rewritten, a changelog entry is added via dch.

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If dch command fails
    """
    watch_path = repo_dir / "debian" / "watch"
    if not watch_path.exists():
        return
    # Guard against the file disappearing between the exists() check and
    # the read; treat that the same as "no watch file".
    try:
        original = watch_path.read_text(encoding="utf-8")
    except FileNotFoundError:
        return
    # Already version 5
    if re.search(r"(?im)^\s*Version:\s*5\s*$", original):
        return
    # GitHub URL patterns
    github_url_re = re.compile(
        r"""(?xi)
        https?://github\.com/
        (?P<owner>[^/\s]+)/(?P<repo>[^/\s#?]+)
        (?:/(?:releases|tags))?
        (?:[^\s]*)?
        """
    )
    # PyPI URL patterns
    pypi_debian_net_re = re.compile(
        r"""(?xi)
        https?://pypi\.debian\.net/
        (?P<name>[a-z0-9][a-z0-9._-]*)
        /
        """
    )
    pypi_python_org_source_re = re.compile(
        r"""(?xi)
        https?://pypi(?:\.python)?\.org/
        packages/source/
        (?P<letter>[a-z0-9])/
        (?P<name>[a-z0-9][a-z0-9._-]*)
        /
        """
    )
    pythonhosted_re = re.compile(
        r"""(?xi)
        https?://files\.pythonhosted\.org/
        packages/source/
        (?P<letter>[a-z0-9])/
        (?P<name>[a-z0-9][a-z0-9._-]*)
        /
        """
    )
    # Join wrapped lines: a trailing backslash continues the entry on the
    # next line, so URLs split across lines still match the patterns above.
    joined_lines = []
    buf = ""
    for line in original.splitlines():
        if line.rstrip().endswith("\\"):
            buf += line.rstrip()[:-1] + " "
        else:
            joined_lines.append(buf + line)
            buf = ""
    if buf:
        joined_lines.append(buf)
    joined = "\n".join(joined_lines)
    # Try GitHub first
    m = github_url_re.search(joined)
    if m:
        owner, repo = m.group("owner"), m.group("repo")
        if repo.endswith(".git"):
            repo = repo[:-4]
        new_body = (
            "Version: 5\n"
            "\n"
            "Template: GitHub\n"
            f"Owner: {owner}\n"
            f"Project: {repo}\n"
        )
    else:
        # Try PyPI
        pypi_name = None
        for pattern in [pypi_debian_net_re, pypi_python_org_source_re, pythonhosted_re]:
            m = pattern.search(joined)
            if m:
                pypi_name = m.group("name")
                break
        if not pypi_name:
            # Can't upgrade this watch file
            return
        new_body = f"Version: 5\n\nTemplate: Pypi\nDist: {pypi_name}\n"
    # Preserve leading comments (and blank lines) from the top of the old
    # file as a header above the generated version-5 body.
    leading = []
    for line in original.splitlines(keepends=True):
        if line.lstrip().startswith("#") or not line.strip():
            leading.append(line)
            continue
        break
    header = "".join(leading).rstrip()
    if header:
        header += "\n\n"
    new_contents = header + new_body
    if new_contents != original:
        watch_path.write_text(new_contents, encoding="utf-8")
        run_command(
            ["dch", "-a", "Update debian/watch to format version 5."],
            repo_dir,
            "Add changelog entry for debian/watch update",
        )
def add_salsa_ci(repo_dir: Path) -> None:
    """Add debian/salsa-ci.yml if missing.

    Derives the salsa project path from the Vcs-Git field in debian/control,
    writes a standard pipeline include file, stages it in git, and records a
    changelog entry.  No-op when the CI file already exists or the project
    path cannot be determined.

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If git or dch commands fail
    """
    ci_path = repo_dir / "debian" / "salsa-ci.yml"
    if ci_path.exists():
        return
    control_path = repo_dir / "debian" / "control"
    if not control_path.exists():
        return
    # Extract the salsa project path from the Vcs-Git field.
    vcs_git_re = re.compile(r"^Vcs-Git: https://salsa\.debian\.org/(.*).git$")
    salsa_repo = None
    for raw_line in control_path.read_text().splitlines():
        if not raw_line.startswith("Vcs-Git"):
            continue
        match = vcs_git_re.match(raw_line.rstrip())
        if match:
            salsa_repo = match.group(1)
            break
    if salsa_repo is None:
        return
    ci_path.write_text(
        "---\n"
        "include:\n"
        "  - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/recipes/debian.yml\n"
    )
    run_command(
        ["git", "add", str(ci_path)],
        repo_dir,
        "Add debian/salsa-ci.yml to git",
    )
    run_command(
        ["dch", "-a", "Add debian/salsa-ci.yml."],
        repo_dir,
        "Add changelog entry for salsa-ci.yml",
    )
    # Update salsa project settings for homeassistant-team
    if salsa_repo.startswith("homeassistant-team"):
        run_command(
            [
                "salsa",
                "update_projects",
                salsa_repo,
                "--jobs",
                "yes",
                "--ci-config-path",
                "debian/salsa-ci.yml",
            ],
            repo_dir,
            "Update salsa project CI settings",
            check=False,  # Don't fail if this doesn't work
        )
def run_package_updates(repo_dir: Path) -> None:
    """Run all package update operations.

    Performs the following updates, in order:
    - Update debian/control (remove obsolete fields, update Standards-Version)
    - Update copyright year
    - Upgrade debian/watch to version 5
    - Add debian/salsa-ci.yml if missing

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If any update command fails
    """
    print("Updating package files...")
    for step in (
        update_debian_control,
        update_debian_copyright_year,
        update_debian_watch,
        add_salsa_ci,
    ):
        step(repo_dir)
def update_package(package: str) -> None:
    """Update a Debian package to a new upstream version.

    Checks out the package if not already present, imports the new upstream
    version using gbp, creates a new changelog entry, and runs the standard
    packaging-file updates.

    Raises:
        PackageNotFoundError: If package not found in source info
        RepositoryStateError: If repository has uncommitted changes
        ExternalCommandError: If any external command fails
        MissingToolError: If required tools are not available
    """
    # Validate the package and check required tools up front so we fail
    # before touching the working tree.
    source_info = validate_package_info(package)
    check_required_tools()

    # Resolve directory paths
    team_dir, pkg_dir, repo_dir = resolve_package_directories(package, source_info)

    # Checkout package if needed, or validate existing checkout
    if not repo_dir.exists():
        ensure_package_checkout(package, source_info, pkg_dir)
    else:
        validate_repository_state(repo_dir)
        print(f"Package {package} already checked out at {pkg_dir}")

    # Run gbp pq workflow
    run_gbp_pq_workflow(repo_dir)

    # Import new upstream version
    import_upstream_version(repo_dir)

    # Add a changelog entry for the newly imported version
    version = extract_upstream_version_from_git_log(repo_dir)
    if not version:
        raise ExternalCommandError(
            "git log",
            0,
            "Could not find upstream version in git log.",
        )
    new_version = f"{version}-1"
    run_command(
        [
            "dch",
            "--release-heuristic",
            "log",
            "--newversion",
            new_version,
            "New upstream release.",
        ],
        repo_dir,
        "Add changelog entry",
    )

    # Run package updates
    run_package_updates(repo_dir)

    print(f"Successfully updated {package}")
    print(repo_dir)
@ -557,7 +1172,11 @@ def update_command() -> None:
@cli.command("update-pkg", help="Update a package to a new upstream version.")
@click.argument("package")
def update_pkg_command(package: str) -> None:
    """CLI entry point: update PACKAGE to a new upstream version.

    Converts any PackageUpdateError into a message on stderr and exit code 1.
    """
    try:
        update_package(package)
    except PackageUpdateError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
def main() -> None: