Refactor update_package with comprehensive error handling and integrate update scripts.
Add custom exception hierarchy (PackageUpdateError, PackageNotFoundError, RepositoryStateError, ExternalCommandError, MissingToolError) for specific error handling throughout package update operations. Extract helper functions from 90-line update_package function: - validate_package_info: Package validation - resolve_package_directories: Path resolution - ensure_package_checkout: Salsa checkout with error handling - validate_repository_state: Check for uncommitted changes - run_gbp_pq_workflow: Run gbp pq import/switch with error checks - import_upstream_version: Import upstream using gbp - run_command: Centralized subprocess execution with consistent errors - check_required_tools: Validate required tools upfront Incorporate update scripts as Python functions (replaces external shell scripts): - update_debian_control: Update Standards-Version, remove obsolete fields - update_debian_copyright_year: Update copyright years to current - update_debian_watch: Upgrade watch files from version 4 to 5 - add_salsa_ci: Add debian/salsa-ci.yml if missing - run_package_updates: Orchestrator for all update operations Enhance network error handling in fetch_todo_list: - Handle HTTP errors, network errors, timeouts, invalid JSON - Add timeout parameter (default 30s) - Graceful error messages in list_todos and update_todos Add comprehensive test coverage: - 67 tests for update-pkg functionality - Tests for all helper functions and update operations - Network error handling tests - 125 total tests, all passing Benefits: - No external script dependencies - Consistent error handling throughout - Better error messages with actionable guidance - All update logic in one maintainable codebase Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
822dcedde7
commit
3b7a07e1d9
2 changed files with 1580 additions and 47 deletions
|
|
@ -5,12 +5,15 @@ packages where a new upstream version is available. Filters out pre-release
|
|||
versions and shows team/uploader metadata.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import re
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib.error
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional, cast
|
||||
from urllib.request import urlopen
|
||||
|
|
@ -38,6 +41,97 @@ HIDE_UPLOADER = "Edward Betts <edward@4angle.com>"
|
|||
DEBIAN_SRC_BASE = Path.home() / "src" / "debian"
|
||||
|
||||
|
||||
class PackageUpdateError(Exception):
    """Base exception for package update operations."""


class PackageNotFoundError(PackageUpdateError):
    """Package not found in source info."""


class RepositoryStateError(PackageUpdateError):
    """Repository in invalid state (uncommitted changes, etc)."""


class ExternalCommandError(PackageUpdateError):
    """External command failed (salsa, gbp, dch).

    Attributes:
        command: The command string that failed.
        returncode: The command's exit status.
    """

    def __init__(self, command: str, returncode: int, message: str):
        self.command = command
        self.returncode = returncode
        super().__init__(message)


class MissingToolError(PackageUpdateError):
    """Required external tool not available."""
|
||||
|
||||
|
||||
def run_command(
    cmd: list[str],
    cwd: Path,
    description: str,
    capture_output: bool = False,
    check: bool = True,
) -> subprocess.CompletedProcess:
    """Run command with consistent error handling.

    Args:
        cmd: Command and arguments to run
        cwd: Working directory for the command
        description: Human-readable description of what the command does
        capture_output: Whether to capture stdout/stderr
        check: Whether to raise exception on non-zero exit

    Returns:
        CompletedProcess instance

    Raises:
        ExternalCommandError: If command fails and check=True
        MissingToolError: If command not found
    """
    try:
        result = subprocess.run(
            cmd,
            cwd=cwd,
            capture_output=capture_output,
            text=True,
        )
    except FileNotFoundError as e:
        raise MissingToolError(
            f"Command not found: {cmd[0]}. Please install it to continue."
        ) from e

    # Keep the failure check outside the try so it can never be confused
    # with the FileNotFoundError handling above.
    if check and result.returncode != 0:
        cmd_str = " ".join(cmd)
        message = f"{description} failed: {cmd_str} (exit code {result.returncode})"
        # When output was captured, surface stderr so the error is actionable
        # instead of silently discarding the diagnostic text.
        if capture_output and result.stderr:
            message += f"\nstderr: {result.stderr.strip()}"
        raise ExternalCommandError(
            command=cmd_str,
            returncode=result.returncode,
            message=message,
        )
    return result
|
||||
|
||||
|
||||
def check_required_tools() -> None:
    """Verify that every external tool needed for updates is on PATH.

    The update workflow shells out to salsa, gbp and dch; failing fast here
    gives one clear message instead of a mid-run failure.

    Raises:
        MissingToolError: If a required tool is not found
    """
    missing_tools = [
        tool for tool in ("salsa", "gbp", "dch") if shutil.which(tool) is None
    ]

    if missing_tools:
        tools_str = ", ".join(missing_tools)
        raise MissingToolError(
            f"Required tools not found: {tools_str}. Please install them to continue."
        )
|
||||
|
||||
|
||||
def parse_details(details: str) -> tuple[str, Optional[str]]:
|
||||
"""Parse version details string into new and current versions.
|
||||
|
||||
|
|
@ -195,11 +289,40 @@ def load_source_info_map() -> dict[str, SourceInfo]:
|
|||
return vcs_by_source
|
||||
|
||||
|
||||
def fetch_todo_list(timeout: int = 30) -> TodoList:
    """Fetch the TODO list from UDD as JSON.

    Args:
        timeout: Request timeout in seconds (default: 30)

    Returns:
        List of TODO items

    Raises:
        PackageUpdateError: If network request fails, times out, or the
            response body is not valid JSON
    """
    try:
        with urlopen(TODO_URL, timeout=timeout) as response:
            payload = response.read().decode("utf-8")
    # HTTPError is a URLError subclass, so it must be caught first.
    except urllib.error.HTTPError as e:
        raise PackageUpdateError(
            f"HTTP error {e.code} while fetching TODO list from {TODO_URL}: {e.reason}"
        ) from e
    except urllib.error.URLError as e:
        raise PackageUpdateError(
            f"Network error while fetching TODO list from {TODO_URL}: {e.reason}"
        ) from e
    except TimeoutError as e:
        raise PackageUpdateError(
            f"Timeout after {timeout}s while fetching TODO list from {TODO_URL}"
        ) from e

    try:
        return cast(TodoList, json.loads(payload))
    except json.JSONDecodeError as e:
        raise PackageUpdateError(
            f"Invalid JSON in TODO list response: {e}"
        ) from e
|
||||
|
||||
|
||||
def save_todo_list(todo_list: TodoList) -> None:
|
||||
|
|
@ -328,8 +451,13 @@ def list_todos(include_prerelease: bool) -> None:
|
|||
"""
|
||||
if not TODO_PATH.exists():
|
||||
print("Downloading todo.json...")
|
||||
todo_list = fetch_todo_list()
|
||||
save_todo_list(todo_list)
|
||||
try:
|
||||
todo_list = fetch_todo_list()
|
||||
save_todo_list(todo_list)
|
||||
except PackageUpdateError as e:
|
||||
print(f"Error: {e}", file=sys.stderr)
|
||||
print("Please try again later or check your network connection.", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
with TODO_PATH.open("r", encoding="utf-8") as handle:
|
||||
todo_list = cast(TodoList, json.load(handle))
|
||||
|
|
@ -394,7 +522,14 @@ def update_todos() -> None:
|
|||
if TODO_PATH.exists():
|
||||
with TODO_PATH.open("r", encoding="utf-8") as handle:
|
||||
old_list = cast(TodoList, json.load(handle))
|
||||
todo_list = fetch_todo_list()
|
||||
|
||||
try:
|
||||
todo_list = fetch_todo_list()
|
||||
except PackageUpdateError as e:
|
||||
print(f"Error: {e}", file=sys.stderr)
|
||||
print("Failed to fetch updated TODO list.", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
save_todo_list(todo_list)
|
||||
print_changes(filter_todo_list(old_list), filter_todo_list(todo_list))
|
||||
|
||||
|
|
@ -464,21 +599,39 @@ def add_changelog_entry(pkg_dir: Path) -> bool:
|
|||
return result.returncode == 0
|
||||
|
||||
|
||||
def validate_package_info(package: str) -> SourceInfo:
    """Validate package exists in source info map.

    Args:
        package: Package name to validate

    Returns:
        Source info dictionary containing vcs_git and uploaders

    Raises:
        PackageNotFoundError: If package not found or has no vcs_git
    """
    source_info_map = load_source_info_map()
    source_info = source_info_map.get(package)

    # A package without a vcs_git entry cannot be checked out, so treat it
    # the same as an unknown package.
    if not source_info or not source_info.get("vcs_git"):
        raise PackageNotFoundError(
            f"Could not find team info for package '{package}'"
        )

    return source_info
|
||||
|
||||
def resolve_package_directories(package: str, source_info: SourceInfo) -> tuple[Path, Path, Path]:
|
||||
"""Resolve team_dir, pkg_dir, and repo_dir paths.
|
||||
|
||||
Args:
|
||||
package: Package name
|
||||
source_info: Source info dictionary with vcs_git
|
||||
|
||||
Returns:
|
||||
Tuple of (team_dir, pkg_dir, repo_dir)
|
||||
"""
|
||||
team_slug = source_info["vcs_git"]
|
||||
display_team = team_slug_to_display_name(team_slug).lower()
|
||||
|
||||
|
|
@ -486,46 +639,508 @@ def update_package(package: str) -> None:
|
|||
pkg_dir = team_dir / package
|
||||
repo_dir = pkg_dir / package
|
||||
|
||||
# Create package directory if it doesn't exist
|
||||
pkg_dir.mkdir(parents=True, exist_ok=True)
|
||||
return team_dir, pkg_dir, repo_dir
|
||||
|
||||
if not repo_dir.exists():
|
||||
# Package not checked out, do salsa checkout
|
||||
print(f"Checking out {package} into {pkg_dir}...")
|
||||
salsa_path = f"{team_slug}/deps/{package}"
|
||||
result = subprocess.run(
|
||||
["salsa", "checkout", salsa_path],
|
||||
cwd=pkg_dir,
|
||||
|
||||
def ensure_package_checkout(package: str, source_info: SourceInfo, pkg_dir: Path) -> None:
    """Check the package out from salsa into pkg_dir.

    Args:
        package: Package name
        source_info: Source info dictionary with vcs_git
        pkg_dir: Directory to check out into

    Raises:
        ExternalCommandError: If salsa checkout fails
        RepositoryStateError: If directory creation fails
    """
    try:
        pkg_dir.mkdir(parents=True, exist_ok=True)
    except OSError as e:
        raise RepositoryStateError(
            f"Failed to create directory {pkg_dir}: {e}"
        ) from e

    print(f"Checking out {package} into {pkg_dir}...")
    checkout_target = f"{source_info['vcs_git']}/deps/{package}"
    run_command(
        ["salsa", "checkout", checkout_target],
        pkg_dir,
        f"Checkout {checkout_target}",
    )
|
||||
|
||||
|
||||
def validate_repository_state(repo_dir: Path) -> None:
    """Validate repository has no uncommitted changes.

    Args:
        repo_dir: Repository directory to check

    Raises:
        RepositoryStateError: If there are uncommitted changes
    """
    if has_uncommitted_changes(repo_dir):
        raise RepositoryStateError(
            f"{repo_dir} has uncommitted changes. "
            "Please commit or stash them first."
        )
|
||||
|
||||
def run_gbp_pq_workflow(repo_dir: Path) -> None:
    """Import the patch queue and switch onto the patch branch.

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If gbp pq commands fail
    """
    # The two gbp pq steps must run in this order: import first, then switch.
    steps = (
        (["gbp", "pq", "import"], "Import patch queue"),
        (["gbp", "pq", "switch"], "Switch to patch branch"),
    )
    for args, description in steps:
        run_command(args, repo_dir, description)
|
||||
|
||||
|
||||
def import_upstream_version(repo_dir: Path) -> None:
    """Import the latest upstream release with gbp import-orig.

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If gbp import-orig fails
    """
    print("Importing new upstream version...")
    import_cmd = ["gbp", "import-orig", "--uscan", "--pristine-tar", "--no-interactive"]
    run_command(import_cmd, repo_dir, "Import upstream version")
|
||||
|
||||
|
||||
def update_debian_control(repo_dir: Path) -> None:
    """Update debian/control file.

    - Removes obsolete 'Priority: optional' (now default)
    - Removes obsolete 'Rules-Requires-Root: no' (now default)
    - Updates Standards-Version to current (4.7.3)

    Each change is recorded in the changelog via dch.

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If dch command fails
    """
    control_path = repo_dir / "debian" / "control"
    if not control_path.exists():
        return

    lines = control_path.read_text().splitlines()
    current_standards_version = "4.7.3"
    new_lines: list[str] = []

    for line in lines:
        if line == "Priority: optional":
            run_command(
                ["dch", "-a", "Remove 'Priority: optional', now the default."],
                repo_dir,
                "Add changelog entry for Priority removal",
            )
            # BUG FIX: a stray sys.exit(1) here aborted the entire update run
            # after a successful dch call; errors are signalled via
            # ExternalCommandError from run_command instead.
            continue
        if line == "Rules-Requires-Root: no":
            run_command(
                ["dch", "-a", "Remove 'Rules-Requires-Root: no', now the default."],
                repo_dir,
                "Add changelog entry for Rules-Requires-Root removal",
            )
            continue
        if line.startswith("Standards-Version: "):
            standards_version = line[len("Standards-Version: "):]
            if standards_version != current_standards_version:
                line = "Standards-Version: " + current_standards_version
                run_command(
                    ["dch", "-a", "Update Standards-Version."],
                    repo_dir,
                    "Add changelog entry for Standards-Version update",
                )

        new_lines.append(line)

    control_path.write_text("\n".join(new_lines) + "\n")
|
||||
|
||||
|
||||
def update_debian_copyright_year(repo_dir: Path) -> None:
    """Bring Edward Betts' copyright years in debian/copyright up to date.

    Transforms:
        - "Copyright: 2025 Edward Betts <...>" -> "Copyright: 2025-2026 Edward Betts <...>"
        - "Copyright: 2022-2024 Edward Betts <...>" -> "Copyright: 2022-2026 Edward Betts <...>"

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If dch command fails
    """
    copyright_path = repo_dir / "debian" / "copyright"
    if not copyright_path.exists():
        return

    holder = "Edward Betts <edward@4angle.com>"
    current_year = datetime.date.today().year
    original = copyright_path.read_text(encoding="utf-8")

    # Matches a years token followed by the holder, with an optional
    # "Copyright:" label (continuation lines inside the field have none).
    line_re = re.compile(
        rf"^(?P<indent>\s*)"
        rf"(?:(?P<label>Copyright:\s+))?"
        rf"(?P<years>\d{{4}}(?:-\d{{4}})?)"
        rf"(?P<suffix>\s+{re.escape(holder)}\s*)$"
    )
    copyright_field_start_re = re.compile(r"^\s*Copyright:\s+")
    field_start_re = re.compile(r"^(?:\S[^:]*):\s*")
    continuation_re = re.compile(r"^\s+")

    def bumped_years(token: str) -> Optional[str]:
        """Return an updated years token, or None if no change is needed."""
        if "-" in token:
            first_s, last_s = token.split("-", 1)
            first, last = int(first_s), int(last_s)
        else:
            first = last = int(token)

        if last >= current_year:
            return None

        return str(current_year) if first == current_year else f"{first}-{current_year}"

    rewritten: list[str] = []
    inside_copyright = False

    for raw in original.splitlines(keepends=True):
        text = raw.rstrip("\n")

        # Track whether we are still inside a multi-line Copyright: field.
        if field_start_re.match(text) and not copyright_field_start_re.match(text):
            inside_copyright = False
        if copyright_field_start_re.match(text):
            inside_copyright = True

        match = line_re.match(text)
        eligible = bool(match) and (
            bool(match.group("label"))
            or (inside_copyright and bool(continuation_re.match(text)))
        )

        if eligible and match:
            new_years = bumped_years(match.group("years"))
            if new_years is not None:
                indent = match.group("indent")
                label = match.group("label") or ""
                suffix = match.group("suffix")
                raw = f"{indent}{label}{new_years}{suffix}\n"

        rewritten.append(raw)

    updated = "".join(rewritten)

    if updated != original:
        copyright_path.write_text(updated, encoding="utf-8")
        run_command(
            ["dch", "-a", "Update copyright year."],
            repo_dir,
            "Add changelog entry for copyright year update",
        )
|
||||
|
||||
|
||||
def update_debian_watch(repo_dir: Path) -> None:
    """Upgrade debian/watch from version 4 to version 5.

    Supports GitHub and PyPI watch entries; anything else is left untouched.

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If dch command fails
    """
    watch_path = repo_dir / "debian" / "watch"
    if not watch_path.exists():
        return

    try:
        original = watch_path.read_text(encoding="utf-8")
    except FileNotFoundError:
        # File vanished between the exists() check and the read.
        return

    # Already at format version 5 -> nothing to do.
    if re.search(r"(?im)^\s*Version:\s*5\s*$", original):
        return

    github_url_re = re.compile(
        r"""(?xi)
        https?://github\.com/
        (?P<owner>[^/\s]+)/(?P<repo>[^/\s#?]+)
        (?:/(?:releases|tags))?
        (?:[^\s]*)?
        """
    )

    # PyPI URL patterns, in the order they should be tried.
    pypi_patterns = (
        re.compile(
            r"""(?xi)
            https?://pypi\.debian\.net/
            (?P<name>[a-z0-9][a-z0-9._-]*)
            /
            """
        ),
        re.compile(
            r"""(?xi)
            https?://pypi(?:\.python)?\.org/
            packages/source/
            (?P<letter>[a-z0-9])/
            (?P<name>[a-z0-9][a-z0-9._-]*)
            /
            """
        ),
        re.compile(
            r"""(?xi)
            https?://files\.pythonhosted\.org/
            packages/source/
            (?P<letter>[a-z0-9])/
            (?P<name>[a-z0-9][a-z0-9._-]*)
            /
            """
        ),
    )

    # Re-join backslash-continued lines before looking for URLs.
    unwrapped: list[str] = []
    pending = ""
    for raw_line in original.splitlines():
        if raw_line.rstrip().endswith("\\"):
            pending += raw_line.rstrip()[:-1] + " "
        else:
            unwrapped.append(pending + raw_line)
            pending = ""
    if pending:
        unwrapped.append(pending)
    joined = "\n".join(unwrapped)

    # Prefer a GitHub template, then fall back to PyPI.
    github_match = github_url_re.search(joined)
    if github_match:
        owner = github_match.group("owner")
        project = github_match.group("repo")
        if project.endswith(".git"):
            project = project[:-4]
        new_body = (
            "Version: 5\n"
            "\n"
            "Template: GitHub\n"
            f"Owner: {owner}\n"
            f"Project: {project}\n"
        )
    else:
        dist_name = None
        for pattern in pypi_patterns:
            pypi_match = pattern.search(joined)
            if pypi_match:
                dist_name = pypi_match.group("name")
                break

        if not dist_name:
            # Neither GitHub nor PyPI: we cannot upgrade this watch file.
            return

        new_body = f"Version: 5\n\nTemplate: Pypi\nDist: {dist_name}\n"

    # Carry over any leading comments / blank lines from the old file.
    preserved: list[str] = []
    for raw_line in original.splitlines(keepends=True):
        if raw_line.lstrip().startswith("#") or not raw_line.strip():
            preserved.append(raw_line)
            continue
        break
    header = "".join(preserved).rstrip()
    if header:
        header += "\n\n"

    new_contents = header + new_body

    if new_contents != original:
        watch_path.write_text(new_contents, encoding="utf-8")
        run_command(
            ["dch", "-a", "Update debian/watch to format version 5."],
            repo_dir,
            "Add changelog entry for debian/watch update",
        )
|
||||
|
||||
|
||||
def add_salsa_ci(repo_dir: Path) -> None:
    """Create debian/salsa-ci.yml when it is missing.

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If git or dch commands fail
    """
    salsa_ci_path = repo_dir / "debian" / "salsa-ci.yml"
    if salsa_ci_path.exists():
        return

    control_path = repo_dir / "debian" / "control"
    if not control_path.exists():
        return

    # Derive the salsa project path from the Vcs-Git field in debian/control.
    vcs_git_re = re.compile(r"^Vcs-Git: https://salsa\.debian\.org/(.*).git$")
    salsa_repo = None
    with control_path.open() as handle:
        for line in handle:
            if not line.startswith("Vcs-Git"):
                continue
            match = vcs_git_re.match(line.rstrip())
            if match:
                salsa_repo = match.group(1)
                break

    if not salsa_repo:
        return

    content = """---
include:
  - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/recipes/debian.yml
"""
    salsa_ci_path.write_text(content)

    run_command(
        ["git", "add", str(salsa_ci_path)],
        repo_dir,
        "Add debian/salsa-ci.yml to git",
    )
    run_command(
        ["dch", "-a", "Add debian/salsa-ci.yml."],
        repo_dir,
        "Add changelog entry for salsa-ci.yml",
    )

    # Point the salsa project at the new CI config for homeassistant-team.
    if salsa_repo.startswith("homeassistant-team"):
        run_command(
            [
                "salsa",
                "update_projects",
                salsa_repo,
                "--jobs",
                "yes",
                "--ci-config-path",
                "debian/salsa-ci.yml",
            ],
            repo_dir,
            "Update salsa project CI settings",
            check=False,  # best-effort; don't fail the update over CI settings
        )
|
||||
|
||||
|
||||
def run_package_updates(repo_dir: Path) -> None:
    """Apply all routine packaging clean-ups to the repository.

    In order:
        - debian/control: drop obsolete fields, refresh Standards-Version
        - debian/copyright: bring copyright years up to date
        - debian/watch: upgrade to format version 5
        - debian/salsa-ci.yml: create if missing

    Args:
        repo_dir: Repository directory

    Raises:
        ExternalCommandError: If any update command fails
    """
    print("Updating package files...")
    # Run each updater in sequence; order matters for changelog entries.
    for updater in (
        update_debian_control,
        update_debian_copyright_year,
        update_debian_watch,
        add_salsa_ci,
    ):
        updater(repo_dir)
|
||||
|
||||
|
||||
def update_package(package: str) -> None:
    """Update a Debian package to a new upstream version.

    Checks out the package if not already present, imports the new upstream
    version using gbp, creates a new changelog entry, and applies routine
    packaging updates.

    Raises:
        PackageNotFoundError: If package not found in source info
        RepositoryStateError: If repository has uncommitted changes
        ExternalCommandError: If any external command fails
        MissingToolError: If required tools are not available
    """
    # Validate package exists and check required tools up front.
    source_info = validate_package_info(package)
    check_required_tools()

    # Resolve directory paths (team_dir is unused here).
    _team_dir, pkg_dir, repo_dir = resolve_package_directories(package, source_info)

    # Checkout package if needed, or validate the existing checkout is clean.
    if not repo_dir.exists():
        ensure_package_checkout(package, source_info, pkg_dir)
    else:
        validate_repository_state(repo_dir)
        print(f"Package {package} already checked out at {pkg_dir}")

    # Run gbp pq workflow, then import the new upstream version.
    # (Legacy inline subprocess/sys.exit duplicates of these steps removed;
    # the helpers raise ExternalCommandError on failure.)
    run_gbp_pq_workflow(repo_dir)
    import_upstream_version(repo_dir)

    version = extract_upstream_version_from_git_log(repo_dir)
    if not version:
        raise ExternalCommandError(
            "git log",
            0,
            "Could not find upstream version in git log.",
        )

    new_version = f"{version}-1"
    run_command(
        [
            "dch",
            "--release-heuristic",
            "log",
            "--newversion",
            new_version,
            "New upstream release.",
        ],
        repo_dir,
        "Add changelog entry",
    )

    # Apply routine packaging updates (control, copyright, watch, salsa-ci).
    run_package_updates(repo_dir)

    print(f"Successfully updated {package}")
    print(repo_dir)
|
||||
|
|
@ -557,7 +1172,11 @@ def update_command() -> None:
|
|||
@cli.command("update-pkg", help="Update a package to a new upstream version.")
@click.argument("package")
def update_pkg_command(package: str) -> None:
    # CLI wrapper: report PackageUpdateError cleanly on stderr and exit 1.
    try:
        update_package(package)
    except PackageUpdateError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
|
||||
|
||||
|
||||
def main() -> None:
|
||||
|
|
|
|||
914
tests/test_update_pkg.py
Normal file
914
tests/test_update_pkg.py
Normal file
|
|
@ -0,0 +1,914 @@
|
|||
"""Tests for update-pkg command functions."""
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import urllib.error
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
import debian_todo
|
||||
|
||||
|
||||
class TestRunCommand:
    """Tests for the run_command helper."""

    def test_successful_command(self, tmp_path):
        """A zero exit status is returned unchanged."""
        with patch("debian_todo.subprocess.run") as fake_run:
            fake_run.return_value = MagicMock(returncode=0, stdout="", stderr="")
            completed = debian_todo.run_command(
                ["echo", "test"], tmp_path, "Test command"
            )
        assert completed.returncode == 0

    def test_command_with_capture_output(self, tmp_path):
        """capture_output=True is forwarded and stdout is preserved."""
        with patch("debian_todo.subprocess.run") as fake_run:
            fake_run.return_value = MagicMock(
                returncode=0, stdout="output", stderr=""
            )
            completed = debian_todo.run_command(
                ["echo", "test"], tmp_path, "Test command", capture_output=True
            )
        assert completed.stdout == "output"
        fake_run.assert_called_once_with(
            ["echo", "test"], cwd=tmp_path, capture_output=True, text=True
        )

    def test_command_failure_with_check(self, tmp_path):
        """A non-zero exit raises ExternalCommandError when check=True."""
        with patch("debian_todo.subprocess.run") as fake_run:
            fake_run.return_value = MagicMock(returncode=1, stdout="", stderr="")
            with pytest.raises(debian_todo.ExternalCommandError) as exc_info:
                debian_todo.run_command(
                    ["false"], tmp_path, "Test command", check=True
                )
        assert exc_info.value.returncode == 1
        assert "false" in exc_info.value.command
        assert "Test command failed" in str(exc_info.value)

    def test_command_failure_without_check(self, tmp_path):
        """A non-zero exit is returned as-is when check=False."""
        with patch("debian_todo.subprocess.run") as fake_run:
            fake_run.return_value = MagicMock(returncode=1, stdout="", stderr="")
            completed = debian_todo.run_command(
                ["false"], tmp_path, "Test command", check=False
            )
        assert completed.returncode == 1

    def test_missing_command(self, tmp_path):
        """An unknown executable raises MissingToolError naming the command."""
        with patch("debian_todo.subprocess.run") as fake_run:
            fake_run.side_effect = FileNotFoundError("command not found")
            with pytest.raises(debian_todo.MissingToolError) as exc_info:
                debian_todo.run_command(
                    ["nonexistent"], tmp_path, "Test command"
                )
        assert "nonexistent" in str(exc_info.value)
|
||||
|
||||
class TestCheckRequiredTools:
    """Tests for check_required_tools."""

    def test_all_tools_available(self):
        """No error is raised when every tool resolves on PATH."""
        with patch("debian_todo.shutil.which") as fake_which:
            fake_which.return_value = "/usr/bin/tool"
            debian_todo.check_required_tools()
        assert fake_which.call_count == 3

    def test_missing_salsa(self):
        """A missing salsa binary is reported by name."""
        with patch("debian_todo.shutil.which") as fake_which:
            fake_which.side_effect = (
                lambda tool: None if tool == "salsa" else "/usr/bin/tool"
            )
            with pytest.raises(debian_todo.MissingToolError) as exc_info:
                debian_todo.check_required_tools()
        assert "salsa" in str(exc_info.value)

    def test_missing_gbp(self):
        """A missing gbp binary is reported by name."""
        with patch("debian_todo.shutil.which") as fake_which:
            fake_which.side_effect = (
                lambda tool: None if tool == "gbp" else "/usr/bin/tool"
            )
            with pytest.raises(debian_todo.MissingToolError) as exc_info:
                debian_todo.check_required_tools()
        assert "gbp" in str(exc_info.value)

    def test_missing_multiple_tools(self):
        """Every missing tool appears in the error message."""
        with patch("debian_todo.shutil.which") as fake_which:
            fake_which.return_value = None
            with pytest.raises(debian_todo.MissingToolError) as exc_info:
                debian_todo.check_required_tools()
        message = str(exc_info.value)
        for tool in ("salsa", "gbp", "dch"):
            assert tool in message
|
||||
|
||||
|
||||
class TestFetchTodoListErrors:
    """Tests for fetch_todo_list error handling."""

    @staticmethod
    def _fake_response(body: bytes) -> MagicMock:
        """Build a context-manager mock whose read() returns *body*."""
        response = MagicMock()
        response.read.return_value = body
        response.__enter__ = lambda self: self
        response.__exit__ = lambda self, *args: None
        return response

    def test_http_error(self):
        """HTTP failures are wrapped with status code and reason."""
        with patch("debian_todo.urlopen") as fake_urlopen:
            fake_urlopen.side_effect = urllib.error.HTTPError(
                debian_todo.TODO_URL, 404, "Not Found", {}, None
            )
            with pytest.raises(debian_todo.PackageUpdateError) as exc_info:
                debian_todo.fetch_todo_list()
        assert "HTTP error 404" in str(exc_info.value)
        assert "Not Found" in str(exc_info.value)

    def test_network_error(self):
        """URLError is wrapped as a network error."""
        with patch("debian_todo.urlopen") as fake_urlopen:
            fake_urlopen.side_effect = urllib.error.URLError("Connection refused")
            with pytest.raises(debian_todo.PackageUpdateError) as exc_info:
                debian_todo.fetch_todo_list()
        assert "Network error" in str(exc_info.value)
        assert "Connection refused" in str(exc_info.value)

    def test_timeout_error(self):
        """Timeouts mention the configured timeout value."""
        with patch("debian_todo.urlopen") as fake_urlopen:
            fake_urlopen.side_effect = TimeoutError("Request timed out")
            with pytest.raises(debian_todo.PackageUpdateError) as exc_info:
                debian_todo.fetch_todo_list(timeout=10)
        assert "Timeout after 10s" in str(exc_info.value)

    def test_invalid_json(self):
        """A malformed payload is wrapped as an invalid-JSON error."""
        with patch("debian_todo.urlopen") as fake_urlopen:
            fake_urlopen.return_value = self._fake_response(b"not valid json {")
            with pytest.raises(debian_todo.PackageUpdateError) as exc_info:
                debian_todo.fetch_todo_list()
        assert "Invalid JSON" in str(exc_info.value)

    def test_successful_fetch(self):
        """A valid JSON payload is decoded and returned."""
        with patch("debian_todo.urlopen") as fake_urlopen:
            fake_urlopen.return_value = self._fake_response(b'[{"source": "pkg1"}]')
            result = debian_todo.fetch_todo_list()
        assert result == [{"source": "pkg1"}]
|
||||
|
||||
class TestTeamSlugToDisplayName:
    """Tests for team_slug_to_display_name."""

    @staticmethod
    def _display(slug):
        """Shorthand for the function under test."""
        return debian_todo.team_slug_to_display_name(slug)

    def test_homeassistant_team(self):
        """homeassistant-team is abbreviated to HA."""
        assert self._display("homeassistant-team") == "HA"

    def test_python_team(self):
        """python-team drops the -team suffix."""
        assert self._display("python-team") == "python"

    def test_openstack_team(self):
        """openstack-team drops the -team suffix."""
        assert self._display("openstack-team") == "openstack"

    def test_other_team(self):
        """Any other -team slug also drops the suffix."""
        assert self._display("rust-team") == "rust"

    def test_no_team_suffix(self):
        """A slug without a -team suffix is returned unchanged."""
        assert self._display("username") == "username"
||||
|
||||
|
||||
class TestHasUncommittedChanges:
    """Tests for has_uncommitted_changes."""

    @staticmethod
    def _check(repo, porcelain_output):
        """Call has_uncommitted_changes with git status stubbed to return output."""
        with patch("debian_todo.subprocess.run") as run_mock:
            run_mock.return_value = MagicMock(stdout=porcelain_output, returncode=0)
            result = debian_todo.has_uncommitted_changes(repo)
        return result, run_mock

    def test_no_changes(self, tmp_path):
        """Empty porcelain output means a clean tree."""
        result, run_mock = self._check(tmp_path, "")
        assert result is False
        run_mock.assert_called_once_with(
            ["git", "status", "--porcelain"],
            cwd=tmp_path,
            capture_output=True,
            text=True,
        )

    def test_has_changes(self, tmp_path):
        """A modified file counts as an uncommitted change."""
        result, _ = self._check(tmp_path, " M file.txt\n")
        assert result is True

    def test_untracked_files(self, tmp_path):
        """An untracked file also counts as an uncommitted change."""
        result, _ = self._check(tmp_path, "?? newfile.txt\n")
        assert result is True

    def test_whitespace_only_output(self, tmp_path):
        """Whitespace-only porcelain output is treated as a clean tree."""
        result, _ = self._check(tmp_path, " \n")
        assert result is False
||||
|
||||
|
||||
class TestExtractUpstreamVersionFromGitLog:
    """Tests for extract_upstream_version_from_git_log."""

    @staticmethod
    def _extract(repo, log_output, returncode=0):
        """Call the extractor with git log stubbed to produce *log_output*."""
        with patch("debian_todo.subprocess.run") as run_mock:
            run_mock.return_value = MagicMock(
                stdout=log_output, returncode=returncode
            )
            return debian_todo.extract_upstream_version_from_git_log(repo)

    @staticmethod
    def _log_with_subject(subject):
        """Build a git-log-shaped commit with the given subject line."""
        return (
            "commit abc123\n"
            "Author: Someone <someone@example.com>\n"
            "Date:   Mon Jan 1 12:00:00 2024 +0000\n"
            "\n"
            f"    {subject}\n"
        )

    def test_extracts_version(self, tmp_path):
        """A plain upstream tag yields its version string."""
        log = self._log_with_subject(
            "Update upstream source from tag 'upstream/1.2.3'"
        )
        assert self._extract(tmp_path, log) == "1.2.3"

    def test_extracts_complex_version(self, tmp_path):
        """Versions with suffixes such as rc1 are extracted intact."""
        log = self._log_with_subject(
            "Update upstream source from tag 'upstream/2024.1.0rc1'"
        )
        assert self._extract(tmp_path, log) == "2024.1.0rc1"

    def test_no_upstream_tag(self, tmp_path):
        """A commit without an upstream tag yields None."""
        log = self._log_with_subject("Fixed a bug")
        assert self._extract(tmp_path, log) is None

    def test_git_log_fails(self, tmp_path):
        """A failing git log invocation yields None."""
        assert self._extract(tmp_path, "", returncode=1) is None
||||
|
||||
|
||||
class TestValidatePackageInfo:
    """Tests for validate_package_info."""

    def test_valid_package(self, monkeypatch):
        """A package with a vcs_git entry is returned unchanged."""
        info = {"vcs_git": "python-team", "uploaders": ""}
        monkeypatch.setattr(
            debian_todo, "load_source_info_map", lambda: {"mypkg": info}
        )
        assert debian_todo.validate_package_info("mypkg") == info

    def test_package_not_found(self, monkeypatch):
        """A package absent from the source map raises PackageNotFoundError."""
        monkeypatch.setattr(debian_todo, "load_source_info_map", dict)
        with pytest.raises(debian_todo.PackageNotFoundError) as exc_info:
            debian_todo.validate_package_info("nonexistent")
        assert "nonexistent" in str(exc_info.value)

    def test_package_no_vcs_git(self, monkeypatch):
        """An empty vcs_git field raises PackageNotFoundError."""
        monkeypatch.setattr(
            debian_todo,
            "load_source_info_map",
            lambda: {"mypkg": {"vcs_git": "", "uploaders": ""}},
        )
        with pytest.raises(debian_todo.PackageNotFoundError) as exc_info:
            debian_todo.validate_package_info("mypkg")
        assert "mypkg" in str(exc_info.value)
||||
|
||||
|
||||
class TestResolvePackageDirectories:
    """Tests for resolve_package_directories."""

    def test_python_team_package(self, tmp_path, monkeypatch):
        """python-team packages resolve under the python/ tree."""
        monkeypatch.setattr(debian_todo, "DEBIAN_SRC_BASE", tmp_path)
        team_dir, pkg_dir, repo_dir = debian_todo.resolve_package_directories(
            "mypkg", {"vcs_git": "python-team", "uploaders": ""}
        )
        base = tmp_path / "python"
        assert team_dir == base
        assert pkg_dir == base / "mypkg"
        assert repo_dir == base / "mypkg" / "mypkg"

    def test_homeassistant_team_package(self, tmp_path, monkeypatch):
        """homeassistant-team packages resolve under the ha/ tree."""
        monkeypatch.setattr(debian_todo, "DEBIAN_SRC_BASE", tmp_path)
        team_dir, pkg_dir, repo_dir = debian_todo.resolve_package_directories(
            "ha-pkg", {"vcs_git": "homeassistant-team", "uploaders": ""}
        )
        base = tmp_path / "ha"
        assert team_dir == base
        assert pkg_dir == base / "ha-pkg"
        assert repo_dir == base / "ha-pkg" / "ha-pkg"
||||
|
||||
|
||||
class TestEnsurePackageCheckout:
    """Tests for ensure_package_checkout."""

    def test_successful_checkout(self, tmp_path):
        """The package directory is created and salsa checkout is invoked."""
        pkg_dir = tmp_path / "python" / "mypkg"
        source_info = {"vcs_git": "python-team", "uploaders": ""}

        with patch("debian_todo.run_command") as run_mock:
            debian_todo.ensure_package_checkout("mypkg", source_info, pkg_dir)
            assert pkg_dir.exists()
            run_mock.assert_called_once_with(
                ["salsa", "checkout", "python-team/deps/mypkg"],
                pkg_dir,
                "Checkout python-team/deps/mypkg",
            )

    def test_checkout_with_existing_parent(self, tmp_path):
        """A pre-existing package directory is reused rather than recreated."""
        pkg_dir = tmp_path / "python" / "mypkg"
        pkg_dir.mkdir(parents=True)
        source_info = {"vcs_git": "python-team", "uploaders": ""}

        with patch("debian_todo.run_command"):
            debian_todo.ensure_package_checkout("mypkg", source_info, pkg_dir)
            assert pkg_dir.exists()
||||
|
||||
|
||||
class TestValidateRepositoryState:
    """Tests for validate_repository_state."""

    def test_clean_repository(self, tmp_path):
        """A clean repository passes validation without raising."""
        with patch("debian_todo.has_uncommitted_changes", return_value=False):
            debian_todo.validate_repository_state(tmp_path)

    def test_dirty_repository(self, tmp_path):
        """Uncommitted changes raise RepositoryStateError."""
        with patch("debian_todo.has_uncommitted_changes", return_value=True):
            with pytest.raises(debian_todo.RepositoryStateError) as exc_info:
                debian_todo.validate_repository_state(tmp_path)
        assert "uncommitted changes" in str(exc_info.value)
||||
|
||||
|
||||
class TestRunGbpPqWorkflow:
    """Tests for run_gbp_pq_workflow."""

    def test_successful_workflow(self, tmp_path):
        """Both gbp pq steps run against the repository."""
        with patch("debian_todo.run_command") as run_mock:
            debian_todo.run_gbp_pq_workflow(tmp_path)
        assert run_mock.call_count == 2
        run_mock.assert_any_call(
            ["gbp", "pq", "import"], tmp_path, "Import patch queue"
        )
        run_mock.assert_any_call(
            ["gbp", "pq", "switch"], tmp_path, "Switch to patch branch"
        )

    def test_import_fails(self, tmp_path):
        """A failing gbp pq import propagates ExternalCommandError."""
        failure = debian_todo.ExternalCommandError(
            "gbp pq import", 1, "Import failed"
        )
        with patch("debian_todo.run_command", side_effect=failure):
            with pytest.raises(debian_todo.ExternalCommandError):
                debian_todo.run_gbp_pq_workflow(tmp_path)
||||
|
||||
|
||||
class TestImportUpstreamVersion:
    """Tests for import_upstream_version."""

    def test_successful_import(self, tmp_path, capsys):
        """gbp import-orig is invoked and a progress message is printed."""
        with patch("debian_todo.run_command") as run_mock:
            debian_todo.import_upstream_version(tmp_path)
        run_mock.assert_called_once_with(
            ["gbp", "import-orig", "--uscan", "--pristine-tar", "--no-interactive"],
            tmp_path,
            "Import upstream version",
        )
        assert "Importing new upstream version" in capsys.readouterr().out

    def test_import_fails(self, tmp_path):
        """A failing upstream import propagates ExternalCommandError."""
        failure = debian_todo.ExternalCommandError(
            "gbp import-orig", 1, "Import failed"
        )
        with patch("debian_todo.run_command", side_effect=failure):
            with pytest.raises(debian_todo.ExternalCommandError):
                debian_todo.import_upstream_version(tmp_path)
||||
|
||||
|
||||
class TestRunPackageUpdates:
    """Tests for run_package_updates."""

    def test_runs_all_updates(self, tmp_path):
        """Each per-file update helper is invoked exactly once with the repo."""
        # Single multi-context with-statement instead of four nested blocks.
        with patch("debian_todo.update_debian_control") as control_mock, patch(
            "debian_todo.update_debian_copyright_year"
        ) as copyright_mock, patch(
            "debian_todo.update_debian_watch"
        ) as watch_mock, patch(
            "debian_todo.add_salsa_ci"
        ) as salsa_mock:
            debian_todo.run_package_updates(tmp_path)
        for helper_mock in (control_mock, copyright_mock, watch_mock, salsa_mock):
            helper_mock.assert_called_once_with(tmp_path)
||||
|
||||
|
||||
class TestUpdateDebianControl:
    """Tests for update_debian_control."""

    @staticmethod
    def _write_control(root, content):
        """Create debian/control beneath *root* and return its path."""
        debian_dir = root / "debian"
        debian_dir.mkdir()
        path = debian_dir / "control"
        path.write_text(content)
        return path

    def test_removes_priority_optional(self, tmp_path):
        """The obsolete Priority: optional field is stripped from control."""
        control_path = self._write_control(
            tmp_path, "Source: mypkg\nPriority: optional\n"
        )

        with patch("debian_todo.run_command") as run_mock:
            debian_todo.update_debian_control(tmp_path)
            run_mock.assert_called_once()
            # The commit/changelog message should mention the removed field.
            assert "Priority: optional" in run_mock.call_args[0][0][2]

        assert "Priority: optional" not in control_path.read_text()

    def test_updates_standards_version(self, tmp_path):
        """Standards-Version is bumped to the current policy release."""
        control_path = self._write_control(
            tmp_path, "Source: mypkg\nStandards-Version: 4.6.0\n"
        )

        with patch("debian_todo.run_command") as run_mock:
            debian_todo.update_debian_control(tmp_path)
            run_mock.assert_called_once()
            assert "Standards-Version" in run_mock.call_args[0][0][2]

        assert "Standards-Version: 4.7.3" in control_path.read_text()

    def test_no_changes_needed(self, tmp_path):
        """An already up-to-date control file triggers no commands."""
        self._write_control(tmp_path, "Source: mypkg\nStandards-Version: 4.7.3\n")

        with patch("debian_todo.run_command") as run_mock:
            debian_todo.update_debian_control(tmp_path)
            run_mock.assert_not_called()

    def test_missing_control_file(self, tmp_path):
        """A missing debian/control is silently skipped."""
        debian_todo.update_debian_control(tmp_path)  # Should not raise
||||
|
||||
|
||||
class TestUpdateDebianCopyrightYear:
    """Tests for update_debian_copyright_year."""

    @staticmethod
    def _write_copyright(root, line):
        """Create debian/copyright beneath *root* and return its path."""
        debian_dir = root / "debian"
        debian_dir.mkdir()
        path = debian_dir / "copyright"
        path.write_text(line)
        return path

    def test_updates_single_year(self, tmp_path):
        """A single stale year becomes a range ending at the current year."""
        this_year = datetime.date.today().year
        prev_year = this_year - 1
        copyright_path = self._write_copyright(
            tmp_path, f"Copyright: {prev_year} Edward Betts <edward@4angle.com>\n"
        )

        with patch("debian_todo.run_command") as run_mock:
            debian_todo.update_debian_copyright_year(tmp_path)
            run_mock.assert_called_once()

        assert f"{prev_year}-{this_year}" in copyright_path.read_text()

    def test_updates_year_range(self, tmp_path):
        """An existing year range is extended to the current year."""
        this_year = datetime.date.today().year
        copyright_path = self._write_copyright(
            tmp_path,
            f"Copyright: 2020-{this_year - 2} Edward Betts <edward@4angle.com>\n",
        )

        with patch("debian_todo.run_command") as run_mock:
            debian_todo.update_debian_copyright_year(tmp_path)
            run_mock.assert_called_once()

        assert f"2020-{this_year}" in copyright_path.read_text()

    def test_no_update_needed(self, tmp_path):
        """A copyright year that is already current triggers no commands."""
        this_year = datetime.date.today().year
        self._write_copyright(
            tmp_path, f"Copyright: {this_year} Edward Betts <edward@4angle.com>\n"
        )

        with patch("debian_todo.run_command") as run_mock:
            debian_todo.update_debian_copyright_year(tmp_path)
            run_mock.assert_not_called()

    def test_missing_copyright_file(self, tmp_path):
        """A missing debian/copyright is silently skipped."""
        debian_todo.update_debian_copyright_year(tmp_path)  # Should not raise
||||
|
||||
|
||||
class TestUpdateDebianWatch:
    """Tests for update_debian_watch."""

    @staticmethod
    def _write_watch(root, content):
        """Create debian/watch beneath *root* and return its path."""
        debian_dir = root / "debian"
        debian_dir.mkdir()
        path = debian_dir / "watch"
        path.write_text(content)
        return path

    def test_upgrades_github_watch(self, tmp_path):
        """A version-4 GitHub watch file is rewritten as version 5."""
        watch_path = self._write_watch(
            tmp_path,
            "version=4\n"
            "https://github.com/owner/repo/releases .*/v?([\\d.]+)\\.tar\\.gz\n",
        )

        with patch("debian_todo.run_command") as run_mock:
            debian_todo.update_debian_watch(tmp_path)
            run_mock.assert_called_once()

        content = watch_path.read_text()
        for expected in (
            "Version: 5",
            "Template: GitHub",
            "Owner: owner",
            "Project: repo",
        ):
            assert expected in content

    def test_upgrades_pypi_watch(self, tmp_path):
        """A version-4 PyPI watch file is rewritten as version 5."""
        watch_path = self._write_watch(
            tmp_path,
            "version=4\n"
            "https://pypi.debian.net/mypackage/mypackage-([\\d.]+)\\.tar\\.gz\n",
        )

        with patch("debian_todo.run_command") as run_mock:
            debian_todo.update_debian_watch(tmp_path)
            run_mock.assert_called_once()

        content = watch_path.read_text()
        for expected in ("Version: 5", "Template: Pypi", "Dist: mypackage"):
            assert expected in content

    def test_already_version_5(self, tmp_path):
        """A watch file already at version 5 is left untouched."""
        self._write_watch(tmp_path, "Version: 5\n\nTemplate: GitHub\n")

        with patch("debian_todo.run_command") as run_mock:
            debian_todo.update_debian_watch(tmp_path)
            run_mock.assert_not_called()

    def test_missing_watch_file(self, tmp_path):
        """A missing debian/watch is silently skipped."""
        debian_todo.update_debian_watch(tmp_path)  # Should not raise
||||
|
||||
|
||||
class TestAddSalsaCi:
    """Tests for add_salsa_ci."""

    def test_adds_salsa_ci_yml(self, tmp_path):
        """debian/salsa-ci.yml is created and staged with a changelog entry."""
        debian_dir = tmp_path / "debian"
        debian_dir.mkdir()
        (debian_dir / "control").write_text(
            "Source: mypkg\n"
            "Vcs-Git: https://salsa.debian.org/python-team/packages/mypkg.git\n"
        )

        with patch("debian_todo.run_command") as run_mock:
            debian_todo.add_salsa_ci(tmp_path)
            # One call to stage the file (git add) and one for dch.
            assert run_mock.call_count == 2

        salsa_ci_path = debian_dir / "salsa-ci.yml"
        assert salsa_ci_path.exists()
        assert "salsa-ci-team/pipeline" in salsa_ci_path.read_text()

    def test_already_exists(self, tmp_path):
        """An existing debian/salsa-ci.yml is left alone."""
        debian_dir = tmp_path / "debian"
        debian_dir.mkdir()
        (debian_dir / "salsa-ci.yml").write_text("existing content\n")

        with patch("debian_todo.run_command") as run_mock:
            debian_todo.add_salsa_ci(tmp_path)
            run_mock.assert_not_called()

    def test_missing_control_file(self, tmp_path):
        """A missing debian/control is silently skipped."""
        debian_todo.add_salsa_ci(tmp_path)  # Should not raise
||||
|
||||
|
||||
class TestAddChangelogEntry:
    """Tests for add_changelog_entry."""

    def test_adds_changelog_entry(self, tmp_path):
        """A detected upstream version is recorded via dch and True returned."""
        with patch.object(
            debian_todo,
            "extract_upstream_version_from_git_log",
            return_value="1.2.3",
        ):
            with patch("debian_todo.subprocess.run") as run_mock:
                run_mock.return_value = MagicMock(returncode=0)
                assert debian_todo.add_changelog_entry(tmp_path) is True
                run_mock.assert_called_once_with(
                    [
                        "dch",
                        "--release-heuristic",
                        "log",
                        "--newversion",
                        "1.2.3-1",
                        "New upstream release.",
                    ],
                    cwd=tmp_path,
                )

    def test_dch_fails(self, tmp_path):
        """A failing dch invocation yields False."""
        with patch.object(
            debian_todo,
            "extract_upstream_version_from_git_log",
            return_value="1.2.3",
        ):
            with patch("debian_todo.subprocess.run") as run_mock:
                run_mock.return_value = MagicMock(returncode=1)
                assert debian_todo.add_changelog_entry(tmp_path) is False

    def test_no_version_found(self, tmp_path):
        """An undetectable upstream version yields False without running dch."""
        with patch.object(
            debian_todo,
            "extract_upstream_version_from_git_log",
            return_value=None,
        ):
            assert debian_todo.add_changelog_entry(tmp_path) is False
||||
|
||||
|
||||
class TestUpdatePackage:
    """Tests for the update_package orchestrator."""

    @staticmethod
    def _setup_package(monkeypatch, tmp_path, team="python-team"):
        """Point update_package at tmp_path with one known package and tools."""
        monkeypatch.setattr(
            debian_todo,
            "load_source_info_map",
            lambda: {"mypkg": {"vcs_git": team, "uploaders": ""}},
        )
        monkeypatch.setattr(debian_todo, "DEBIAN_SRC_BASE", tmp_path)
        monkeypatch.setattr(debian_todo.shutil, "which", lambda _: "/usr/bin/tool")

    def test_package_not_found(self, monkeypatch):
        """An unknown package raises PackageNotFoundError."""
        monkeypatch.setattr(debian_todo, "load_source_info_map", dict)
        with pytest.raises(debian_todo.PackageNotFoundError) as exc_info:
            debian_todo.update_package("nonexistent-pkg")
        assert "nonexistent-pkg" in str(exc_info.value)

    def test_package_no_vcs_git(self, monkeypatch):
        """An empty vcs_git field raises PackageNotFoundError."""
        monkeypatch.setattr(
            debian_todo,
            "load_source_info_map",
            lambda: {"mypkg": {"vcs_git": "", "uploaders": ""}},
        )
        with pytest.raises(debian_todo.PackageNotFoundError) as exc_info:
            debian_todo.update_package("mypkg")
        assert "mypkg" in str(exc_info.value)

    def test_new_checkout(self, tmp_path, monkeypatch):
        """A missing checkout triggers salsa checkout then the gbp workflow."""
        self._setup_package(monkeypatch, tmp_path)
        monkeypatch.setattr(
            debian_todo,
            "extract_upstream_version_from_git_log",
            lambda _: "1.0.0",
        )

        commands = []

        def fake_run(cmd, **kwargs):
            commands.append(cmd)
            if cmd[0] == "salsa":
                # Simulate salsa cloning the repository into place.
                repo_dir = tmp_path / "python" / "mypkg" / "mypkg"
                repo_dir.mkdir(parents=True, exist_ok=True)
            return MagicMock(returncode=0, stdout="", stderr="")

        monkeypatch.setattr(debian_todo.subprocess, "run", fake_run)

        # Mock the update functions to avoid needing a debian/ directory.
        with patch("debian_todo.run_package_updates"):
            debian_todo.update_package("mypkg")

        assert commands[0] == ["salsa", "checkout", "python-team/deps/mypkg"]
        assert commands[1] == ["gbp", "pq", "import"]
        assert commands[2] == ["gbp", "pq", "switch"]
        assert commands[3] == [
            "gbp", "import-orig", "--uscan", "--pristine-tar", "--no-interactive"
        ]
        assert commands[4][0] == "dch"

    def test_existing_checkout_clean(self, tmp_path, monkeypatch):
        """A clean existing checkout skips salsa and runs the workflow."""
        self._setup_package(monkeypatch, tmp_path, team="homeassistant-team")
        monkeypatch.setattr(
            debian_todo,
            "extract_upstream_version_from_git_log",
            lambda _: "2.0.0",
        )
        (tmp_path / "ha" / "mypkg" / "mypkg").mkdir(parents=True)

        commands = []

        def fake_run(cmd, **kwargs):
            commands.append(cmd)
            if cmd == ["git", "status", "--porcelain"]:
                return MagicMock(stdout="", returncode=0)
            return MagicMock(returncode=0, stdout="", stderr="")

        monkeypatch.setattr(debian_todo.subprocess, "run", fake_run)

        # Mock the update functions to avoid needing a debian/ directory.
        with patch("debian_todo.run_package_updates"):
            debian_todo.update_package("mypkg")

        # salsa checkout must be skipped for an existing repository.
        assert not any(cmd[0] == "salsa" for cmd in commands)

        assert commands[0] == ["git", "status", "--porcelain"]
        assert commands[1] == ["gbp", "pq", "import"]
        assert commands[2] == ["gbp", "pq", "switch"]
        assert commands[3][0] == "gbp"
        assert commands[4][0] == "dch"

    def test_existing_checkout_dirty(self, tmp_path, monkeypatch):
        """Uncommitted changes abort the update with RepositoryStateError."""
        self._setup_package(monkeypatch, tmp_path)
        (tmp_path / "python" / "mypkg" / "mypkg").mkdir(parents=True)

        def fake_run(cmd, **kwargs):
            if cmd == ["git", "status", "--porcelain"]:
                return MagicMock(stdout=" M dirty.txt\n", returncode=0)
            return MagicMock(returncode=0)

        monkeypatch.setattr(debian_todo.subprocess, "run", fake_run)

        with pytest.raises(debian_todo.RepositoryStateError) as exc_info:
            debian_todo.update_package("mypkg")

        assert "uncommitted changes" in str(exc_info.value)

    def test_salsa_checkout_fails(self, tmp_path, monkeypatch):
        """A failing salsa checkout raises ExternalCommandError."""
        self._setup_package(monkeypatch, tmp_path)

        def fake_run(cmd, **kwargs):
            failed = cmd[0] == "salsa"
            return MagicMock(returncode=1 if failed else 0, stdout="", stderr="")

        monkeypatch.setattr(debian_todo.subprocess, "run", fake_run)

        with pytest.raises(debian_todo.ExternalCommandError) as exc_info:
            debian_todo.update_package("mypkg")

        assert "salsa" in str(exc_info.value)

    def test_gbp_import_fails(self, tmp_path, monkeypatch):
        """A failing gbp import-orig raises ExternalCommandError."""
        self._setup_package(monkeypatch, tmp_path)
        (tmp_path / "python" / "mypkg" / "mypkg").mkdir(parents=True)

        gbp_calls = []

        def fake_run(cmd, **kwargs):
            if cmd == ["git", "status", "--porcelain"]:
                return MagicMock(stdout="", returncode=0)
            if cmd[0] == "gbp":
                gbp_calls.append(cmd)
                # The first two gbp calls (pq import/switch) succeed;
                # the third (import-orig) fails.
                if len(gbp_calls) >= 3:
                    return MagicMock(returncode=1, stdout="", stderr="")
            return MagicMock(returncode=0, stdout="", stderr="")

        monkeypatch.setattr(debian_todo.subprocess, "run", fake_run)

        with pytest.raises(debian_todo.ExternalCommandError) as exc_info:
            debian_todo.update_package("mypkg")

        assert "gbp" in str(exc_info.value)

    def test_changelog_entry_fails(self, tmp_path, monkeypatch):
        """No detectable upstream version raises ExternalCommandError."""
        self._setup_package(monkeypatch, tmp_path)
        monkeypatch.setattr(
            debian_todo,
            "extract_upstream_version_from_git_log",
            lambda _: None,
        )
        (tmp_path / "python" / "mypkg" / "mypkg").mkdir(parents=True)

        def fake_run(cmd, **kwargs):
            if cmd == ["git", "status", "--porcelain"]:
                return MagicMock(stdout="", returncode=0)
            return MagicMock(returncode=0, stdout="", stderr="")

        monkeypatch.setattr(debian_todo.subprocess, "run", fake_run)

        with pytest.raises(debian_todo.ExternalCommandError) as exc_info:
            debian_todo.update_package("mypkg")

        assert "upstream version" in str(exc_info.value)
||||
Loading…
Add table
Add a link
Reference in a new issue