executed 'ruff format --check .'

Kevin Veen-Birkenbach
2025-12-18 14:04:44 +01:00
parent 763f02a9a4
commit f4339a746a
155 changed files with 1327 additions and 636 deletions

View File

@@ -25,12 +25,12 @@ __all__ = ["cli"]
def __getattr__(name: str) -> Any:
"""
Lazily expose ``pkgmgr.cli`` as attribute on the top-level package.
This keeps ``import pkgmgr`` lightweight while still allowing
``from pkgmgr import cli`` in tests and entry points.
"""
if name == "cli":
return import_module("pkgmgr.cli")
raise AttributeError(f"module 'pkgmgr' has no attribute {name!r}")

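For context on the hunk above: the pattern is PEP 562's module-level ``__getattr__``, which lets a package import a submodule only on first attribute access. A minimal, self-contained sketch assuming only the standard library (the package and submodule names here are hypothetical):

    # mypkg/__init__.py -- lazy submodule exposure via PEP 562 (sketch)
    from importlib import import_module
    from typing import Any

    __all__ = ["heavy"]

    def __getattr__(name: str) -> Any:
        # Called only when normal attribute lookup fails, so a plain
        # `import mypkg` stays cheap; `mypkg.heavy` triggers the real import.
        if name == "heavy":
            return import_module("mypkg.heavy")
        raise AttributeError(f"module 'mypkg' has no attribute {name!r}")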
View File

@@ -3,4 +3,4 @@ from __future__ import annotations
# expose subpackages for patch() / resolve_name() friendliness
from . import release as release # noqa: F401
__all__ = ["release"]

View File

@@ -48,9 +48,13 @@ def close_branch(
# Confirmation
if not force:
answer = input(
f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): "
).strip().lower()
answer = (
input(
f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): "
)
.strip()
.lower()
)
if answer != "y":
print("Aborted closing branch.")
return

View File

@@ -41,15 +41,19 @@ def drop_branch(
# Confirmation
if not force:
answer = input(
f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): "
).strip().lower()
answer = (
input(
f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): "
)
.strip()
.lower()
)
if answer != "y":
print("Aborted dropping branch.")
return
delete_local_branch(name, cwd=cwd, force=False)
# Remote delete (special-case message)
try:
delete_remote_branch("origin", name, cwd=cwd)

View File

@@ -1,15 +1,18 @@
import yaml
import os
from pkgmgr.core.config.save import save_user_config
def interactive_add(config,USER_CONFIG_PATH:str):
def interactive_add(config, USER_CONFIG_PATH: str):
"""Interactively prompt the user to add a new repository entry to the user config."""
print("Adding a new repository configuration entry.")
new_entry = {}
new_entry["provider"] = input("Provider (e.g., github.com): ").strip()
new_entry["account"] = input("Account (e.g., yourusername): ").strip()
new_entry["repository"] = input("Repository name (e.g., mytool): ").strip()
new_entry["command"] = input("Command (optional, leave blank to auto-detect): ").strip()
new_entry["command"] = input(
"Command (optional, leave blank to auto-detect): "
).strip()
new_entry["description"] = input("Description (optional): ").strip()
new_entry["replacement"] = input("Replacement (optional): ").strip()
new_entry["alias"] = input("Alias (optional): ").strip()
@@ -25,12 +28,12 @@ def interactive_add(config,USER_CONFIG_PATH:str):
confirm = input("Add this entry to user config? (y/N): ").strip().lower()
if confirm == "y":
if os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, 'r') as f:
with open(USER_CONFIG_PATH, "r") as f:
user_config = yaml.safe_load(f) or {}
else:
user_config = {"repositories": []}
user_config.setdefault("repositories", [])
user_config["repositories"].append(new_entry)
save_user_config(user_config,USER_CONFIG_PATH)
save_user_config(user_config, USER_CONFIG_PATH)
else:
print("Entry not added.")

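The flow reformatted above follows a common load-or-initialize pattern for YAML user configs. A minimal sketch with PyYAML; the function name and keys are illustrative, not the project's API:

    import os
    import yaml

    def append_repo_entry(path: str, entry: dict) -> None:
        # Load an existing config, or start a fresh one; an empty file
        # makes yaml.safe_load() return None, hence the `or {}`.
        if os.path.exists(path):
            with open(path, "r") as f:
                config = yaml.safe_load(f) or {}
        else:
            config = {"repositories": []}
        config.setdefault("repositories", [])
        config["repositories"].append(entry)
        with open(path, "w") as f:
            yaml.dump(config, f)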
View File

@@ -107,11 +107,15 @@ def config_init(
# Already known?
if key in default_keys:
skipped += 1
print(f"[SKIP] (defaults) {provider}/{account}/{repo_name}")
print(
f"[SKIP] (defaults) {provider}/{account}/{repo_name}"
)
continue
if key in existing_keys:
skipped += 1
print(f"[SKIP] (user-config) {provider}/{account}/{repo_name}")
print(
f"[SKIP] (user-config) {provider}/{account}/{repo_name}"
)
continue
print(f"[ADD] {provider}/{account}/{repo_name}")
@@ -121,7 +125,9 @@ def config_init(
if verified_commit:
print(f"[INFO] Latest commit: {verified_commit}")
else:
print("[WARN] Could not read commit (not a git repo or no commits).")
print(
"[WARN] Could not read commit (not a git repo or no commits)."
)
entry: Dict[str, Any] = {
"provider": provider,

View File

@@ -1,6 +1,7 @@
import yaml
from pkgmgr.core.config.load import load_config
def show_config(selected_repos, user_config_path, full_config=False):
"""Display configuration for one or more repositories, or the entire merged config."""
if full_config:
@@ -8,8 +9,10 @@ def show_config(selected_repos, user_config_path, full_config=False):
print(yaml.dump(merged, default_flow_style=False))
else:
for repo in selected_repos:
identifier = f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
identifier = (
f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"
)
print(f"Repository: {identifier}")
for key, value in repo.items():
print(f" {key}: {value}")
print("-" * 40)

View File

@@ -66,10 +66,7 @@ def _ensure_repo_dir(
repo_dir = get_repo_dir(repositories_base_dir, repo)
if not os.path.exists(repo_dir):
print(
f"Repository directory '{repo_dir}' does not exist. "
"Cloning it now..."
)
print(f"Repository directory '{repo_dir}' does not exist. Cloning it now...")
clone_repos(
[repo],
repositories_base_dir,
@@ -79,10 +76,7 @@ def _ensure_repo_dir(
clone_mode,
)
if not os.path.exists(repo_dir):
print(
f"Cloning failed for repository {identifier}. "
"Skipping installation."
)
print(f"Cloning failed for repository {identifier}. Skipping installation.")
return None
return repo_dir
@@ -115,7 +109,9 @@ def _verify_repo(
if silent:
# Non-interactive mode: continue with a warning.
print(f"[Warning] Continuing despite verification failure for {identifier} (--silent).")
print(
f"[Warning] Continuing despite verification failure for {identifier} (--silent)."
)
else:
choice = input("Continue anyway? [y/N]: ").strip().lower()
if choice != "y":
@@ -232,12 +228,16 @@ def install_repos(
code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"installer failed (exit={code})"))
if not quiet:
print(f"[Warning] install: repository {identifier} failed (exit={code}). Continuing...")
print(
f"[Warning] install: repository {identifier} failed (exit={code}). Continuing..."
)
continue
except Exception as exc:
failures.append((identifier, f"unexpected error: {exc}"))
if not quiet:
print(f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing...")
print(
f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing..."
)
continue
if failures and emit_summary and not quiet:

View File

@@ -14,6 +14,10 @@ from pkgmgr.actions.install.installers.python import PythonInstaller # noqa: F4
from pkgmgr.actions.install.installers.makefile import MakefileInstaller # noqa: F401
# OS-specific installers
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller # noqa: F401
from pkgmgr.actions.install.installers.os_packages.debian_control import DebianControlInstaller # noqa: F401
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import (
ArchPkgbuildInstaller,
) # noqa: F401
from pkgmgr.actions.install.installers.os_packages.debian_control import (
DebianControlInstaller,
) # noqa: F401
from pkgmgr.actions.install.installers.os_packages.rpm_spec import RpmSpecInstaller # noqa: F401

View File

@@ -41,7 +41,9 @@ class BaseInstaller(ABC):
return caps
for matcher in CAPABILITY_MATCHERS:
if matcher.applies_to_layer(self.layer) and matcher.is_provided(ctx, self.layer):
if matcher.applies_to_layer(self.layer) and matcher.is_provided(
ctx, self.layer
):
caps.add(matcher.name)
return caps

View File

@@ -16,7 +16,9 @@ class MakefileInstaller(BaseInstaller):
def supports(self, ctx: RepoContext) -> bool:
if os.environ.get("PKGMGR_DISABLE_MAKEFILE_INSTALLER") == "1":
if not ctx.quiet:
print("[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 skipping MakefileInstaller.")
print(
"[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 skipping MakefileInstaller."
)
return False
makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
@@ -46,7 +48,9 @@ class MakefileInstaller(BaseInstaller):
return
if not ctx.quiet:
print(f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)")
print(
f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)"
)
run_command("make install", cwd=ctx.repo_dir, preview=ctx.preview)

View File

@@ -57,7 +57,9 @@ class NixConflictResolver:
# 3) Fallback: output-name based lookup (also covers nix suggesting: `nix profile remove pkgmgr`)
if not tokens:
tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
tokens = self._profile.find_remove_tokens_for_output(
ctx, self._runner, output
)
if tokens:
if not quiet:
@@ -94,7 +96,9 @@ class NixConflictResolver:
continue
if not quiet:
print("[nix] conflict detected but could not resolve profile entries to remove.")
print(
"[nix] conflict detected but could not resolve profile entries to remove."
)
return False
return False

View File

@@ -75,7 +75,9 @@ class NixFlakeInstaller(BaseInstaller):
# Core install path
# ---------------------------------------------------------------------
def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
def _install_only(
self, ctx: "RepoContext", output: str, allow_failure: bool
) -> None:
install_cmd = f"nix profile install {self._installable(ctx, output)}"
if not ctx.quiet:
@@ -96,7 +98,9 @@ class NixFlakeInstaller(BaseInstaller):
output=output,
):
if not ctx.quiet:
print(f"[nix] output '{output}' successfully installed after conflict cleanup.")
print(
f"[nix] output '{output}' successfully installed after conflict cleanup."
)
return
if not ctx.quiet:
@@ -107,20 +111,26 @@ class NixFlakeInstaller(BaseInstaller):
# If indices are supported, try legacy index-upgrade path.
if self._indices_supported is not False:
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
indices = self._profile.find_installed_indices_for_output(
ctx, self._runner, output
)
upgraded = False
for idx in indices:
if self._upgrade_index(ctx, idx):
upgraded = True
if not ctx.quiet:
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
print(
f"[nix] output '{output}' successfully upgraded (index {idx})."
)
if upgraded:
return
if indices and not ctx.quiet:
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
print(
f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
)
for idx in indices:
self._remove_index(ctx, idx)
@@ -139,7 +149,9 @@ class NixFlakeInstaller(BaseInstaller):
print(f"[nix] output '{output}' successfully re-installed.")
return
print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})")
print(
f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})"
)
if not allow_failure:
raise SystemExit(final.returncode)
@@ -149,7 +161,9 @@ class NixFlakeInstaller(BaseInstaller):
# force_update path
# ---------------------------------------------------------------------
def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
def _force_upgrade_output(
self, ctx: "RepoContext", output: str, allow_failure: bool
) -> None:
# Prefer token path if indices unsupported (new nix)
if self._indices_supported is False:
self._remove_tokens_for_output(ctx, output)
@@ -158,14 +172,18 @@ class NixFlakeInstaller(BaseInstaller):
print(f"[nix] output '{output}' successfully upgraded.")
return
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
indices = self._profile.find_installed_indices_for_output(
ctx, self._runner, output
)
upgraded_any = False
for idx in indices:
if self._upgrade_index(ctx, idx):
upgraded_any = True
if not ctx.quiet:
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
print(
f"[nix] output '{output}' successfully upgraded (index {idx})."
)
if upgraded_any:
if not ctx.quiet:
@@ -173,7 +191,9 @@ class NixFlakeInstaller(BaseInstaller):
return
if indices and not ctx.quiet:
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
print(
f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
)
for idx in indices:
self._remove_index(ctx, idx)
@@ -223,7 +243,9 @@ class NixFlakeInstaller(BaseInstaller):
return
if not ctx.quiet:
print(f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}")
print(
f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}"
)
for t in tokens:
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)

View File

@@ -101,7 +101,9 @@ class NixProfileInspector:
data = self.list_json(ctx, runner)
entries = normalize_elements(data)
tokens: List[str] = [out] # critical: matches nix's own suggestion for conflicts
tokens: List[str] = [
out
] # critical: matches nix's own suggestion for conflicts
for e in entries:
if entry_matches_output(e, out):

View File

@@ -48,7 +48,9 @@ class NixProfileListReader:
return uniq
def indices_matching_store_prefixes(self, ctx: "RepoContext", prefixes: List[str]) -> List[int]:
def indices_matching_store_prefixes(
self, ctx: "RepoContext", prefixes: List[str]
) -> List[int]:
prefixes = [self._store_prefix(p) for p in prefixes if p]
prefixes = [p for p in prefixes if p]
if not prefixes:

View File

@@ -11,6 +11,7 @@ if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
from .runner import CommandRunner
@dataclass(frozen=True)
class RetryPolicy:
max_attempts: int = 7
@@ -35,13 +36,19 @@ class GitHubRateLimitRetry:
install_cmd: str,
) -> RunResult:
quiet = bool(getattr(ctx, "quiet", False))
delays = list(self._fibonacci_backoff(self._policy.base_delay_seconds, self._policy.max_attempts))
delays = list(
self._fibonacci_backoff(
self._policy.base_delay_seconds, self._policy.max_attempts
)
)
last: RunResult | None = None
for attempt, base_delay in enumerate(delays, start=1):
if not quiet:
print(f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}")
print(
f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}"
)
res = runner.run(ctx, install_cmd, allow_failure=True)
last = res
@@ -56,7 +63,9 @@ class GitHubRateLimitRetry:
if attempt >= self._policy.max_attempts:
break
jitter = random.randint(self._policy.jitter_seconds_min, self._policy.jitter_seconds_max)
jitter = random.randint(
self._policy.jitter_seconds_min, self._policy.jitter_seconds_max
)
wait_time = base_delay + jitter
if not quiet:
@@ -67,7 +76,11 @@ class GitHubRateLimitRetry:
time.sleep(wait_time)
return last if last is not None else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
return (
last
if last is not None
else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
)
@staticmethod
def _is_github_rate_limit_error(text: str) -> bool:

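The retry code reflowed above pairs a Fibonacci backoff schedule with random jitter. A free-standing sketch of that idea under the same default visible in the hunk (seven attempts); the names are illustrative, not the project's API:

    import random
    import time

    def fibonacci_backoff(base: float, attempts: int):
        # Yields base*Fib(n): base, base, 2*base, 3*base, 5*base, ...
        a, b = 1, 1
        for _ in range(attempts):
            yield base * a
            a, b = b, a + b

    def retry(op, *, attempts: int = 7, base: float = 1.0):
        last_exc = None
        for attempt, delay in enumerate(fibonacci_backoff(base, attempts), start=1):
            try:
                return op()
            except RuntimeError as exc:  # real code would match a rate-limit error
                last_exc = exc
                if attempt >= attempts:
                    break
                time.sleep(delay + random.randint(0, 3))  # jitter
        raise last_exc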
View File

@@ -9,6 +9,7 @@ from .types import RunResult
if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
class CommandRunner:
"""
Executes commands (shell=True) inside a repository directory (if provided).
@@ -40,7 +41,9 @@ class CommandRunner:
raise
return RunResult(returncode=1, stdout="", stderr=str(e))
res = RunResult(returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or "")
res = RunResult(
returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or ""
)
if res.returncode != 0 and not quiet:
self._print_compact_failure(res)

View File

@@ -20,7 +20,9 @@ class NixConflictTextParser:
tokens: List[str] = []
for m in pat.finditer(text or ""):
t = (m.group(1) or "").strip()
if (t.startswith("'") and t.endswith("'")) or (t.startswith('"') and t.endswith('"')):
if (t.startswith("'") and t.endswith("'")) or (
t.startswith('"') and t.endswith('"')
):
t = t[1:-1]
if t:
tokens.append(t)

View File

@@ -14,7 +14,9 @@ class PythonInstaller(BaseInstaller):
def supports(self, ctx: RepoContext) -> bool:
if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
print("[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER.")
print(
"[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER."
)
return False
return os.path.exists(os.path.join(ctx.repo_dir, "pyproject.toml"))

View File

@@ -132,7 +132,11 @@ class InstallationPipeline:
continue
if not quiet:
if ctx.force_update and state.layer is not None and installer_layer == state.layer:
if (
ctx.force_update
and state.layer is not None
and installer_layer == state.layer
):
print(
f"[pkgmgr] Running installer {installer.__class__.__name__} "
f"for {identifier} in '{repo_dir}' (upgrade requested)..."

View File

@@ -16,6 +16,7 @@ from .types import MirrorMap, Repository
# Helpers
# -----------------------------------------------------------------------------
def _repo_key(repo: Repository) -> Tuple[str, str, str]:
"""
Normalised key for identifying a repository in config files.
@@ -47,6 +48,7 @@ def _load_user_config(path: str) -> Dict[str, object]:
# Main merge command
# -----------------------------------------------------------------------------
def merge_mirrors(
selected_repos: List[Repository],
repositories_base_dir: str,

View File

@@ -66,7 +66,9 @@ def _setup_remote_mirrors_for_repo(
# Probe only git URLs (do not try ls-remote against PyPI etc.)
# If there are no mirrors at all, probe the primary git URL.
git_mirrors = {k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)}
git_mirrors = {
k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)
}
if not git_mirrors:
primary = determine_primary_remote_url(repo, ctx)

View File

@@ -17,7 +17,7 @@ def hostport_from_git_url(url: str) -> Tuple[str, Optional[str]]:
netloc = netloc.split("@", 1)[1]
if netloc.startswith("[") and "]" in netloc:
host = netloc[1:netloc.index("]")]
host = netloc[1 : netloc.index("]")]
rest = netloc[netloc.index("]") + 1 :]
port = rest[1:] if rest.startswith(":") else None
return host.strip(), (port.strip() if port else None)
@@ -43,7 +43,7 @@ def normalize_provider_host(host: str) -> str:
return ""
if host.startswith("[") and "]" in host:
host = host[1:host.index("]")]
host = host[1 : host.index("]")]
if ":" in host and host.count(":") == 1:
host = host.rsplit(":", 1)[0]

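The slice-spacing changes above touch host/port extraction from git URLs, including bracketed IPv6 hosts. A standalone sketch of the same parsing idea (not the project's exact function):

    from typing import Optional, Tuple

    def hostport(netloc: str) -> Tuple[str, Optional[str]]:
        # Split 'user@[::1]:2222' or 'user@host:22' into (host, port).
        if "@" in netloc:
            netloc = netloc.split("@", 1)[1]  # drop userinfo
        if netloc.startswith("[") and "]" in netloc:
            host = netloc[1 : netloc.index("]")]    # text inside the brackets
            rest = netloc[netloc.index("]") + 1 :]  # ':port' or ''
            return host, (rest[1:] or None) if rest.startswith(":") else None
        host, sep, port = netloc.partition(":")
        return host, (port or None) if sep else None

    # hostport("git@[2001:db8::1]:2222") -> ("2001:db8::1", "2222")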
View File

@@ -4,7 +4,16 @@ from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.command.run import run_command
import sys
def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir, all_repos, proxy_command: str, extra_args, preview: bool):
def exec_proxy_command(
proxy_prefix: str,
selected_repos,
repositories_base_dir,
all_repos,
proxy_command: str,
extra_args,
preview: bool,
):
"""Execute a given proxy command with extra arguments for each repository."""
error_repos = []
max_exit_code = 0
@@ -22,7 +31,9 @@ def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir,
try:
run_command(full_cmd, cwd=repo_dir, preview=preview)
except SystemExit as e:
print(f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}.")
print(
f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}."
)
error_repos.append((repo_identifier, e.code))
max_exit_code = max(max_exit_code, e.code)
@@ -30,4 +41,4 @@ def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir,
print("\nSummary of failed commands:")
for repo_identifier, exit_code in error_repos:
print(f"- {repo_identifier} failed with exit code {exit_code}")
sys.exit(max_exit_code)

View File

@@ -121,7 +121,7 @@ def update_pyproject_version(
pattern = r'^(version\s*=\s*")([^"]+)(")'
new_content, count = re.subn(
pattern,
lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
lambda m: f"{m.group(1)}{new_version}{m.group(3)}",
content,
flags=re.MULTILINE,
)
@@ -162,7 +162,7 @@ def update_flake_version(
pattern = r'(version\s*=\s*")([^"]+)(")'
new_content, count = re.subn(
pattern,
lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
lambda m: f"{m.group(1)}{new_version}{m.group(3)}",
content,
)

View File

@@ -80,7 +80,9 @@ def is_highest_version_tag(tag: str) -> bool:
return True
latest = max(parsed_all)
print(f"[INFO] Latest tag (parsed): v{'.'.join(map(str, latest))}, Current tag: {tag}")
print(
f"[INFO] Latest tag (parsed): v{'.'.join(map(str, latest))}, Current tag: {tag}"
)
return parsed_current >= latest
@@ -93,7 +95,9 @@ def update_latest_tag(new_tag: str, *, preview: bool = False) -> None:
- 'latest' is forced (floating tag), therefore the push uses --force.
"""
target_ref = f"{new_tag}^{{}}"
print(f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})...")
print(
f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})..."
)
tag_force_annotated(
name="latest",

View File

@@ -76,7 +76,9 @@ def _release_impl(
if paths.arch_pkgbuild:
update_pkgbuild_version(paths.arch_pkgbuild, new_ver_str, preview=preview)
else:
print("[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping.")
print(
"[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping."
)
if paths.rpm_spec:
update_spec_version(paths.rpm_spec, new_ver_str, preview=preview)
@@ -123,7 +125,9 @@ def _release_impl(
paths.rpm_spec,
paths.debian_changelog,
]
existing_files = [p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)]
existing_files = [
p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)
]
if preview:
add(existing_files, preview=True)
@@ -135,13 +139,17 @@ def _release_impl(
if is_highest_version_tag(new_tag):
update_latest_tag(new_tag, preview=True)
else:
print(f"[PREVIEW] Skipping 'latest' update (tag {new_tag} is not the highest).")
print(
f"[PREVIEW] Skipping 'latest' update (tag {new_tag} is not the highest)."
)
if close and branch not in ("main", "master"):
if force:
print(f"[PREVIEW] Would delete branch {branch} (forced).")
else:
print(f"[PREVIEW] Would ask whether to delete branch {branch} after release.")
print(
f"[PREVIEW] Would ask whether to delete branch {branch} after release."
)
return
add(existing_files, preview=False)
@@ -157,7 +165,9 @@ def _release_impl(
if is_highest_version_tag(new_tag):
update_latest_tag(new_tag, preview=False)
else:
print(f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest).")
print(
f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest)."
)
except GitRunError as exc:
print(f"[WARN] Failed to update floating 'latest' tag for {new_tag}: {exc}")
print("'latest' tag was not updated.")
@@ -166,7 +176,9 @@ def _release_impl(
if close:
if branch in ("main", "master"):
print(f"[INFO] close=True but current branch is {branch}; skipping branch deletion.")
print(
f"[INFO] close=True but current branch is {branch}; skipping branch deletion."
)
return
if not should_delete_branch(force=force):

View File

@@ -55,7 +55,9 @@ def clone_repos(
clone_url = _build_clone_url(repo, clone_mode)
if not clone_url:
print(f"[WARNING] Cannot build clone URL for '{repo_identifier}'. Skipping.")
print(
f"[WARNING] Cannot build clone URL for '{repo_identifier}'. Skipping."
)
continue
shallow = clone_mode == "shallow"
@@ -84,7 +86,11 @@ def clone_repos(
continue
print(f"[WARNING] SSH clone failed for '{repo_identifier}': {exc}")
choice = input("Do you want to attempt HTTPS clone instead? (y/N): ").strip().lower()
choice = (
input("Do you want to attempt HTTPS clone instead? (y/N): ")
.strip()
.lower()
)
if choice != "y":
print(f"[INFO] HTTPS clone not attempted for '{repo_identifier}'.")
continue

View File

@@ -63,6 +63,4 @@ def _strip_git_suffix(name: str) -> str:
def _ensure_valid_repo_name(name: str) -> None:
if not _NAME_RE.fullmatch(name):
raise ValueError(
"Repository name must match: lowercase a-z, 0-9, '_' and '-'."
)
raise ValueError("Repository name must match: lowercase a-z, 0-9, '_' and '-'.")

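The validation reflowed above checks names against a module-level regex that the hunk does not show. A plausible equivalent, assuming exactly the rule stated in the error message:

    import re

    # Assumed pattern -- the hunk only shows the error message, not _NAME_RE itself.
    _NAME_RE = re.compile(r"[a-z0-9_-]+")

    def ensure_valid_repo_name(name: str) -> None:
        if not _NAME_RE.fullmatch(name):
            raise ValueError("Repository name must match: lowercase a-z, 0-9, '_' and '-'.")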
View File

@@ -66,9 +66,7 @@ class TemplateRenderer:
for root, _, files in os.walk(self.templates_dir):
for fn in files:
if fn.endswith(".j2"):
rel = os.path.relpath(
os.path.join(root, fn), self.templates_dir
)
rel = os.path.relpath(os.path.join(root, fn), self.templates_dir)
print(f"[Preview] Would render template: {rel} -> {rel[:-3]}")
@staticmethod

View File

@@ -24,9 +24,13 @@ def deinstall_repos(
# Remove alias link/file (interactive)
if os.path.exists(alias_path):
confirm = input(
f"Are you sure you want to delete link '{alias_path}' for {repo_identifier}? [y/N]: "
).strip().lower()
confirm = (
input(
f"Are you sure you want to delete link '{alias_path}' for {repo_identifier}? [y/N]: "
)
.strip()
.lower()
)
if confirm == "y":
if preview:
print(f"[Preview] Would remove link '{alias_path}'.")

View File

@@ -3,22 +3,33 @@ import os
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir
def delete_repos(selected_repos, repositories_base_dir, all_repos, preview=False):
for repo in selected_repos:
repo_identifier = get_repo_identifier(repo, all_repos)
repo_dir = get_repo_dir(repositories_base_dir, repo)
if os.path.exists(repo_dir):
confirm = input(f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: ").strip().lower()
confirm = (
input(
f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: "
)
.strip()
.lower()
)
if confirm == "y":
if preview:
print(f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}.")
print(
f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}."
)
else:
try:
shutil.rmtree(repo_dir)
print(f"Deleted repository directory '{repo_dir}' for {repo_identifier}.")
print(
f"Deleted repository directory '{repo_dir}' for {repo_identifier}."
)
except Exception as e:
print(f"Error deleting '{repo_dir}' for {repo_identifier}: {e}")
else:
print(f"Skipped deletion of '{repo_dir}' for {repo_identifier}.")
else:
print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")
print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")

View File

@@ -233,9 +233,7 @@ def list_repositories(
categories.append(str(repo["category"]))
yaml_tags: List[str] = list(map(str, repo.get("tags", [])))
display_tags: List[str] = sorted(
set(yaml_tags + list(map(str, extra_tags)))
)
display_tags: List[str] = sorted(set(yaml_tags + list(map(str, extra_tags))))
rows.append(
{
@@ -288,13 +286,7 @@ def list_repositories(
status_padded = status.ljust(status_width)
status_colored = _color_status(status_padded)
print(
f"{ident_col} "
f"{status_colored} "
f"{cat_col} "
f"{tag_col} "
f"{dir_col}"
)
print(f"{ident_col} {status_colored} {cat_col} {tag_col} {dir_col}")
# ------------------------------------------------------------------
# Detailed section (alias value red, same status coloring)

View File

@@ -55,12 +55,16 @@ class UpdateManager:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"pull failed (exit={code})"))
if not quiet:
print(f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing...")
print(
f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing..."
)
continue
except Exception as exc:
failures.append((identifier, f"pull failed: {exc}"))
if not quiet:
print(f"[Warning] update: pull failed for {identifier}: {exc}. Continuing...")
print(
f"[Warning] update: pull failed for {identifier}: {exc}. Continuing..."
)
continue
try:
@@ -82,12 +86,16 @@ class UpdateManager:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"install failed (exit={code})"))
if not quiet:
print(f"[Warning] update: install failed for {identifier} (exit={code}). Continuing...")
print(
f"[Warning] update: install failed for {identifier} (exit={code}). Continuing..."
)
continue
except Exception as exc:
failures.append((identifier, f"install failed: {exc}"))
if not quiet:
print(f"[Warning] update: install failed for {identifier}: {exc}. Continuing...")
print(
f"[Warning] update: install failed for {identifier}: {exc}. Continuing..."
)
continue
if failures and not quiet:

View File

@@ -31,6 +31,7 @@ class OSReleaseInfo:
"""
Minimal /etc/os-release representation for distro detection.
"""
id: str = ""
id_like: str = ""
pretty_name: str = ""
@@ -63,4 +64,6 @@ class OSReleaseInfo:
def is_fedora_family(self) -> bool:
ids = self.ids()
return bool(ids.intersection({"fedora", "rhel", "centos", "rocky", "almalinux"}))
return bool(
ids.intersection({"fedora", "rhel", "centos", "rocky", "almalinux"})
)

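The `is_fedora_family` check above operates on fields parsed from /etc/os-release. A simplified reader for the two fields it needs (a sketch, not the project's parser):

    def read_os_release(path: str = "/etc/os-release") -> dict:
        # Parse KEY=VALUE lines, stripping optional quotes around the value.
        info: dict = {}
        try:
            with open(path) as f:
                for line in f:
                    line = line.strip()
                    if line and not line.startswith("#") and "=" in line:
                        key, _, val = line.partition("=")
                        info[key] = val.strip().strip('"').strip("'")
        except FileNotFoundError:
            pass
        return info

    def os_ids(info: dict) -> set:
        # ID is a single token; ID_LIKE is a space-separated list.
        return {s for s in [info.get("ID", "")] + info.get("ID_LIKE", "").split() if s}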
View File

@@ -58,7 +58,9 @@ class SystemUpdater:
run_command("sudo pacman -Syu --noconfirm", preview=preview)
return
print("[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman).")
print(
"[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman)."
)
def _update_debian(self, *, preview: bool) -> None:
from pkgmgr.core.command.run import run_command
@@ -67,7 +69,9 @@ class SystemUpdater:
apt_get = shutil.which("apt-get")
if not (sudo and apt_get):
print("[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get).")
print(
"[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get)."
)
return
env = "DEBIAN_FRONTEND=noninteractive"

View File

@@ -29,6 +29,7 @@ For details on any command, run:
\033[1mpkgmgr <command> --help\033[0m
"""
def main() -> None:
"""
Entry point for the pkgmgr CLI.
@@ -41,9 +42,7 @@ def main() -> None:
repositories_dir = os.path.expanduser(
directories.get("repositories", "~/Repositories")
)
binaries_dir = os.path.expanduser(
directories.get("binaries", "~/.local/bin")
)
binaries_dir = os.path.expanduser(directories.get("binaries", "~/.local/bin"))
# Ensure the merged config actually contains the resolved directories
config_merged.setdefault("directories", {})

View File

@@ -135,9 +135,7 @@ def handle_changelog(
target_tag=range_arg,
)
if cur_tag is None:
print(
f"[WARN] Tag {range_arg!r} not found or not a SemVer tag."
)
print(f"[WARN] Tag {range_arg!r} not found or not a SemVer tag.")
print("[INFO] Falling back to full history.")
from_ref = None
to_ref = None

View File

@@ -213,9 +213,7 @@ def handle_config(args, ctx: CLIContext) -> None:
)
if key == mod_key:
entry["ignore"] = args.set == "true"
print(
f"Set ignore for {key} to {entry['ignore']}"
)
print(f"Set ignore for {key} to {entry['ignore']}")
save_user_config(user_config, user_config_path)
return

View File

@@ -4,7 +4,12 @@ from __future__ import annotations
import sys
from typing import Any, Dict, List
from pkgmgr.actions.mirror import diff_mirrors, list_mirrors, merge_mirrors, setup_mirrors
from pkgmgr.actions.mirror import (
diff_mirrors,
list_mirrors,
merge_mirrors,
setup_mirrors,
)
from pkgmgr.cli.context import CLIContext
Repository = Dict[str, Any]
@@ -56,11 +61,15 @@ def handle_mirror_command(
preview = getattr(args, "preview", False)
if source == target:
print("[ERROR] For 'mirror merge', source and target must differ (config vs file).")
print(
"[ERROR] For 'mirror merge', source and target must differ (config vs file)."
)
sys.exit(2)
explicit_config_path = getattr(args, "config_path", None)
user_config_path = explicit_config_path or getattr(ctx, "user_config_path", None)
user_config_path = explicit_config_path or getattr(
ctx, "user_config_path", None
)
merge_mirrors(
selected_repos=selected,

View File

@@ -18,7 +18,9 @@ def handle_publish(args, ctx: CLIContext, selected: List[Repository]) -> None:
for repo in selected:
identifier = get_repo_identifier(repo, ctx.all_repositories)
repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
repo_dir = repo.get("directory") or get_repo_dir(
ctx.repositories_base_dir, repo
)
if not os.path.isdir(repo_dir):
print(f"[WARN] Skipping {identifier}: directory missing.")

View File

@@ -36,9 +36,13 @@ def handle_release(
identifier = get_repo_identifier(repo, ctx.all_repositories)
try:
repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
repo_dir = repo.get("directory") or get_repo_dir(
ctx.repositories_base_dir, repo
)
except Exception as exc:
print(f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}")
print(
f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}"
)
continue
if not os.path.isdir(repo_dir):

View File

@@ -32,9 +32,8 @@ def _resolve_repository_directory(repository: Repository, ctx: CLIContext) -> st
if repo_dir:
return repo_dir
base_dir = (
getattr(ctx, "repositories_base_dir", None)
or getattr(ctx, "repositories_dir", None)
base_dir = getattr(ctx, "repositories_base_dir", None) or getattr(
ctx, "repositories_dir", None
)
if not base_dir:
raise RuntimeError(

View File

@@ -38,9 +38,9 @@ def _print_pkgmgr_self_version() -> None:
# Common distribution/module naming variants.
python_candidates = [
"package-manager", # PyPI dist name in your project
"package_manager", # module-ish variant
"pkgmgr", # console/alias-ish
"package-manager", # PyPI dist name in your project
"package_manager", # module-ish variant
"pkgmgr", # console/alias-ish
]
nix_candidates = [
"pkgmgr",

View File

@@ -33,8 +33,7 @@ def add_branch_subparsers(
"name",
nargs="?",
help=(
"Name of the new branch (optional; will be asked interactively "
"if omitted)"
"Name of the new branch (optional; will be asked interactively if omitted)"
),
)
branch_open.add_argument(
@@ -54,8 +53,7 @@ def add_branch_subparsers(
"name",
nargs="?",
help=(
"Name of the branch to close (optional; current branch is used "
"if omitted)"
"Name of the branch to close (optional; current branch is used if omitted)"
),
)
branch_close.add_argument(
@@ -84,8 +82,7 @@ def add_branch_subparsers(
"name",
nargs="?",
help=(
"Name of the branch to drop (optional; current branch is used "
"if omitted)"
"Name of the branch to drop (optional; current branch is used if omitted)"
),
)
branch_drop.add_argument(

View File

@@ -20,7 +20,9 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
required=True,
)
mirror_list = mirror_subparsers.add_parser("list", help="List configured mirrors for repositories")
mirror_list = mirror_subparsers.add_parser(
"list", help="List configured mirrors for repositories"
)
add_identifier_arguments(mirror_list)
mirror_list.add_argument(
"--source",
@@ -29,15 +31,21 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
help="Which mirror source to show.",
)
mirror_diff = mirror_subparsers.add_parser("diff", help="Show differences between config mirrors and MIRRORS file")
mirror_diff = mirror_subparsers.add_parser(
"diff", help="Show differences between config mirrors and MIRRORS file"
)
add_identifier_arguments(mirror_diff)
mirror_merge = mirror_subparsers.add_parser(
"merge",
help="Merge mirrors between config and MIRRORS file (example: pkgmgr mirror merge config file --all)",
)
mirror_merge.add_argument("source", choices=["config", "file"], help="Source of mirrors.")
mirror_merge.add_argument("target", choices=["config", "file"], help="Target of mirrors.")
mirror_merge.add_argument(
"source", choices=["config", "file"], help="Source of mirrors."
)
mirror_merge.add_argument(
"target", choices=["config", "file"], help="Target of mirrors."
)
add_identifier_arguments(mirror_merge)
mirror_merge.add_argument(
"--config-path",

View File

@@ -48,9 +48,6 @@ def add_navigation_subparsers(
"--command",
nargs=argparse.REMAINDER,
dest="shell_command",
help=(
"The shell command (and its arguments) to execute in each "
"repository"
),
help=("The shell command (and its arguments) to execute in each repository"),
default=[],
)

View File

@@ -53,10 +53,7 @@ def _add_proxy_identifier_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"identifiers",
nargs="*",
help=(
"Identifier(s) for repositories. "
"Default: Repository of current folder."
),
help=("Identifier(s) for repositories. Default: Repository of current folder."),
)
parser.add_argument(
"--all",
@@ -118,12 +115,7 @@ def _proxy_has_explicit_selection(args: argparse.Namespace) -> bool:
string_filter = getattr(args, "string", "") or ""
# Proxy commands currently do not support --tag, so it is not checked here.
return bool(
use_all
or identifiers
or categories
or string_filter
)
return bool(use_all or identifiers or categories or string_filter)
def _select_repo_for_current_directory(
@@ -204,9 +196,7 @@ def maybe_handle_proxy(args: argparse.Namespace, ctx: CLIContext) -> bool:
If the top-level command is one of the proxy subcommands
(git / docker / docker compose), handle it here and return True.
"""
all_proxy_subcommands = {
sub for subs in PROXY_COMMANDS.values() for sub in subs
}
all_proxy_subcommands = {sub for subs in PROXY_COMMANDS.values() for sub in subs}
if args.command not in all_proxy_subcommands:
return False

View File

@@ -22,9 +22,8 @@ def resolve_repository_path(repository: Repository, ctx: CLIContext) -> str:
if value:
return value
base_dir = (
getattr(ctx, "repositories_base_dir", None)
or getattr(ctx, "repositories_dir", None)
base_dir = getattr(ctx, "repositories_base_dir", None) or getattr(
ctx, "repositories_dir", None
)
if not base_dir:
raise RuntimeError(

View File

@@ -57,7 +57,9 @@ def _build_workspace_filename(identifiers: List[str]) -> str:
return "_".join(sorted_identifiers) + ".code-workspace"
def _build_workspace_data(selected: List[Repository], ctx: CLIContext) -> Dict[str, Any]:
def _build_workspace_data(
selected: List[Repository], ctx: CLIContext
) -> Dict[str, Any]:
folders = [{"path": resolve_repository_path(repo, ctx)} for repo in selected]
return {
"folders": folders,

View File

@@ -2,10 +2,11 @@ import os
import hashlib
import re
def generate_alias(repo, bin_dir, existing_aliases):
"""
Generate an alias for a repository based on its repository name.
Steps:
1. Keep only consonants from the repository name (letters from BCDFGHJKLMNPQRSTVWXYZ).
2. Collapse consecutive identical consonants.
@@ -39,4 +40,4 @@ def generate_alias(repo, bin_dir, existing_aliases):
while conflict(candidate3):
candidate3 += "x"
candidate3 = candidate3[:12]
return candidate3

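The generator above documents its first two steps (keep consonants, collapse identical runs). A compact sketch of just those steps, independent of the project's conflict handling:

    import re

    CONSONANTS = set("bcdfghjklmnpqrstvwxyz")

    def alias_stem(repo_name: str) -> str:
        # Step 1: keep consonants only.  Step 2: collapse identical runs.
        kept = "".join(c for c in repo_name.lower() if c in CONSONANTS)
        return re.sub(r"(.)\1+", r"\1", kept)

    # alias_stem("package-manager") -> "pckgmngr"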
View File

@@ -98,8 +98,7 @@ def create_ink(
if alias_name == repo_identifier:
if not quiet:
print(
f"Alias '{alias_name}' equals identifier. "
"Skipping alias creation."
f"Alias '{alias_name}' equals identifier. Skipping alias creation."
)
return

View File

@@ -8,6 +8,7 @@ class CliLayer(str, Enum):
"""
CLI layer precedence (lower number = stronger layer).
"""
OS_PACKAGES = "os-packages"
NIX = "nix"
PYTHON = "python"

View File

@@ -34,11 +34,7 @@ def _nix_binary_candidates(home: str, names: List[str]) -> List[str]:
"""
Build possible Nix profile binary paths for a list of candidate names.
"""
return [
os.path.join(home, ".nix-profile", "bin", name)
for name in names
if name
]
return [os.path.join(home, ".nix-profile", "bin", name) for name in names if name]
def _path_binary_candidates(names: List[str]) -> List[str]:
@@ -148,7 +144,8 @@ def resolve_command_for_repo(
# c) Nix profile binaries
nix_binaries = [
path for path in _nix_binary_candidates(home, candidate_names)
path
for path in _nix_binary_candidates(home, candidate_names)
if _is_executable(path)
]
nix_binary = nix_binaries[0] if nix_binaries else None

View File

@@ -51,6 +51,7 @@ Repo = Dict[str, Any]
# Helpers
# ---------------------------------------------------------------------------
def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
"""
Recursively merge two dictionaries.
@@ -58,11 +59,7 @@ def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any
Values from `override` win over values in `base`.
"""
for key, value in override.items():
if (
key in base
and isinstance(base[key], dict)
and isinstance(value, dict)
):
if key in base and isinstance(base[key], dict) and isinstance(value, dict):
_deep_merge(base[key], value)
else:
base[key] = value
@@ -93,9 +90,7 @@ def _merge_repo_lists(
- If category_name is set, it is recorded in repo["category_files"].
"""
index: Dict[Tuple[str, str, str], Repo] = {
_repo_key(r): r for r in base_list
}
index: Dict[Tuple[str, str, str], Repo] = {_repo_key(r): r for r in base_list}
for src in new_list:
key = _repo_key(src)
@@ -233,10 +228,12 @@ def _load_defaults_from_package_or_project() -> Dict[str, Any]:
return {"directories": {}, "repositories": []}
# ---------------------------------------------------------------------------
# Main function
# ---------------------------------------------------------------------------
def load_config(user_config_path: str) -> Dict[str, Any]:
"""
Load and merge configuration for pkgmgr.
@@ -289,8 +286,12 @@ def load_config(user_config_path: str) -> Dict[str, Any]:
# repositories
merged["repositories"] = []
_merge_repo_lists(merged["repositories"], defaults["repositories"], category_name=None)
_merge_repo_lists(merged["repositories"], user_cfg["repositories"], category_name=None)
_merge_repo_lists(
merged["repositories"], defaults["repositories"], category_name=None
)
_merge_repo_lists(
merged["repositories"], user_cfg["repositories"], category_name=None
)
# other top-level keys (if present)
other_keys = (set(defaults.keys()) | set(user_cfg.keys())) - {

View File

@@ -1,9 +1,10 @@
import yaml
import os
def save_user_config(user_config,USER_CONFIG_PATH:str):
def save_user_config(user_config, USER_CONFIG_PATH: str):
"""Save the user configuration to USER_CONFIG_PATH."""
os.makedirs(os.path.dirname(USER_CONFIG_PATH), exist_ok=True)
with open(USER_CONFIG_PATH, 'w') as f:
with open(USER_CONFIG_PATH, "w") as f:
yaml.dump(user_config, f)
print(f"User configuration updated in {USER_CONFIG_PATH}.")

View File

@@ -16,7 +16,9 @@ class EnvTokenProvider:
source_name: str = "env"
def get(self, request: TokenRequest) -> Optional[TokenResult]:
for key in env_var_candidates(request.provider_kind, request.host, request.owner):
for key in env_var_candidates(
request.provider_kind, request.host, request.owner
):
val = os.environ.get(key)
if val:
return TokenResult(token=val.strip(), source=self.source_name)

View File

@@ -15,6 +15,7 @@ class GhTokenProvider:
This does NOT persist anything; it only reads what `gh` already knows.
"""
source_name: str = "gh"
def get(self, request: TokenRequest) -> Optional[TokenResult]:

View File

@@ -21,9 +21,7 @@ def _import_keyring():
try:
import keyring # type: ignore
except Exception as exc: # noqa: BLE001
raise KeyringUnavailableError(
"python-keyring is not installed."
) from exc
raise KeyringUnavailableError("python-keyring is not installed.") from exc
# Some environments have keyring installed but no usable backend.
# We do a lightweight "backend sanity check" by attempting to read the backend.

View File

@@ -9,7 +9,12 @@ from .providers.env import EnvTokenProvider
from .providers.gh import GhTokenProvider
from .providers.keyring import KeyringTokenProvider
from .providers.prompt import PromptTokenProvider
from .types import KeyringUnavailableError, NoCredentialsError, TokenRequest, TokenResult
from .types import (
KeyringUnavailableError,
NoCredentialsError,
TokenRequest,
TokenResult,
)
from .validate import validate_token
@@ -55,7 +60,10 @@ class TokenResolver:
print(f" {msg}", file=sys.stderr)
print(" Tokens will NOT be persisted securely.", file=sys.stderr)
print("", file=sys.stderr)
print(" To enable secure token storage, install python-keyring:", file=sys.stderr)
print(
" To enable secure token storage, install python-keyring:",
file=sys.stderr,
)
print(" pip install keyring", file=sys.stderr)
print("", file=sys.stderr)
print(" Or install via system packages:", file=sys.stderr)

View File

@@ -13,7 +13,9 @@ class KeyringKey:
username: str
def build_keyring_key(provider_kind: str, host: str, owner: Optional[str]) -> KeyringKey:
def build_keyring_key(
provider_kind: str, host: str, owner: Optional[str]
) -> KeyringKey:
"""Build a stable keyring key.
- service: "pkgmgr:<provider>"
@@ -21,11 +23,15 @@ def build_keyring_key(provider_kind: str, host: str, owner: Optional[str]) -> Ke
"""
provider_kind = str(provider_kind).strip().lower()
host = str(host).strip()
owner_part = (str(owner).strip() if owner else "-")
return KeyringKey(service=f"pkgmgr:{provider_kind}", username=f"{host}|{owner_part}")
owner_part = str(owner).strip() if owner else "-"
return KeyringKey(
service=f"pkgmgr:{provider_kind}", username=f"{host}|{owner_part}"
)
def env_var_candidates(provider_kind: str, host: str, owner: Optional[str]) -> list[str]:
def env_var_candidates(
provider_kind: str, host: str, owner: Optional[str]
) -> list[str]:
"""Return a list of environment variable names to try.
Order is from most specific to most generic.
@@ -44,7 +50,7 @@ def env_var_candidates(provider_kind: str, host: str, owner: Optional[str]) -> l
candidates.append(f"PKGMGR_{kind}_TOKEN")
candidates.append(f"PKGMGR_TOKEN_{kind}")
candidates.append("PKGMGR_TOKEN")
return candidates

View File

@@ -18,4 +18,6 @@ def add_all(*, cwd: str = ".", preview: bool = False) -> None:
try:
run(["add", "-A"], cwd=cwd, preview=preview)
except GitRunError as exc:
raise GitAddAllError("Failed to stage all changes with `git add -A`.", cwd=cwd) from exc
raise GitAddAllError(
"Failed to stage all changes with `git add -A`.", cwd=cwd
) from exc

View File

@@ -18,4 +18,6 @@ def branch_move(branch: str, *, cwd: str = ".", preview: bool = False) -> None:
try:
run(["branch", "-M", branch], cwd=cwd, preview=preview)
except GitRunError as exc:
raise GitBranchMoveError(f"Failed to move/rename current branch to {branch!r}.", cwd=cwd) from exc
raise GitBranchMoveError(
f"Failed to move/rename current branch to {branch!r}.", cwd=cwd
) from exc

View File

@@ -4,21 +4,26 @@ from __future__ import annotations
class GitBaseError(RuntimeError):
"""Base error raised for Git related failures."""
class GitRunError(GitBaseError):
"""Base error raised for Git related failures."""
class GitNotRepositoryError(GitBaseError):
"""Raised when the current working directory is not a git repository."""
class GitQueryError(GitRunError):
"""Base class for read-only git query failures."""
class GitCommandError(GitRunError):
"""
Base class for state-changing git command failures.
Use subclasses to provide stable error types for callers.
"""
def __init__(self, message: str, *, cwd: str = ".") -> None:
super().__init__(message)
if cwd in locals():

View File

@@ -16,6 +16,7 @@ def _is_missing_key_error(exc: GitRunError) -> bool:
# 'git config --get' returns exit code 1 when the key is not set.
return "exit code: 1" in msg
def get_config_value(key: str, *, cwd: str = ".") -> Optional[str]:
"""
Return a value from `git config --get <key>`, or None if not set.

View File

@@ -15,4 +15,4 @@ def get_current_branch(cwd: str = ".") -> Optional[str]:
output = run(["rev-parse", "--abbrev-ref", "HEAD"], cwd=cwd)
except GitRunError:
return None
return output or None

View File

@@ -30,4 +30,4 @@ def get_remote_head_commit(
) from exc
# minimal parsing: first token is the hash
return (out.split()[0].strip() if out else "")
return out.split()[0].strip() if out else ""

View File

@@ -4,6 +4,7 @@ from typing import Set
from ..run import run
def get_remote_push_urls(remote: str, cwd: str = ".") -> Set[str]:
"""
Return all push URLs configured for a remote.

View File

@@ -44,9 +44,7 @@ def run(
stderr = exc.stderr or ""
if _is_not_repo_error(stderr):
raise GitNotRepositoryError(
f"Not a git repository: {cwd!r}\n"
f"Command: {cmd_str}\n"
f"STDERR:\n{stderr}"
f"Not a git repository: {cwd!r}\nCommand: {cmd_str}\nSTDERR:\n{stderr}"
) from exc
raise GitRunError(

View File

@@ -34,7 +34,15 @@ def get_repo_dir(repositories_base_dir: str, repo: Dict[str, Any]) -> str:
account = repo.get("account")
repository = repo.get("repository")
missing = [k for k, v in [("provider", provider), ("account", account), ("repository", repository)] if not v]
missing = [
k
for k, v in [
("provider", provider),
("account", account),
("repository", repository),
]
if not v
]
if missing:
print(
"Error: repository entry is missing required keys.\n"

View File

@@ -9,4 +9,4 @@ def get_repo_identifier(repo, all_repos):
if count == 1:
return repo_name
else:
return f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
return f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"

View File

@@ -1,3 +1,3 @@
def filter_ignored(repos):
"""Filter out repositories that have 'ignore' set to True."""
return [r for r in repos if not r.get("ignore", False)]

View File

@@ -109,7 +109,9 @@ def resolve_repo_paths(repo_dir: str) -> RepoPaths:
]
)
if rpm_spec is None:
rpm_spec = _find_first_spec_in_dir(os.path.join(repo_dir, "packaging", "fedora"))
rpm_spec = _find_first_spec_in_dir(
os.path.join(repo_dir, "packaging", "fedora")
)
if rpm_spec is None:
rpm_spec = _find_first_spec_in_dir(repo_dir)

View File

@@ -1,5 +1,4 @@
def resolve_repos(identifiers:[], all_repos:[]):
def resolve_repos(identifiers: [], all_repos: []):
"""
Given a list of identifier strings, return a list of repository configs.
The identifier can be:
@@ -11,7 +10,9 @@ def resolve_repos(identifiers:[], all_repos:[]):
for ident in identifiers:
matches = []
for repo in all_repos:
full_id = f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
full_id = (
f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"
)
if ident == full_id:
matches.append(repo)
elif ident == repo.get("alias"):
@@ -24,4 +25,4 @@ def resolve_repos(identifiers:[], all_repos:[]):
print(f"Identifier '{ident}' did not match any repository in config.")
else:
selected.extend(matches)
return selected

View File

@@ -66,18 +66,26 @@ def verify_repository(repo, repo_dir, mode="local", no_verification=False):
if expected_commit:
if not commit_hash:
commit_check_passed = False
error_details.append(f"Expected commit: {expected_commit}, but could not determine current commit.")
error_details.append(
f"Expected commit: {expected_commit}, but could not determine current commit."
)
elif commit_hash != expected_commit:
commit_check_passed = False
error_details.append(f"Expected commit: {expected_commit}, found: {commit_hash}")
error_details.append(
f"Expected commit: {expected_commit}, found: {commit_hash}"
)
if expected_gpg_keys:
if not signing_key:
gpg_check_passed = False
error_details.append(f"Expected one of GPG keys: {expected_gpg_keys}, but no signing key was found.")
error_details.append(
f"Expected one of GPG keys: {expected_gpg_keys}, but no signing key was found."
)
elif signing_key not in expected_gpg_keys:
gpg_check_passed = False
error_details.append(f"Expected one of GPG keys: {expected_gpg_keys}, found: {signing_key}")
error_details.append(
f"Expected one of GPG keys: {expected_gpg_keys}, found: {signing_key}"
)
if expected_commit and expected_gpg_keys:
verified_ok = commit_check_passed and gpg_check_passed

View File

@@ -13,6 +13,7 @@ class InstalledVersion:
"""
Represents a resolved installed version and the matched name.
"""
name: str
version: str

View File

@@ -43,10 +43,14 @@ class SemVer:
minor = int(parts[1])
patch = int(parts[2])
except ValueError as exc:
raise ValueError(f"Semantic version components must be integers: {value!r}") from exc
raise ValueError(
f"Semantic version components must be integers: {value!r}"
) from exc
if major < 0 or minor < 0 or patch < 0:
raise ValueError(f"Semantic version components must be non-negative: {value!r}")
raise ValueError(
f"Semantic version components must be non-negative: {value!r}"
)
return cls(major=major, minor=minor, patch=patch)

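For context on the validation reindented above, a free-standing sketch of strict MAJOR.MINOR.PATCH parsing that mirrors the checks visible in the hunk (not the project's full class):

    from dataclasses import dataclass

    @dataclass(frozen=True, order=True)
    class SemVer:
        major: int
        minor: int
        patch: int

        @classmethod
        def parse(cls, value: str) -> "SemVer":
            parts = value.lstrip("v").split(".")
            if len(parts) != 3:
                raise ValueError(f"Expected MAJOR.MINOR.PATCH: {value!r}")
            try:
                major, minor, patch = (int(p) for p in parts)
            except ValueError as exc:
                raise ValueError(
                    f"Semantic version components must be integers: {value!r}"
                ) from exc
            if min(major, minor, patch) < 0:
                raise ValueError(
                    f"Semantic version components must be non-negative: {value!r}"
                )
            return cls(major, minor, patch)

    # order=True compares (major, minor, patch) tuples:
    # SemVer.parse("v1.2.3") < SemVer.parse("1.10.0")  -> True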
View File

@@ -37,9 +37,7 @@ class TestIntegrationBranchCommands(unittest.TestCase):
`pkgmgr branch open feature/test --base develop` must forward
the name and base branch to open_branch() with cwd=".".
"""
self._run_pkgmgr(
["branch", "open", "feature/test", "--base", "develop"]
)
self._run_pkgmgr(["branch", "open", "feature/test", "--base", "develop"])
mock_open_branch.assert_called_once()
_, kwargs = mock_open_branch.call_args
@@ -74,9 +72,7 @@ class TestIntegrationBranchCommands(unittest.TestCase):
`pkgmgr branch close feature/test --base develop` must forward
the name and base branch to close_branch() with cwd=".".
"""
self._run_pkgmgr(
["branch", "close", "feature/test", "--base", "develop"]
)
self._run_pkgmgr(["branch", "close", "feature/test", "--base", "develop"])
mock_close_branch.assert_called_once()
_, kwargs = mock_close_branch.call_args

View File

@@ -3,15 +3,14 @@ import tempfile
import unittest
from pathlib import Path
class TestMakefileThreeTimes(unittest.TestCase):
def test_make_install_three_times(self):
with tempfile.TemporaryDirectory(prefix="makefile-3x-") as tmp:
repo = Path(tmp)
# Minimal Makefile with install target
(repo / "Makefile").write_text(
"install:\n\t@echo install >> install.log\n"
)
(repo / "Makefile").write_text("install:\n\t@echo install >> install.log\n")
for i in range(1, 4):
print(f"\n=== RUN {i}/3 ===")

View File

@@ -114,7 +114,9 @@ class TestIntegrationInstalPKGMGRShallow(unittest.TestCase):
# Optional XDG override for a fully isolated environment
os.environ.setdefault("XDG_CONFIG_HOME", os.path.join(temp_home, ".config"))
os.environ.setdefault("XDG_CACHE_HOME", os.path.join(temp_home, ".cache"))
os.environ.setdefault("XDG_DATA_HOME", os.path.join(temp_home, ".local", "share"))
os.environ.setdefault(
"XDG_DATA_HOME", os.path.join(temp_home, ".local", "share")
)
# 🔧 IMPORTANT FIX: allow Git to access /src safely
configure_git_safe_directory()

View File

@@ -14,17 +14,16 @@ class TestPkgmgrInstallThreeTimesNix(unittest.TestCase):
env["HOME"] = tmp
# Ensure nix is found
env["PATH"] = "/nix/var/nix/profiles/default/bin:" + os.environ.get("PATH", "")
env["PATH"] = "/nix/var/nix/profiles/default/bin:" + os.environ.get(
"PATH", ""
)
# IMPORTANT:
# nix run uses git+file:///src internally -> Git will reject /src if it's not a safe.directory.
# Our test sets HOME to a temp dir, so we must provide a temp global gitconfig.
gitconfig = tmp_path / ".gitconfig"
gitconfig.write_text(
"[safe]\n"
"\tdirectory = /src\n"
"\tdirectory = /src/.git\n"
"\tdirectory = *\n"
"[safe]\n\tdirectory = /src\n\tdirectory = /src/.git\n\tdirectory = *\n"
)
env["GIT_CONFIG_GLOBAL"] = str(gitconfig)

View File

@@ -16,10 +16,8 @@ class TestPkgmgrInstallThreeTimesVenv(unittest.TestCase):
env["HOME"] = tmp
# pkgmgr comes from the project venv
env["PATH"] = (
f"{Path.cwd() / '.venv' / 'bin'}:"
f"{bin_dir}:"
+ os.environ.get("PATH", "")
env["PATH"] = f"{Path.cwd() / '.venv' / 'bin'}:{bin_dir}:" + os.environ.get(
"PATH", ""
)
# explicitly disable nix → use the Python/venv path

View File

@@ -69,9 +69,7 @@ class TestIntegrationMakeCommands(unittest.TestCase):
- '--preview' ensures that no destructive make commands are
actually executed inside the container.
"""
self._run_pkgmgr_make(
["make", "install", "--preview", "pkgmgr"]
)
self._run_pkgmgr_make(["make", "install", "--preview", "pkgmgr"])
if __name__ == "__main__":

View File

@@ -24,9 +24,7 @@ import unittest
# Resolve project root (the repo where flake.nix lives, e.g. /src)
PROJECT_ROOT = os.path.abspath(
os.path.join(os.path.dirname(__file__), "..", "..")
)
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
def _run_cmd(cmd: list[str]) -> subprocess.CompletedProcess:
@@ -69,13 +67,18 @@ class TestNixBuildPkgmgrAllDistros(unittest.TestCase):
_run_cmd(["id"])
# --- nix build .#pkgmgr -L ---
proc = _run_cmd([
"nix",
"--option", "sandbox", "false",
"build", ".#pkgmgr",
"-L",
])
proc = _run_cmd(
[
"nix",
"--option",
"sandbox",
"false",
"build",
".#pkgmgr",
"-L",
]
)
if proc.returncode != 0:
raise AssertionError(
"nix build .#pkgmgr -L failed inside the test container.\n"

View File

@@ -164,5 +164,6 @@ class TestIntegrationReleaseCommand(unittest.TestCase):
self.assertIn("usage:", output)
self.assertIn("pkgmgr release", output)
if __name__ == "__main__":
unittest.main()

View File

@@ -17,6 +17,7 @@ import sys
import unittest
from typing import List
def _run_main(argv: List[str]) -> None:
"""
Helper to run main.py with the given argv.
@@ -62,4 +63,4 @@ class TestToolsHelp(unittest.TestCase):
if __name__ == "__main__":
unittest.main()
unittest.main()

View File

@@ -30,10 +30,7 @@ from test_install_pkgmgr_shallow import (
def _make_temp_gitconfig_with_safe_dirs(home: Path) -> Path:
gitconfig = home / ".gitconfig"
gitconfig.write_text(
"[safe]\n"
"\tdirectory = /src\n"
"\tdirectory = /src/.git\n"
"\tdirectory = *\n"
"[safe]\n\tdirectory = /src\n\tdirectory = /src/.git\n\tdirectory = *\n"
)
return gitconfig

View File

@@ -29,10 +29,7 @@ from test_install_pkgmgr_shallow import (
def _make_temp_gitconfig_with_safe_dirs(home: Path) -> Path:
gitconfig = home / ".gitconfig"
gitconfig.write_text(
"[safe]\n"
"\tdirectory = /src\n"
"\tdirectory = /src/.git\n"
"\tdirectory = *\n"
"[safe]\n\tdirectory = /src\n\tdirectory = /src/.git\n\tdirectory = *\n"
)
return gitconfig

View File

@@ -31,9 +31,7 @@ from typing import List
from pkgmgr.core.config.load import load_config
# Resolve project root (the repo where main.py lives, e.g. /src)
PROJECT_ROOT = os.path.abspath(
os.path.join(os.path.dirname(__file__), "..", "..")
)
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
CONFIG_PATH = os.path.join(PROJECT_ROOT, "config", "config.yaml")

View File

@@ -227,9 +227,7 @@ class TestBranchCLI(unittest.TestCase):
Ensure that `pkgmgr branch drop <name> --force` passes force=True.
"""
parser = self._create_parser()
args = parser.parse_args(
["branch", "drop", "feature/tmp-branch", "--force"]
)
args = parser.parse_args(["branch", "drop", "feature/tmp-branch", "--force"])
self.assertTrue(args.force)

View File

@@ -83,7 +83,10 @@ class TestInstallReposIntegration(unittest.TestCase):
selected_repos = [repo_system, repo_nix]
all_repos = selected_repos
with tempfile.TemporaryDirectory() as tmp_base, tempfile.TemporaryDirectory() as tmp_bin:
with (
tempfile.TemporaryDirectory() as tmp_base,
tempfile.TemporaryDirectory() as tmp_bin,
):
# Fake repo directories (what get_repo_dir will return)
repo_system_dir = os.path.join(tmp_base, "repo-system")
repo_nix_dir = os.path.join(tmp_base, "repo-nix")
@@ -103,11 +106,12 @@ class TestInstallReposIntegration(unittest.TestCase):
# Patch resolve_command_for_repo at the *pipeline* module level,
# because InstallationPipeline imports it there.
with patch(
"pkgmgr.actions.install.pipeline.resolve_command_for_repo"
) as mock_resolve, patch(
"pkgmgr.actions.install.os.path.exists"
) as mock_exists_install:
with (
patch(
"pkgmgr.actions.install.pipeline.resolve_command_for_repo"
) as mock_resolve,
patch("pkgmgr.actions.install.os.path.exists") as mock_exists_install,
):
def fake_resolve(repo, repo_identifier: str, repo_dir: str):
"""

View File

@@ -29,7 +29,9 @@ from unittest.mock import MagicMock, PropertyMock, patch
class TestIntegrationMirrorCommands(unittest.TestCase):
"""Integration tests for `pkgmgr mirror` commands."""
def _run_pkgmgr(self, args: List[str], extra_env: Optional[Dict[str, str]] = None) -> str:
def _run_pkgmgr(
self, args: List[str], extra_env: Optional[Dict[str, str]] = None
) -> str:
"""Execute pkgmgr with the given arguments and return captured output."""
original_argv = list(sys.argv)
original_env = dict(os.environ)
@@ -80,20 +82,65 @@ class TestIntegrationMirrorCommands(unittest.TestCase):
with ExitStack() as stack:
# build_context is imported directly in these modules:
stack.enter_context(_p("pkgmgr.actions.mirror.list_cmd.build_context", return_value=dummy_ctx))
stack.enter_context(_p("pkgmgr.actions.mirror.diff_cmd.build_context", return_value=dummy_ctx))
stack.enter_context(_p("pkgmgr.actions.mirror.merge_cmd.build_context", return_value=dummy_ctx))
stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.build_context", return_value=dummy_ctx))
stack.enter_context(_p("pkgmgr.actions.mirror.remote_provision.build_context", return_value=dummy_ctx))
stack.enter_context(
_p(
"pkgmgr.actions.mirror.list_cmd.build_context",
return_value=dummy_ctx,
)
)
stack.enter_context(
_p(
"pkgmgr.actions.mirror.diff_cmd.build_context",
return_value=dummy_ctx,
)
)
stack.enter_context(
_p(
"pkgmgr.actions.mirror.merge_cmd.build_context",
return_value=dummy_ctx,
)
)
stack.enter_context(
_p(
"pkgmgr.actions.mirror.setup_cmd.build_context",
return_value=dummy_ctx,
)
)
stack.enter_context(
_p(
"pkgmgr.actions.mirror.remote_provision.build_context",
return_value=dummy_ctx,
)
)
# Deterministic remote probing (new refactor: probe_remote_reachable)
stack.enter_context(_p("pkgmgr.core.git.queries.probe_remote_reachable", return_value=True))
stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.probe_remote_reachable", return_value=True))
stack.enter_context(
_p(
"pkgmgr.core.git.queries.probe_remote_reachable",
return_value=True,
)
)
stack.enter_context(
_p(
"pkgmgr.actions.mirror.setup_cmd.probe_remote_reachable",
return_value=True,
)
)
# setup_cmd imports ensure_origin_remote directly:
stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.ensure_origin_remote", return_value=None))
stack.enter_context(
_p(
"pkgmgr.actions.mirror.setup_cmd.ensure_origin_remote",
return_value=None,
)
)
# Extra safety: if any code calls git_remote.ensure_origin_remote directly
stack.enter_context(_p("pkgmgr.actions.mirror.git_remote.ensure_origin_remote", return_value=None))
stack.enter_context(
_p(
"pkgmgr.actions.mirror.git_remote.ensure_origin_remote",
return_value=None,
)
)
# remote provisioning: remote_provision imports ensure_remote_repo directly from core:
stack.enter_context(
@@ -135,8 +182,12 @@ class TestIntegrationMirrorCommands(unittest.TestCase):
self.assertTrue(output.strip(), "Expected output from mirror diff")
def test_mirror_merge_config_to_file_preview_all(self) -> None:
output = self._run_pkgmgr(["mirror", "merge", "config", "file", "--preview", "--all"])
self.assertTrue(output.strip(), "Expected output from mirror merge (config -> file)")
output = self._run_pkgmgr(
["mirror", "merge", "config", "file", "--preview", "--all"]
)
self.assertTrue(
output.strip(), "Expected output from mirror merge (config -> file)"
)
def test_mirror_setup_preview_all(self) -> None:
output = self._run_pkgmgr(["mirror", "setup", "--preview", "--all"])
@@ -148,7 +199,9 @@ class TestIntegrationMirrorCommands(unittest.TestCase):
def test_mirror_provision_preview_all(self) -> None:
output = self._run_pkgmgr(["mirror", "provision", "--preview", "--all"])
self.assertTrue(output.strip(), "Expected output from mirror provision (preview)")
self.assertTrue(
output.strip(), "Expected output from mirror provision (preview)"
)
if __name__ == "__main__":
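The long run of stack.enter_context(...) calls above is the standard ExitStack idiom for entering a variable number of patches inside one with block. A compact sketch with synthetic patch targets:

import os
from contextlib import ExitStack
from unittest.mock import patch

targets = ["os.getcwd", "os.getpid"]
with ExitStack() as stack:
    mocks = [stack.enter_context(patch(t)) for t in targets]
    mocks[0].return_value = "/fake"
    assert os.getcwd() == "/fake"   # every patch is active until the block exits
# all patches are unwound here, in reverse order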

View File

@@ -10,6 +10,7 @@ class FakeRunResult:
"""
Mimics your runner returning a structured result object.
"""
returncode: int
stdout: str
stderr: str = ""
@@ -19,6 +20,7 @@ class FakeRunner:
"""
Minimal runner stub: returns exactly what we configure.
"""
def __init__(self, result):
self._result = result
@@ -37,26 +39,34 @@ class TestE2ENixProfileListJsonParsing(unittest.TestCase):
def test_list_json_accepts_raw_string(self) -> None:
from pkgmgr.actions.install.installers.nix.profile import NixProfileInspector
payload = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}
payload = {
"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}
}
raw = json.dumps(payload)
runner = FakeRunner(raw)
inspector = NixProfileInspector()
data = inspector.list_json(ctx=None, runner=runner)
self.assertEqual(data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")
self.assertEqual(
data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr"
)
def test_list_json_accepts_runresult_object(self) -> None:
from pkgmgr.actions.install.installers.nix.profile import NixProfileInspector
payload = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}
payload = {
"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}
}
raw = json.dumps(payload)
runner = FakeRunner(FakeRunResult(returncode=0, stdout=raw))
inspector = NixProfileInspector()
data = inspector.list_json(ctx=None, runner=runner)
self.assertEqual(data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")
self.assertEqual(
data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr"
)
if __name__ == "__main__":
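Taken together, the two tests pin down one contract: list_json must accept either a raw JSON string or a result object carrying stdout. A hedged sketch of that normalization (an assumption about the shape, not the real NixProfileInspector code):

import json
from typing import Any

def _coerce_stdout(payload: Any) -> str:
    # Raw string -> use as-is; result-like object -> take its .stdout field.
    if isinstance(payload, str):
        return payload
    return payload.stdout

def parse_profile_list(payload: Any) -> dict:
    return json.loads(_coerce_stdout(payload))

assert parse_profile_list('{"elements": {}}') == {"elements": {}}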

View File

@@ -74,26 +74,28 @@ class TestRecursiveCapabilitiesIntegration(unittest.TestCase):
patched_installers = []
for label, inst in installers:
def always_supports(self, ctx):
return True
def make_run(label_name: str):
def _run(self, ctx):
called_installers.append(label_name)
return _run
inst.supports = always_supports.__get__(inst, inst.__class__) # type: ignore[assignment]
inst.run = make_run(label).__get__(inst, inst.__class__) # type: ignore[assignment]
patched_installers.append(inst)
with patch.object(install_mod, "INSTALLERS", patched_installers), patch.object(
install_mod, "get_repo_identifier", return_value="dummy-repo"
), patch.object(
install_mod, "get_repo_dir", return_value=repo_dir
), patch.object(
install_mod, "verify_repository", return_value=(True, [], None, None)
), patch.object(
install_mod, "clone_repos"
with (
patch.object(install_mod, "INSTALLERS", patched_installers),
patch.object(install_mod, "get_repo_identifier", return_value="dummy-repo"),
patch.object(install_mod, "get_repo_dir", return_value=repo_dir),
patch.object(
install_mod, "verify_repository", return_value=(True, [], None, None)
),
patch.object(install_mod, "clone_repos"),
):
install_repos(
selected_repos=selected_repos,
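The __get__ calls in this hunk are the descriptor protocol used by hand: binding a plain function to a single instance so it behaves like a method there, without touching the class. A self-contained sketch:

class Installer:
    def supports(self, ctx):
        return False

def always_supports(self, ctx):
    return True

inst = Installer()
# __get__ produces a method bound to exactly this instance.
inst.supports = always_supports.__get__(inst, Installer)
assert inst.supports(ctx=None) is True
assert Installer().supports(ctx=None) is False   # other instances are unaffected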

View File

@@ -29,10 +29,12 @@ class TestIntegrationReleasePublishHook(unittest.TestCase):
# Go through real parser to ensure CLI surface is wired correctly
args = self._parse(["release", "patch"])
with patch("pkgmgr.cli.commands.release.run_release") as m_release, patch(
"pkgmgr.cli.commands.release.run_publish"
) as m_publish, patch(
"pkgmgr.cli.commands.release.sys.stdin.isatty", return_value=False
with (
patch("pkgmgr.cli.commands.release.run_release") as m_release,
patch("pkgmgr.cli.commands.release.run_publish") as m_publish,
patch(
"pkgmgr.cli.commands.release.sys.stdin.isatty", return_value=False
),
):
handle_release(args=args, ctx=self._ctx(), selected=selected)
@@ -53,9 +55,10 @@ class TestIntegrationReleasePublishHook(unittest.TestCase):
args = self._parse(["release", "patch", "--no-publish"])
with patch("pkgmgr.cli.commands.release.run_release") as m_release, patch(
"pkgmgr.cli.commands.release.run_publish"
) as m_publish:
with (
patch("pkgmgr.cli.commands.release.run_release") as m_release,
patch("pkgmgr.cli.commands.release.run_publish") as m_publish,
):
handle_release(args=args, ctx=self._ctx(), selected=selected)
m_release.assert_called_once()
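Most hunks in this commit share one shape: chained with patch(...) statements rewritten into the parenthesized multi-manager form, which Python accepts since 3.10. Both spellings below are equivalent (nullcontext stands in for any context manager):

from contextlib import nullcontext

# classic single-line form
with nullcontext() as a, nullcontext() as b:
    pass

# parenthesized form (Python 3.10+): one manager per line, trailing comma allowed
with (
    nullcontext() as a,
    nullcontext() as b,
):
    pass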

View File

@@ -24,7 +24,10 @@ class TestIntegrationReposCreatePreview(unittest.TestCase):
repositories_base_dir="/tmp/Repositories",
binaries_dir="/tmp/bin",
all_repositories=[],
config_merged={"directories": {"repositories": "/tmp/Repositories"}, "repositories": []},
config_merged={
"directories": {"repositories": "/tmp/Repositories"},
"repositories": [],
},
user_config_path="/tmp/user.yml",
)

View File

@@ -18,9 +18,13 @@ def _find_repo_root() -> Path:
"""
here = Path(__file__).resolve()
for parent in here.parents:
if (parent / "pyproject.toml").is_file() and (parent / "src" / "pkgmgr").is_dir():
if (parent / "pyproject.toml").is_file() and (
parent / "src" / "pkgmgr"
).is_dir():
return parent
raise RuntimeError("Could not determine repository root for pkgmgr integration test")
raise RuntimeError(
"Could not determine repository root for pkgmgr integration test"
)
class TestRepositoryPathsExist(unittest.TestCase):

View File

@@ -25,7 +25,6 @@ class TestTokenResolverIntegration(unittest.TestCase):
# 1) ENV: empty
# ------------------------------------------------------------------
with patch.dict("os.environ", {}, clear=True):
# ------------------------------------------------------------------
# 2) GH CLI is available
# ------------------------------------------------------------------
@@ -37,14 +36,12 @@ class TestTokenResolverIntegration(unittest.TestCase):
"pkgmgr.core.credentials.providers.gh.subprocess.check_output",
return_value="gh-invalid-token\n",
):
# ------------------------------------------------------------------
# 3) Keyring returns an existing (invalid) token
# ------------------------------------------------------------------
with patch(
"pkgmgr.core.credentials.providers.keyring._import_keyring"
) as mock_import_keyring:
mock_keyring = mock_import_keyring.return_value
mock_keyring.get_password.return_value = "keyring-invalid-token"
@@ -59,7 +56,6 @@ class TestTokenResolverIntegration(unittest.TestCase):
"pkgmgr.core.credentials.providers.prompt.getpass",
return_value="new-valid-token",
):
# ------------------------------------------------------------------
# 5) Validation logic:
# - gh token invalid
@@ -77,7 +73,6 @@ class TestTokenResolverIntegration(unittest.TestCase):
"pkgmgr.core.credentials.resolver.validate_token",
side_effect=validate_side_effect,
) as validate_mock:
result = resolver.get_token(
provider_kind="github",
host="github.com",

View File

@@ -46,14 +46,22 @@ class TestUpdateSilentContinues(unittest.TestCase):
def install_side_effect(selected_repos, *_args, **kwargs):
repo = selected_repos[0]
install_calls.append((repo["repository"], kwargs.get("silent"), kwargs.get("emit_summary")))
install_calls.append(
(repo["repository"], kwargs.get("silent"), kwargs.get("emit_summary"))
)
if repo["repository"] == "repo-b":
raise SystemExit(3)
return None
# Patch at the exact import locations used inside UpdateManager.run()
with patch("pkgmgr.actions.repository.pull.pull_with_verification", side_effect=pull_side_effect), patch(
"pkgmgr.actions.install.install_repos", side_effect=install_side_effect
with (
patch(
"pkgmgr.actions.repository.pull.pull_with_verification",
side_effect=pull_side_effect,
),
patch(
"pkgmgr.actions.install.install_repos", side_effect=install_side_effect
),
):
# 1) silent=True: should NOT raise (even though failures happened)
UpdateManager().run(
@@ -73,7 +81,9 @@ class TestUpdateSilentContinues(unittest.TestCase):
# Ensure it tried all pulls, and installs happened for B and C only.
self.assertEqual(pull_calls, ["repo-a", "repo-b", "repo-c"])
self.assertEqual([r for r, _silent, _emit in install_calls], ["repo-b", "repo-c"])
self.assertEqual(
[r for r, _silent, _emit in install_calls], ["repo-b", "repo-c"]
)
# Ensure UpdateManager suppressed install summary spam by passing emit_summary=False.
for _repo_name, _silent, emit_summary in install_calls:
@@ -103,7 +113,9 @@ class TestUpdateSilentContinues(unittest.TestCase):
# Still must have processed all repos (continue-on-failure behavior).
self.assertEqual(pull_calls, ["repo-a", "repo-b", "repo-c"])
self.assertEqual([r for r, _silent, _emit in install_calls], ["repo-b", "repo-c"])
self.assertEqual(
[r for r, _silent, _emit in install_calls], ["repo-b", "repo-c"]
)
if __name__ == "__main__":
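The assertions above encode the update contract: every repository is attempted even after one fails, and only the non-silent mode escalates at the end. A rough sketch of that shape (illustrative only, not UpdateManager itself):

def update_all(repos, pull, install, silent=False):
    failures = []
    for repo in repos:
        try:
            pull(repo)
            install(repo)
        except SystemExit as exc:
            failures.append((repo, exc.code))   # record and keep going
    if failures and not silent:
        raise SystemExit(1)                     # escalate only when not silent
    return failures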

View File

@@ -8,8 +8,13 @@ from pkgmgr.core.git.commands import GitDeleteRemoteBranchError
class TestCloseBranch(unittest.TestCase):
@patch("builtins.input", return_value="y")
@patch("pkgmgr.actions.branch.close_branch.get_current_branch", return_value="feature-x")
@patch("pkgmgr.actions.branch.close_branch.resolve_base_branch", return_value="main")
@patch(
"pkgmgr.actions.branch.close_branch.get_current_branch",
return_value="feature-x",
)
@patch(
"pkgmgr.actions.branch.close_branch.resolve_base_branch", return_value="main"
)
@patch("pkgmgr.actions.branch.close_branch.fetch")
@patch("pkgmgr.actions.branch.close_branch.checkout")
@patch("pkgmgr.actions.branch.close_branch.pull")
@@ -40,22 +45,36 @@ class TestCloseBranch(unittest.TestCase):
delete_remote_branch.assert_called_once_with("origin", "feature-x", cwd=".")
@patch("pkgmgr.actions.branch.close_branch.get_current_branch", return_value="main")
@patch("pkgmgr.actions.branch.close_branch.resolve_base_branch", return_value="main")
@patch(
"pkgmgr.actions.branch.close_branch.resolve_base_branch", return_value="main"
)
def test_refuses_to_close_base_branch(self, _resolve, _current) -> None:
with self.assertRaises(RuntimeError):
close_branch(None)
@patch("builtins.input", return_value="n")
@patch("pkgmgr.actions.branch.close_branch.get_current_branch", return_value="feature-x")
@patch("pkgmgr.actions.branch.close_branch.resolve_base_branch", return_value="main")
@patch(
"pkgmgr.actions.branch.close_branch.get_current_branch",
return_value="feature-x",
)
@patch(
"pkgmgr.actions.branch.close_branch.resolve_base_branch", return_value="main"
)
@patch("pkgmgr.actions.branch.close_branch.fetch")
def test_close_branch_aborts_on_no(self, fetch, _resolve, _current, _input_mock) -> None:
def test_close_branch_aborts_on_no(
self, fetch, _resolve, _current, _input_mock
) -> None:
close_branch(None, cwd=".")
fetch.assert_not_called()
@patch("builtins.input")
@patch("pkgmgr.actions.branch.close_branch.get_current_branch", return_value="feature-x")
@patch("pkgmgr.actions.branch.close_branch.resolve_base_branch", return_value="main")
@patch(
"pkgmgr.actions.branch.close_branch.get_current_branch",
return_value="feature-x",
)
@patch(
"pkgmgr.actions.branch.close_branch.resolve_base_branch", return_value="main"
)
@patch("pkgmgr.actions.branch.close_branch.fetch")
@patch("pkgmgr.actions.branch.close_branch.checkout")
@patch("pkgmgr.actions.branch.close_branch.pull")
@@ -90,14 +109,22 @@ class TestCloseBranch(unittest.TestCase):
delete_local_branch.assert_called_once_with("feature-x", cwd=".", force=False)
delete_remote_branch.assert_called_once_with("origin", "feature-x", cwd=".")
@patch("pkgmgr.actions.branch.close_branch.get_current_branch", side_effect=GitRunError("fail"))
@patch(
"pkgmgr.actions.branch.close_branch.get_current_branch",
side_effect=GitRunError("fail"),
)
def test_close_branch_errors_if_cannot_detect_branch(self, _current) -> None:
with self.assertRaises(RuntimeError):
close_branch(None)
@patch("builtins.input", return_value="y")
@patch("pkgmgr.actions.branch.close_branch.get_current_branch", return_value="feature-x")
@patch("pkgmgr.actions.branch.close_branch.resolve_base_branch", return_value="main")
@patch(
"pkgmgr.actions.branch.close_branch.get_current_branch",
return_value="feature-x",
)
@patch(
"pkgmgr.actions.branch.close_branch.resolve_base_branch", return_value="main"
)
@patch("pkgmgr.actions.branch.close_branch.fetch")
@patch("pkgmgr.actions.branch.close_branch.checkout")
@patch("pkgmgr.actions.branch.close_branch.pull")

Some files were not shown because too many files have changed in this diff Show More