Compare commits

..

6 Commits
v1.8.5 ... main

Author SHA1 Message Date
Kevin Veen-Birkenbach
f4339a746a executed 'ruff format --check .'
Some checks are pending
Mark stable commit / test-unit (push) Waiting to run
Mark stable commit / test-integration (push) Waiting to run
Mark stable commit / test-env-virtual (push) Waiting to run
Mark stable commit / test-env-nix (push) Waiting to run
Mark stable commit / test-e2e (push) Waiting to run
Mark stable commit / test-virgin-user (push) Waiting to run
Mark stable commit / test-virgin-root (push) Waiting to run
Mark stable commit / lint-shell (push) Waiting to run
Mark stable commit / lint-python (push) Waiting to run
Mark stable commit / mark-stable (push) Blocked by required conditions
2025-12-18 14:04:44 +01:00
Kevin Veen-Birkenbach
763f02a9a4 Release version 1.8.6
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-17 23:50:31 +01:00
Kevin Veen-Birkenbach
2eec873a17 Solved Debian Bug
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
https://chatgpt.com/share/69432655-a948-800f-8c0d-353921cdf644
2025-12-17 23:29:04 +01:00
Kevin Veen-Birkenbach
17ee947930 ci: pass NIX_CONFIG with GitHub token into all test containers
- Add NIX_CONFIG with GitHub access token to all CI test workflows
- Export NIX_CONFIG in Makefile for propagation to test scripts
- Forward NIX_CONFIG explicitly into all docker run invocations
- Prevent GitHub API rate limit errors during Nix-based tests

https://chatgpt.com/share/69432655-a948-800f-8c0d-353921cdf644
2025-12-17 23:29:04 +01:00
Kevin Veen-Birkenbach
b989bdd4eb Release version 1.8.5 2025-12-17 23:29:04 +01:00
Kevin Veen-Birkenbach
c4da8368d8 --- Release Error --- 2025-12-17 23:28:45 +01:00
174 changed files with 1377 additions and 651 deletions

View File

@@ -11,7 +11,9 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
distro: [arch, debian, ubuntu, fedora, centos] distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4

View File

@@ -12,7 +12,9 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
distro: [arch, debian, ubuntu, fedora, centos] distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4

View File

@@ -11,7 +11,9 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
distro: [arch, debian, ubuntu, fedora, centos] distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4

View File

@@ -7,7 +7,9 @@ jobs:
test-integration: test-integration:
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30 timeout-minutes: 30
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4

View File

@@ -7,7 +7,9 @@ jobs:
test-unit: test-unit:
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30 timeout-minutes: 30
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4

View File

@@ -11,7 +11,9 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
distro: [arch, debian, ubuntu, fedora, centos] distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
@@ -19,13 +21,11 @@ jobs:
- name: Show Docker version - name: Show Docker version
run: docker version run: docker version
# 🔹 BUILD virgin image if missing
- name: Build virgin container (${{ matrix.distro }}) - name: Build virgin container (${{ matrix.distro }})
run: | run: |
set -euo pipefail set -euo pipefail
PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin
# 🔹 RUN test inside virgin image
- name: Virgin ${{ matrix.distro }} pkgmgr test (root) - name: Virgin ${{ matrix.distro }} pkgmgr test (root)
run: | run: |
set -euo pipefail set -euo pipefail
@@ -34,6 +34,7 @@ jobs:
-v "$PWD":/opt/src/pkgmgr \ -v "$PWD":/opt/src/pkgmgr \
-v pkgmgr_repos:/root/Repositories \ -v pkgmgr_repos:/root/Repositories \
-v pkgmgr_pip_cache:/root/.cache/pip \ -v pkgmgr_pip_cache:/root/.cache/pip \
-e NIX_CONFIG="${NIX_CONFIG}" \
-w /opt/src/pkgmgr \ -w /opt/src/pkgmgr \
"pkgmgr-${{ matrix.distro }}-virgin" \ "pkgmgr-${{ matrix.distro }}-virgin" \
bash -lc ' bash -lc '

View File

@@ -11,7 +11,9 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
distro: [arch, debian, ubuntu, fedora, centos] distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
@@ -19,19 +21,18 @@ jobs:
- name: Show Docker version - name: Show Docker version
run: docker version run: docker version
# 🔹 BUILD virgin image if missing
- name: Build virgin container (${{ matrix.distro }}) - name: Build virgin container (${{ matrix.distro }})
run: | run: |
set -euo pipefail set -euo pipefail
PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin
# 🔹 RUN test inside virgin image as non-root
- name: Virgin ${{ matrix.distro }} pkgmgr test (user) - name: Virgin ${{ matrix.distro }} pkgmgr test (user)
run: | run: |
set -euo pipefail set -euo pipefail
docker run --rm \ docker run --rm \
-v "$PWD":/opt/src/pkgmgr \ -v "$PWD":/opt/src/pkgmgr \
-e NIX_CONFIG="${NIX_CONFIG}" \
-w /opt/src/pkgmgr \ -w /opt/src/pkgmgr \
"pkgmgr-${{ matrix.distro }}-virgin" \ "pkgmgr-${{ matrix.distro }}-virgin" \
bash -lc ' bash -lc '

View File

@@ -1,3 +1,8 @@
## [1.8.6] - 2025-12-17
* Prevent Rate Limits during GitHub Nix Setups
## [1.8.5] - 2025-12-17 ## [1.8.5] - 2025-12-17
* * Clearer Git error handling, especially when a directory is not a Git repository. * * Clearer Git error handling, especially when a directory is not a Git repository.

View File

@@ -10,6 +10,10 @@ DISTROS ?= arch debian ubuntu fedora centos
PKGMGR_DISTRO ?= arch PKGMGR_DISTRO ?= arch
export PKGMGR_DISTRO export PKGMGR_DISTRO
# Nix Config Variable (To avoid rate limit)
NIX_CONFIG ?=
export NIX_CONFIG
# ------------------------------------------------------------ # ------------------------------------------------------------
# Base images # Base images
# (kept for documentation/reference; actual build logic is in scripts/build) # (kept for documentation/reference; actual build logic is in scripts/build)

View File

@@ -32,7 +32,7 @@
rec { rec {
pkgmgr = pyPkgs.buildPythonApplication { pkgmgr = pyPkgs.buildPythonApplication {
pname = "package-manager"; pname = "package-manager";
version = "1.8.5"; version = "1.8.6";
# Use the git repo as source # Use the git repo as source
src = ./.; src = ./.;

View File

@@ -1,7 +1,7 @@
# Maintainer: Kevin Veen-Birkenbach <info@veen.world> # Maintainer: Kevin Veen-Birkenbach <info@veen.world>
pkgname=package-manager pkgname=package-manager
pkgver=1.8.5 pkgver=1.8.6
pkgrel=1 pkgrel=1
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)." pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
arch=('any') arch=('any')

View File

@@ -1,3 +1,9 @@
package-manager (1.8.6-1) unstable; urgency=medium
* Prevent Rate Limits during GitHub Nix Setups
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 23:50:31 +0100
package-manager (1.8.5-1) unstable; urgency=medium package-manager (1.8.5-1) unstable; urgency=medium
* * Clearer Git error handling, especially when a directory is not a Git repository. * * Clearer Git error handling, especially when a directory is not a Git repository.

View File

@@ -1,5 +1,5 @@
Name: package-manager Name: package-manager
Version: 1.8.5 Version: 1.8.6
Release: 1%{?dist} Release: 1%{?dist}
Summary: Wrapper that runs Kevin's package-manager via Nix flake Summary: Wrapper that runs Kevin's package-manager via Nix flake
@@ -74,6 +74,9 @@ echo ">>> package-manager removed. Nix itself was not removed."
/usr/lib/package-manager/ /usr/lib/package-manager/
%changelog %changelog
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.6-1
- Prevent Rate Limits during GitHub Nix Setups
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.5-1 * Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.5-1
- * Clearer Git error handling, especially when a directory is not a Git repository. - * Clearer Git error handling, especially when a directory is not a Git repository.
* More reliable repository verification with improved commit and GPG signature checks. * More reliable repository verification with improved commit and GPG signature checks.

View File

@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
[project] [project]
name = "kpmx" name = "kpmx"
version = "1.8.5" version = "1.8.6"
description = "Kevin's package-manager tool (pkgmgr)" description = "Kevin's package-manager tool (pkgmgr)"
readme = "README.md" readme = "README.md"
requires-python = ">=3.9" requires-python = ">=3.9"

View File

@@ -11,6 +11,7 @@ docker run --rm \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \ -v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
-e REINSTALL_PKGMGR=1 \ -e REINSTALL_PKGMGR=1 \
-e TEST_PATTERN="${TEST_PATTERN}" \ -e TEST_PATTERN="${TEST_PATTERN}" \
-e NIX_CONFIG="${NIX_CONFIG}" \
--workdir /opt/src/pkgmgr \ --workdir /opt/src/pkgmgr \
"pkgmgr-${PKGMGR_DISTRO}" \ "pkgmgr-${PKGMGR_DISTRO}" \
bash -lc ' bash -lc '

View File

@@ -14,6 +14,7 @@ docker run --rm \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \ -v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
--workdir /opt/src/pkgmgr \ --workdir /opt/src/pkgmgr \
-e REINSTALL_PKGMGR=1 \ -e REINSTALL_PKGMGR=1 \
-e NIX_CONFIG="${NIX_CONFIG}" \
"${IMAGE}" \ "${IMAGE}" \
bash -lc ' bash -lc '
set -euo pipefail set -euo pipefail

View File

@@ -19,6 +19,7 @@ if OUTPUT=$(docker run --rm \
-e REINSTALL_PKGMGR=1 \ -e REINSTALL_PKGMGR=1 \
-v "$(pwd):/opt/src/pkgmgr" \ -v "$(pwd):/opt/src/pkgmgr" \
-w /opt/src/pkgmgr \ -w /opt/src/pkgmgr \
-e NIX_CONFIG="${NIX_CONFIG}" \
"${IMAGE}" \ "${IMAGE}" \
bash -lc ' bash -lc '
set -euo pipefail set -euo pipefail

View File

@@ -12,6 +12,7 @@ docker run --rm \
--workdir /opt/src/pkgmgr \ --workdir /opt/src/pkgmgr \
-e REINSTALL_PKGMGR=1 \ -e REINSTALL_PKGMGR=1 \
-e TEST_PATTERN="${TEST_PATTERN}" \ -e TEST_PATTERN="${TEST_PATTERN}" \
-e NIX_CONFIG="${NIX_CONFIG}" \
"pkgmgr-${PKGMGR_DISTRO}" \ "pkgmgr-${PKGMGR_DISTRO}" \
bash -lc ' bash -lc '
set -e; set -e;

View File

@@ -12,6 +12,7 @@ docker run --rm \
--workdir /opt/src/pkgmgr \ --workdir /opt/src/pkgmgr \
-e REINSTALL_PKGMGR=1 \ -e REINSTALL_PKGMGR=1 \
-e TEST_PATTERN="${TEST_PATTERN}" \ -e TEST_PATTERN="${TEST_PATTERN}" \
-e NIX_CONFIG="${NIX_CONFIG}" \
"pkgmgr-${PKGMGR_DISTRO}" \ "pkgmgr-${PKGMGR_DISTRO}" \
bash -lc ' bash -lc '
set -e; set -e;

View File

@@ -25,12 +25,12 @@ __all__ = ["cli"]
def __getattr__(name: str) -> Any: def __getattr__(name: str) -> Any:
""" """
Lazily expose ``pkgmgr.cli`` as attribute on the top-level package. Lazily expose ``pkgmgr.cli`` as attribute on the top-level package.
This keeps ``import pkgmgr`` lightweight while still allowing This keeps ``import pkgmgr`` lightweight while still allowing
``from pkgmgr import cli`` in tests and entry points. ``from pkgmgr import cli`` in tests and entry points.
""" """
if name == "cli": if name == "cli":
return import_module("pkgmgr.cli") return import_module("pkgmgr.cli")
raise AttributeError(f"module 'pkgmgr' has no attribute {name!r}") raise AttributeError(f"module 'pkgmgr' has no attribute {name!r}")

View File

@@ -3,4 +3,4 @@ from __future__ import annotations
# expose subpackages for patch() / resolve_name() friendliness # expose subpackages for patch() / resolve_name() friendliness
from . import release as release # noqa: F401 from . import release as release # noqa: F401
__all__ = ["release"] __all__ = ["release"]

View File

@@ -48,9 +48,13 @@ def close_branch(
# Confirmation # Confirmation
if not force: if not force:
answer = input( answer = (
f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): " input(
).strip().lower() f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): "
)
.strip()
.lower()
)
if answer != "y": if answer != "y":
print("Aborted closing branch.") print("Aborted closing branch.")
return return

View File

@@ -41,15 +41,19 @@ def drop_branch(
# Confirmation # Confirmation
if not force: if not force:
answer = input( answer = (
f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): " input(
).strip().lower() f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): "
)
.strip()
.lower()
)
if answer != "y": if answer != "y":
print("Aborted dropping branch.") print("Aborted dropping branch.")
return return
delete_local_branch(name, cwd=cwd, force=False) delete_local_branch(name, cwd=cwd, force=False)
# Remote delete (special-case message) # Remote delete (special-case message)
try: try:
delete_remote_branch("origin", name, cwd=cwd) delete_remote_branch("origin", name, cwd=cwd)

View File

@@ -1,15 +1,18 @@
import yaml import yaml
import os import os
from pkgmgr.core.config.save import save_user_config from pkgmgr.core.config.save import save_user_config
def interactive_add(config,USER_CONFIG_PATH:str):
def interactive_add(config, USER_CONFIG_PATH: str):
"""Interactively prompt the user to add a new repository entry to the user config.""" """Interactively prompt the user to add a new repository entry to the user config."""
print("Adding a new repository configuration entry.") print("Adding a new repository configuration entry.")
new_entry = {} new_entry = {}
new_entry["provider"] = input("Provider (e.g., github.com): ").strip() new_entry["provider"] = input("Provider (e.g., github.com): ").strip()
new_entry["account"] = input("Account (e.g., yourusername): ").strip() new_entry["account"] = input("Account (e.g., yourusername): ").strip()
new_entry["repository"] = input("Repository name (e.g., mytool): ").strip() new_entry["repository"] = input("Repository name (e.g., mytool): ").strip()
new_entry["command"] = input("Command (optional, leave blank to auto-detect): ").strip() new_entry["command"] = input(
"Command (optional, leave blank to auto-detect): "
).strip()
new_entry["description"] = input("Description (optional): ").strip() new_entry["description"] = input("Description (optional): ").strip()
new_entry["replacement"] = input("Replacement (optional): ").strip() new_entry["replacement"] = input("Replacement (optional): ").strip()
new_entry["alias"] = input("Alias (optional): ").strip() new_entry["alias"] = input("Alias (optional): ").strip()
@@ -25,12 +28,12 @@ def interactive_add(config,USER_CONFIG_PATH:str):
confirm = input("Add this entry to user config? (y/N): ").strip().lower() confirm = input("Add this entry to user config? (y/N): ").strip().lower()
if confirm == "y": if confirm == "y":
if os.path.exists(USER_CONFIG_PATH): if os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, 'r') as f: with open(USER_CONFIG_PATH, "r") as f:
user_config = yaml.safe_load(f) or {} user_config = yaml.safe_load(f) or {}
else: else:
user_config = {"repositories": []} user_config = {"repositories": []}
user_config.setdefault("repositories", []) user_config.setdefault("repositories", [])
user_config["repositories"].append(new_entry) user_config["repositories"].append(new_entry)
save_user_config(user_config,USER_CONFIG_PATH) save_user_config(user_config, USER_CONFIG_PATH)
else: else:
print("Entry not added.") print("Entry not added.")

View File

@@ -107,11 +107,15 @@ def config_init(
# Already known? # Already known?
if key in default_keys: if key in default_keys:
skipped += 1 skipped += 1
print(f"[SKIP] (defaults) {provider}/{account}/{repo_name}") print(
f"[SKIP] (defaults) {provider}/{account}/{repo_name}"
)
continue continue
if key in existing_keys: if key in existing_keys:
skipped += 1 skipped += 1
print(f"[SKIP] (user-config) {provider}/{account}/{repo_name}") print(
f"[SKIP] (user-config) {provider}/{account}/{repo_name}"
)
continue continue
print(f"[ADD] {provider}/{account}/{repo_name}") print(f"[ADD] {provider}/{account}/{repo_name}")
@@ -121,7 +125,9 @@ def config_init(
if verified_commit: if verified_commit:
print(f"[INFO] Latest commit: {verified_commit}") print(f"[INFO] Latest commit: {verified_commit}")
else: else:
print("[WARN] Could not read commit (not a git repo or no commits).") print(
"[WARN] Could not read commit (not a git repo or no commits)."
)
entry: Dict[str, Any] = { entry: Dict[str, Any] = {
"provider": provider, "provider": provider,

View File

@@ -1,6 +1,7 @@
import yaml import yaml
from pkgmgr.core.config.load import load_config from pkgmgr.core.config.load import load_config
def show_config(selected_repos, user_config_path, full_config=False): def show_config(selected_repos, user_config_path, full_config=False):
"""Display configuration for one or more repositories, or the entire merged config.""" """Display configuration for one or more repositories, or the entire merged config."""
if full_config: if full_config:
@@ -8,8 +9,10 @@ def show_config(selected_repos, user_config_path, full_config=False):
print(yaml.dump(merged, default_flow_style=False)) print(yaml.dump(merged, default_flow_style=False))
else: else:
for repo in selected_repos: for repo in selected_repos:
identifier = f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}' identifier = (
f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"
)
print(f"Repository: {identifier}") print(f"Repository: {identifier}")
for key, value in repo.items(): for key, value in repo.items():
print(f" {key}: {value}") print(f" {key}: {value}")
print("-" * 40) print("-" * 40)

View File

@@ -66,10 +66,7 @@ def _ensure_repo_dir(
repo_dir = get_repo_dir(repositories_base_dir, repo) repo_dir = get_repo_dir(repositories_base_dir, repo)
if not os.path.exists(repo_dir): if not os.path.exists(repo_dir):
print( print(f"Repository directory '{repo_dir}' does not exist. Cloning it now...")
f"Repository directory '{repo_dir}' does not exist. "
"Cloning it now..."
)
clone_repos( clone_repos(
[repo], [repo],
repositories_base_dir, repositories_base_dir,
@@ -79,10 +76,7 @@ def _ensure_repo_dir(
clone_mode, clone_mode,
) )
if not os.path.exists(repo_dir): if not os.path.exists(repo_dir):
print( print(f"Cloning failed for repository {identifier}. Skipping installation.")
f"Cloning failed for repository {identifier}. "
"Skipping installation."
)
return None return None
return repo_dir return repo_dir
@@ -115,7 +109,9 @@ def _verify_repo(
if silent: if silent:
# Non-interactive mode: continue with a warning. # Non-interactive mode: continue with a warning.
print(f"[Warning] Continuing despite verification failure for {identifier} (--silent).") print(
f"[Warning] Continuing despite verification failure for {identifier} (--silent)."
)
else: else:
choice = input("Continue anyway? [y/N]: ").strip().lower() choice = input("Continue anyway? [y/N]: ").strip().lower()
if choice != "y": if choice != "y":
@@ -232,12 +228,16 @@ def install_repos(
code = exc.code if isinstance(exc.code, int) else str(exc.code) code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"installer failed (exit={code})")) failures.append((identifier, f"installer failed (exit={code})"))
if not quiet: if not quiet:
print(f"[Warning] install: repository {identifier} failed (exit={code}). Continuing...") print(
f"[Warning] install: repository {identifier} failed (exit={code}). Continuing..."
)
continue continue
except Exception as exc: except Exception as exc:
failures.append((identifier, f"unexpected error: {exc}")) failures.append((identifier, f"unexpected error: {exc}"))
if not quiet: if not quiet:
print(f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing...") print(
f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing..."
)
continue continue
if failures and emit_summary and not quiet: if failures and emit_summary and not quiet:

View File

@@ -14,6 +14,10 @@ from pkgmgr.actions.install.installers.python import PythonInstaller # noqa: F4
from pkgmgr.actions.install.installers.makefile import MakefileInstaller # noqa: F401 from pkgmgr.actions.install.installers.makefile import MakefileInstaller # noqa: F401
# OS-specific installers # OS-specific installers
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller # noqa: F401 from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import (
from pkgmgr.actions.install.installers.os_packages.debian_control import DebianControlInstaller # noqa: F401 ArchPkgbuildInstaller,
) # noqa: F401
from pkgmgr.actions.install.installers.os_packages.debian_control import (
DebianControlInstaller,
) # noqa: F401
from pkgmgr.actions.install.installers.os_packages.rpm_spec import RpmSpecInstaller # noqa: F401 from pkgmgr.actions.install.installers.os_packages.rpm_spec import RpmSpecInstaller # noqa: F401

View File

@@ -41,7 +41,9 @@ class BaseInstaller(ABC):
return caps return caps
for matcher in CAPABILITY_MATCHERS: for matcher in CAPABILITY_MATCHERS:
if matcher.applies_to_layer(self.layer) and matcher.is_provided(ctx, self.layer): if matcher.applies_to_layer(self.layer) and matcher.is_provided(
ctx, self.layer
):
caps.add(matcher.name) caps.add(matcher.name)
return caps return caps

View File

@@ -16,7 +16,9 @@ class MakefileInstaller(BaseInstaller):
def supports(self, ctx: RepoContext) -> bool: def supports(self, ctx: RepoContext) -> bool:
if os.environ.get("PKGMGR_DISABLE_MAKEFILE_INSTALLER") == "1": if os.environ.get("PKGMGR_DISABLE_MAKEFILE_INSTALLER") == "1":
if not ctx.quiet: if not ctx.quiet:
print("[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 skipping MakefileInstaller.") print(
"[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 skipping MakefileInstaller."
)
return False return False
makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME) makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
@@ -46,7 +48,9 @@ class MakefileInstaller(BaseInstaller):
return return
if not ctx.quiet: if not ctx.quiet:
print(f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)") print(
f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)"
)
run_command("make install", cwd=ctx.repo_dir, preview=ctx.preview) run_command("make install", cwd=ctx.repo_dir, preview=ctx.preview)

View File

@@ -57,7 +57,9 @@ class NixConflictResolver:
# 3) Fallback: output-name based lookup (also covers nix suggesting: `nix profile remove pkgmgr`) # 3) Fallback: output-name based lookup (also covers nix suggesting: `nix profile remove pkgmgr`)
if not tokens: if not tokens:
tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output) tokens = self._profile.find_remove_tokens_for_output(
ctx, self._runner, output
)
if tokens: if tokens:
if not quiet: if not quiet:
@@ -94,7 +96,9 @@ class NixConflictResolver:
continue continue
if not quiet: if not quiet:
print("[nix] conflict detected but could not resolve profile entries to remove.") print(
"[nix] conflict detected but could not resolve profile entries to remove."
)
return False return False
return False return False

View File

@@ -75,7 +75,9 @@ class NixFlakeInstaller(BaseInstaller):
# Core install path # Core install path
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None: def _install_only(
self, ctx: "RepoContext", output: str, allow_failure: bool
) -> None:
install_cmd = f"nix profile install {self._installable(ctx, output)}" install_cmd = f"nix profile install {self._installable(ctx, output)}"
if not ctx.quiet: if not ctx.quiet:
@@ -96,7 +98,9 @@ class NixFlakeInstaller(BaseInstaller):
output=output, output=output,
): ):
if not ctx.quiet: if not ctx.quiet:
print(f"[nix] output '{output}' successfully installed after conflict cleanup.") print(
f"[nix] output '{output}' successfully installed after conflict cleanup."
)
return return
if not ctx.quiet: if not ctx.quiet:
@@ -107,20 +111,26 @@ class NixFlakeInstaller(BaseInstaller):
# If indices are supported, try legacy index-upgrade path. # If indices are supported, try legacy index-upgrade path.
if self._indices_supported is not False: if self._indices_supported is not False:
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output) indices = self._profile.find_installed_indices_for_output(
ctx, self._runner, output
)
upgraded = False upgraded = False
for idx in indices: for idx in indices:
if self._upgrade_index(ctx, idx): if self._upgrade_index(ctx, idx):
upgraded = True upgraded = True
if not ctx.quiet: if not ctx.quiet:
print(f"[nix] output '{output}' successfully upgraded (index {idx}).") print(
f"[nix] output '{output}' successfully upgraded (index {idx})."
)
if upgraded: if upgraded:
return return
if indices and not ctx.quiet: if indices and not ctx.quiet:
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.") print(
f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
)
for idx in indices: for idx in indices:
self._remove_index(ctx, idx) self._remove_index(ctx, idx)
@@ -139,7 +149,9 @@ class NixFlakeInstaller(BaseInstaller):
print(f"[nix] output '{output}' successfully re-installed.") print(f"[nix] output '{output}' successfully re-installed.")
return return
print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})") print(
f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})"
)
if not allow_failure: if not allow_failure:
raise SystemExit(final.returncode) raise SystemExit(final.returncode)
@@ -149,7 +161,9 @@ class NixFlakeInstaller(BaseInstaller):
# force_update path # force_update path
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None: def _force_upgrade_output(
self, ctx: "RepoContext", output: str, allow_failure: bool
) -> None:
# Prefer token path if indices unsupported (new nix) # Prefer token path if indices unsupported (new nix)
if self._indices_supported is False: if self._indices_supported is False:
self._remove_tokens_for_output(ctx, output) self._remove_tokens_for_output(ctx, output)
@@ -158,14 +172,18 @@ class NixFlakeInstaller(BaseInstaller):
print(f"[nix] output '{output}' successfully upgraded.") print(f"[nix] output '{output}' successfully upgraded.")
return return
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output) indices = self._profile.find_installed_indices_for_output(
ctx, self._runner, output
)
upgraded_any = False upgraded_any = False
for idx in indices: for idx in indices:
if self._upgrade_index(ctx, idx): if self._upgrade_index(ctx, idx):
upgraded_any = True upgraded_any = True
if not ctx.quiet: if not ctx.quiet:
print(f"[nix] output '{output}' successfully upgraded (index {idx}).") print(
f"[nix] output '{output}' successfully upgraded (index {idx})."
)
if upgraded_any: if upgraded_any:
if not ctx.quiet: if not ctx.quiet:
@@ -173,7 +191,9 @@ class NixFlakeInstaller(BaseInstaller):
return return
if indices and not ctx.quiet: if indices and not ctx.quiet:
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.") print(
f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
)
for idx in indices: for idx in indices:
self._remove_index(ctx, idx) self._remove_index(ctx, idx)
@@ -223,7 +243,9 @@ class NixFlakeInstaller(BaseInstaller):
return return
if not ctx.quiet: if not ctx.quiet:
print(f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}") print(
f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}"
)
for t in tokens: for t in tokens:
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True) self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)

View File

@@ -101,7 +101,9 @@ class NixProfileInspector:
data = self.list_json(ctx, runner) data = self.list_json(ctx, runner)
entries = normalize_elements(data) entries = normalize_elements(data)
tokens: List[str] = [out] # critical: matches nix's own suggestion for conflicts tokens: List[str] = [
out
] # critical: matches nix's own suggestion for conflicts
for e in entries: for e in entries:
if entry_matches_output(e, out): if entry_matches_output(e, out):

View File

@@ -48,7 +48,9 @@ class NixProfileListReader:
return uniq return uniq
def indices_matching_store_prefixes(self, ctx: "RepoContext", prefixes: List[str]) -> List[int]: def indices_matching_store_prefixes(
self, ctx: "RepoContext", prefixes: List[str]
) -> List[int]:
prefixes = [self._store_prefix(p) for p in prefixes if p] prefixes = [self._store_prefix(p) for p in prefixes if p]
prefixes = [p for p in prefixes if p] prefixes = [p for p in prefixes if p]
if not prefixes: if not prefixes:

View File

@@ -11,6 +11,7 @@ if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext from pkgmgr.actions.install.context import RepoContext
from .runner import CommandRunner from .runner import CommandRunner
@dataclass(frozen=True) @dataclass(frozen=True)
class RetryPolicy: class RetryPolicy:
max_attempts: int = 7 max_attempts: int = 7
@@ -35,13 +36,19 @@ class GitHubRateLimitRetry:
install_cmd: str, install_cmd: str,
) -> RunResult: ) -> RunResult:
quiet = bool(getattr(ctx, "quiet", False)) quiet = bool(getattr(ctx, "quiet", False))
delays = list(self._fibonacci_backoff(self._policy.base_delay_seconds, self._policy.max_attempts)) delays = list(
self._fibonacci_backoff(
self._policy.base_delay_seconds, self._policy.max_attempts
)
)
last: RunResult | None = None last: RunResult | None = None
for attempt, base_delay in enumerate(delays, start=1): for attempt, base_delay in enumerate(delays, start=1):
if not quiet: if not quiet:
print(f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}") print(
f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}"
)
res = runner.run(ctx, install_cmd, allow_failure=True) res = runner.run(ctx, install_cmd, allow_failure=True)
last = res last = res
@@ -56,7 +63,9 @@ class GitHubRateLimitRetry:
if attempt >= self._policy.max_attempts: if attempt >= self._policy.max_attempts:
break break
jitter = random.randint(self._policy.jitter_seconds_min, self._policy.jitter_seconds_max) jitter = random.randint(
self._policy.jitter_seconds_min, self._policy.jitter_seconds_max
)
wait_time = base_delay + jitter wait_time = base_delay + jitter
if not quiet: if not quiet:
@@ -67,7 +76,11 @@ class GitHubRateLimitRetry:
time.sleep(wait_time) time.sleep(wait_time)
return last if last is not None else RunResult(returncode=1, stdout="", stderr="nix install retry failed") return (
last
if last is not None
else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
)
@staticmethod @staticmethod
def _is_github_rate_limit_error(text: str) -> bool: def _is_github_rate_limit_error(text: str) -> bool:

View File

@@ -9,6 +9,7 @@ from .types import RunResult
if TYPE_CHECKING: if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext from pkgmgr.actions.install.context import RepoContext
class CommandRunner: class CommandRunner:
""" """
Executes commands (shell=True) inside a repository directory (if provided). Executes commands (shell=True) inside a repository directory (if provided).
@@ -40,7 +41,9 @@ class CommandRunner:
raise raise
return RunResult(returncode=1, stdout="", stderr=str(e)) return RunResult(returncode=1, stdout="", stderr=str(e))
res = RunResult(returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or "") res = RunResult(
returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or ""
)
if res.returncode != 0 and not quiet: if res.returncode != 0 and not quiet:
self._print_compact_failure(res) self._print_compact_failure(res)

View File

@@ -20,7 +20,9 @@ class NixConflictTextParser:
tokens: List[str] = [] tokens: List[str] = []
for m in pat.finditer(text or ""): for m in pat.finditer(text or ""):
t = (m.group(1) or "").strip() t = (m.group(1) or "").strip()
if (t.startswith("'") and t.endswith("'")) or (t.startswith('"') and t.endswith('"')): if (t.startswith("'") and t.endswith("'")) or (
t.startswith('"') and t.endswith('"')
):
t = t[1:-1] t = t[1:-1]
if t: if t:
tokens.append(t) tokens.append(t)

View File

@@ -14,7 +14,9 @@ class PythonInstaller(BaseInstaller):
def supports(self, ctx: RepoContext) -> bool: def supports(self, ctx: RepoContext) -> bool:
if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1": if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
print("[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER.") print(
"[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER."
)
return False return False
return os.path.exists(os.path.join(ctx.repo_dir, "pyproject.toml")) return os.path.exists(os.path.join(ctx.repo_dir, "pyproject.toml"))

View File

@@ -132,7 +132,11 @@ class InstallationPipeline:
continue continue
if not quiet: if not quiet:
if ctx.force_update and state.layer is not None and installer_layer == state.layer: if (
ctx.force_update
and state.layer is not None
and installer_layer == state.layer
):
print( print(
f"[pkgmgr] Running installer {installer.__class__.__name__} " f"[pkgmgr] Running installer {installer.__class__.__name__} "
f"for {identifier} in '{repo_dir}' (upgrade requested)..." f"for {identifier} in '{repo_dir}' (upgrade requested)..."

View File

@@ -16,6 +16,7 @@ from .types import MirrorMap, Repository
# Helpers # Helpers
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
def _repo_key(repo: Repository) -> Tuple[str, str, str]: def _repo_key(repo: Repository) -> Tuple[str, str, str]:
""" """
Normalised key for identifying a repository in config files. Normalised key for identifying a repository in config files.
@@ -47,6 +48,7 @@ def _load_user_config(path: str) -> Dict[str, object]:
# Main merge command # Main merge command
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
def merge_mirrors( def merge_mirrors(
selected_repos: List[Repository], selected_repos: List[Repository],
repositories_base_dir: str, repositories_base_dir: str,

View File

@@ -66,7 +66,9 @@ def _setup_remote_mirrors_for_repo(
# Probe only git URLs (do not try ls-remote against PyPI etc.) # Probe only git URLs (do not try ls-remote against PyPI etc.)
# If there are no mirrors at all, probe the primary git URL. # If there are no mirrors at all, probe the primary git URL.
git_mirrors = {k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)} git_mirrors = {
k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)
}
if not git_mirrors: if not git_mirrors:
primary = determine_primary_remote_url(repo, ctx) primary = determine_primary_remote_url(repo, ctx)

View File

@@ -17,7 +17,7 @@ def hostport_from_git_url(url: str) -> Tuple[str, Optional[str]]:
netloc = netloc.split("@", 1)[1] netloc = netloc.split("@", 1)[1]
if netloc.startswith("[") and "]" in netloc: if netloc.startswith("[") and "]" in netloc:
host = netloc[1:netloc.index("]")] host = netloc[1 : netloc.index("]")]
rest = netloc[netloc.index("]") + 1 :] rest = netloc[netloc.index("]") + 1 :]
port = rest[1:] if rest.startswith(":") else None port = rest[1:] if rest.startswith(":") else None
return host.strip(), (port.strip() if port else None) return host.strip(), (port.strip() if port else None)
@@ -43,7 +43,7 @@ def normalize_provider_host(host: str) -> str:
return "" return ""
if host.startswith("[") and "]" in host: if host.startswith("[") and "]" in host:
host = host[1:host.index("]")] host = host[1 : host.index("]")]
if ":" in host and host.count(":") == 1: if ":" in host and host.count(":") == 1:
host = host.rsplit(":", 1)[0] host = host.rsplit(":", 1)[0]

View File

@@ -4,7 +4,16 @@ from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.command.run import run_command from pkgmgr.core.command.run import run_command
import sys import sys
def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir, all_repos, proxy_command: str, extra_args, preview: bool):
def exec_proxy_command(
proxy_prefix: str,
selected_repos,
repositories_base_dir,
all_repos,
proxy_command: str,
extra_args,
preview: bool,
):
"""Execute a given proxy command with extra arguments for each repository.""" """Execute a given proxy command with extra arguments for each repository."""
error_repos = [] error_repos = []
max_exit_code = 0 max_exit_code = 0
@@ -22,7 +31,9 @@ def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir,
try: try:
run_command(full_cmd, cwd=repo_dir, preview=preview) run_command(full_cmd, cwd=repo_dir, preview=preview)
except SystemExit as e: except SystemExit as e:
print(f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}.") print(
f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}."
)
error_repos.append((repo_identifier, e.code)) error_repos.append((repo_identifier, e.code))
max_exit_code = max(max_exit_code, e.code) max_exit_code = max(max_exit_code, e.code)
@@ -30,4 +41,4 @@ def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir,
print("\nSummary of failed commands:") print("\nSummary of failed commands:")
for repo_identifier, exit_code in error_repos: for repo_identifier, exit_code in error_repos:
print(f"- {repo_identifier} failed with exit code {exit_code}") print(f"- {repo_identifier} failed with exit code {exit_code}")
sys.exit(max_exit_code) sys.exit(max_exit_code)

View File

@@ -121,7 +121,7 @@ def update_pyproject_version(
pattern = r'^(version\s*=\s*")([^"]+)(")' pattern = r'^(version\s*=\s*")([^"]+)(")'
new_content, count = re.subn( new_content, count = re.subn(
pattern, pattern,
lambda m: f'{m.group(1)}{new_version}{m.group(3)}', lambda m: f"{m.group(1)}{new_version}{m.group(3)}",
content, content,
flags=re.MULTILINE, flags=re.MULTILINE,
) )
@@ -162,7 +162,7 @@ def update_flake_version(
pattern = r'(version\s*=\s*")([^"]+)(")' pattern = r'(version\s*=\s*")([^"]+)(")'
new_content, count = re.subn( new_content, count = re.subn(
pattern, pattern,
lambda m: f'{m.group(1)}{new_version}{m.group(3)}', lambda m: f"{m.group(1)}{new_version}{m.group(3)}",
content, content,
) )

View File

@@ -80,7 +80,9 @@ def is_highest_version_tag(tag: str) -> bool:
return True return True
latest = max(parsed_all) latest = max(parsed_all)
print(f"[INFO] Latest tag (parsed): v{'.'.join(map(str, latest))}, Current tag: {tag}") print(
f"[INFO] Latest tag (parsed): v{'.'.join(map(str, latest))}, Current tag: {tag}"
)
return parsed_current >= latest return parsed_current >= latest
@@ -93,7 +95,9 @@ def update_latest_tag(new_tag: str, *, preview: bool = False) -> None:
- 'latest' is forced (floating tag), therefore the push uses --force. - 'latest' is forced (floating tag), therefore the push uses --force.
""" """
target_ref = f"{new_tag}^{{}}" target_ref = f"{new_tag}^{{}}"
print(f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})...") print(
f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})..."
)
tag_force_annotated( tag_force_annotated(
name="latest", name="latest",

View File

@@ -76,7 +76,9 @@ def _release_impl(
if paths.arch_pkgbuild: if paths.arch_pkgbuild:
update_pkgbuild_version(paths.arch_pkgbuild, new_ver_str, preview=preview) update_pkgbuild_version(paths.arch_pkgbuild, new_ver_str, preview=preview)
else: else:
print("[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping.") print(
"[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping."
)
if paths.rpm_spec: if paths.rpm_spec:
update_spec_version(paths.rpm_spec, new_ver_str, preview=preview) update_spec_version(paths.rpm_spec, new_ver_str, preview=preview)
@@ -123,7 +125,9 @@ def _release_impl(
paths.rpm_spec, paths.rpm_spec,
paths.debian_changelog, paths.debian_changelog,
] ]
existing_files = [p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)] existing_files = [
p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)
]
if preview: if preview:
add(existing_files, preview=True) add(existing_files, preview=True)
@@ -135,13 +139,17 @@ def _release_impl(
if is_highest_version_tag(new_tag): if is_highest_version_tag(new_tag):
update_latest_tag(new_tag, preview=True) update_latest_tag(new_tag, preview=True)
else: else:
print(f"[PREVIEW] Skipping 'latest' update (tag {new_tag} is not the highest).") print(
f"[PREVIEW] Skipping 'latest' update (tag {new_tag} is not the highest)."
)
if close and branch not in ("main", "master"): if close and branch not in ("main", "master"):
if force: if force:
print(f"[PREVIEW] Would delete branch {branch} (forced).") print(f"[PREVIEW] Would delete branch {branch} (forced).")
else: else:
print(f"[PREVIEW] Would ask whether to delete branch {branch} after release.") print(
f"[PREVIEW] Would ask whether to delete branch {branch} after release."
)
return return
add(existing_files, preview=False) add(existing_files, preview=False)
@@ -157,7 +165,9 @@ def _release_impl(
if is_highest_version_tag(new_tag): if is_highest_version_tag(new_tag):
update_latest_tag(new_tag, preview=False) update_latest_tag(new_tag, preview=False)
else: else:
print(f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest).") print(
f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest)."
)
except GitRunError as exc: except GitRunError as exc:
print(f"[WARN] Failed to update floating 'latest' tag for {new_tag}: {exc}") print(f"[WARN] Failed to update floating 'latest' tag for {new_tag}: {exc}")
print("'latest' tag was not updated.") print("'latest' tag was not updated.")
@@ -166,7 +176,9 @@ def _release_impl(
if close: if close:
if branch in ("main", "master"): if branch in ("main", "master"):
print(f"[INFO] close=True but current branch is {branch}; skipping branch deletion.") print(
f"[INFO] close=True but current branch is {branch}; skipping branch deletion."
)
return return
if not should_delete_branch(force=force): if not should_delete_branch(force=force):

View File

@@ -55,7 +55,9 @@ def clone_repos(
clone_url = _build_clone_url(repo, clone_mode) clone_url = _build_clone_url(repo, clone_mode)
if not clone_url: if not clone_url:
print(f"[WARNING] Cannot build clone URL for '{repo_identifier}'. Skipping.") print(
f"[WARNING] Cannot build clone URL for '{repo_identifier}'. Skipping."
)
continue continue
shallow = clone_mode == "shallow" shallow = clone_mode == "shallow"
@@ -84,7 +86,11 @@ def clone_repos(
continue continue
print(f"[WARNING] SSH clone failed for '{repo_identifier}': {exc}") print(f"[WARNING] SSH clone failed for '{repo_identifier}': {exc}")
choice = input("Do you want to attempt HTTPS clone instead? (y/N): ").strip().lower() choice = (
input("Do you want to attempt HTTPS clone instead? (y/N): ")
.strip()
.lower()
)
if choice != "y": if choice != "y":
print(f"[INFO] HTTPS clone not attempted for '{repo_identifier}'.") print(f"[INFO] HTTPS clone not attempted for '{repo_identifier}'.")
continue continue

View File

@@ -63,6 +63,4 @@ def _strip_git_suffix(name: str) -> str:
def _ensure_valid_repo_name(name: str) -> None: def _ensure_valid_repo_name(name: str) -> None:
if not _NAME_RE.fullmatch(name): if not _NAME_RE.fullmatch(name):
raise ValueError( raise ValueError("Repository name must match: lowercase a-z, 0-9, '_' and '-'.")
"Repository name must match: lowercase a-z, 0-9, '_' and '-'."
)

View File

@@ -66,9 +66,7 @@ class TemplateRenderer:
for root, _, files in os.walk(self.templates_dir): for root, _, files in os.walk(self.templates_dir):
for fn in files: for fn in files:
if fn.endswith(".j2"): if fn.endswith(".j2"):
rel = os.path.relpath( rel = os.path.relpath(os.path.join(root, fn), self.templates_dir)
os.path.join(root, fn), self.templates_dir
)
print(f"[Preview] Would render template: {rel} -> {rel[:-3]}") print(f"[Preview] Would render template: {rel} -> {rel[:-3]}")
@staticmethod @staticmethod

View File

@@ -24,9 +24,13 @@ def deinstall_repos(
# Remove alias link/file (interactive) # Remove alias link/file (interactive)
if os.path.exists(alias_path): if os.path.exists(alias_path):
confirm = input( confirm = (
f"Are you sure you want to delete link '{alias_path}' for {repo_identifier}? [y/N]: " input(
).strip().lower() f"Are you sure you want to delete link '{alias_path}' for {repo_identifier}? [y/N]: "
)
.strip()
.lower()
)
if confirm == "y": if confirm == "y":
if preview: if preview:
print(f"[Preview] Would remove link '{alias_path}'.") print(f"[Preview] Would remove link '{alias_path}'.")

View File

@@ -3,22 +3,33 @@ import os
from pkgmgr.core.repository.identifier import get_repo_identifier from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir from pkgmgr.core.repository.dir import get_repo_dir
def delete_repos(selected_repos, repositories_base_dir, all_repos, preview=False): def delete_repos(selected_repos, repositories_base_dir, all_repos, preview=False):
for repo in selected_repos: for repo in selected_repos:
repo_identifier = get_repo_identifier(repo, all_repos) repo_identifier = get_repo_identifier(repo, all_repos)
repo_dir = get_repo_dir(repositories_base_dir, repo) repo_dir = get_repo_dir(repositories_base_dir, repo)
if os.path.exists(repo_dir): if os.path.exists(repo_dir):
confirm = input(f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: ").strip().lower() confirm = (
input(
f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: "
)
.strip()
.lower()
)
if confirm == "y": if confirm == "y":
if preview: if preview:
print(f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}.") print(
f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}."
)
else: else:
try: try:
shutil.rmtree(repo_dir) shutil.rmtree(repo_dir)
print(f"Deleted repository directory '{repo_dir}' for {repo_identifier}.") print(
f"Deleted repository directory '{repo_dir}' for {repo_identifier}."
)
except Exception as e: except Exception as e:
print(f"Error deleting '{repo_dir}' for {repo_identifier}: {e}") print(f"Error deleting '{repo_dir}' for {repo_identifier}: {e}")
else: else:
print(f"Skipped deletion of '{repo_dir}' for {repo_identifier}.") print(f"Skipped deletion of '{repo_dir}' for {repo_identifier}.")
else: else:
print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.") print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")

View File

@@ -233,9 +233,7 @@ def list_repositories(
categories.append(str(repo["category"])) categories.append(str(repo["category"]))
yaml_tags: List[str] = list(map(str, repo.get("tags", []))) yaml_tags: List[str] = list(map(str, repo.get("tags", [])))
display_tags: List[str] = sorted( display_tags: List[str] = sorted(set(yaml_tags + list(map(str, extra_tags))))
set(yaml_tags + list(map(str, extra_tags)))
)
rows.append( rows.append(
{ {
@@ -288,13 +286,7 @@ def list_repositories(
status_padded = status.ljust(status_width) status_padded = status.ljust(status_width)
status_colored = _color_status(status_padded) status_colored = _color_status(status_padded)
print( print(f"{ident_col} {status_colored} {cat_col} {tag_col} {dir_col}")
f"{ident_col} "
f"{status_colored} "
f"{cat_col} "
f"{tag_col} "
f"{dir_col}"
)
# ------------------------------------------------------------------ # ------------------------------------------------------------------
# Detailed section (alias value red, same status coloring) # Detailed section (alias value red, same status coloring)

View File

@@ -55,12 +55,16 @@ class UpdateManager:
code = exc.code if isinstance(exc.code, int) else str(exc.code) code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"pull failed (exit={code})")) failures.append((identifier, f"pull failed (exit={code})"))
if not quiet: if not quiet:
print(f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing...") print(
f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing..."
)
continue continue
except Exception as exc: except Exception as exc:
failures.append((identifier, f"pull failed: {exc}")) failures.append((identifier, f"pull failed: {exc}"))
if not quiet: if not quiet:
print(f"[Warning] update: pull failed for {identifier}: {exc}. Continuing...") print(
f"[Warning] update: pull failed for {identifier}: {exc}. Continuing..."
)
continue continue
try: try:
@@ -82,12 +86,16 @@ class UpdateManager:
code = exc.code if isinstance(exc.code, int) else str(exc.code) code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"install failed (exit={code})")) failures.append((identifier, f"install failed (exit={code})"))
if not quiet: if not quiet:
print(f"[Warning] update: install failed for {identifier} (exit={code}). Continuing...") print(
f"[Warning] update: install failed for {identifier} (exit={code}). Continuing..."
)
continue continue
except Exception as exc: except Exception as exc:
failures.append((identifier, f"install failed: {exc}")) failures.append((identifier, f"install failed: {exc}"))
if not quiet: if not quiet:
print(f"[Warning] update: install failed for {identifier}: {exc}. Continuing...") print(
f"[Warning] update: install failed for {identifier}: {exc}. Continuing..."
)
continue continue
if failures and not quiet: if failures and not quiet:

View File

@@ -31,6 +31,7 @@ class OSReleaseInfo:
""" """
Minimal /etc/os-release representation for distro detection. Minimal /etc/os-release representation for distro detection.
""" """
id: str = "" id: str = ""
id_like: str = "" id_like: str = ""
pretty_name: str = "" pretty_name: str = ""
@@ -63,4 +64,6 @@ class OSReleaseInfo:
def is_fedora_family(self) -> bool: def is_fedora_family(self) -> bool:
ids = self.ids() ids = self.ids()
return bool(ids.intersection({"fedora", "rhel", "centos", "rocky", "almalinux"})) return bool(
ids.intersection({"fedora", "rhel", "centos", "rocky", "almalinux"})
)

View File

@@ -58,7 +58,9 @@ class SystemUpdater:
run_command("sudo pacman -Syu --noconfirm", preview=preview) run_command("sudo pacman -Syu --noconfirm", preview=preview)
return return
print("[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman).") print(
"[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman)."
)
def _update_debian(self, *, preview: bool) -> None: def _update_debian(self, *, preview: bool) -> None:
from pkgmgr.core.command.run import run_command from pkgmgr.core.command.run import run_command
@@ -67,7 +69,9 @@ class SystemUpdater:
apt_get = shutil.which("apt-get") apt_get = shutil.which("apt-get")
if not (sudo and apt_get): if not (sudo and apt_get):
print("[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get).") print(
"[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get)."
)
return return
env = "DEBIAN_FRONTEND=noninteractive" env = "DEBIAN_FRONTEND=noninteractive"

View File

@@ -29,6 +29,7 @@ For details on any command, run:
\033[1mpkgmgr <command> --help\033[0m \033[1mpkgmgr <command> --help\033[0m
""" """
def main() -> None: def main() -> None:
""" """
Entry point for the pkgmgr CLI. Entry point for the pkgmgr CLI.
@@ -41,9 +42,7 @@ def main() -> None:
repositories_dir = os.path.expanduser( repositories_dir = os.path.expanduser(
directories.get("repositories", "~/Repositories") directories.get("repositories", "~/Repositories")
) )
binaries_dir = os.path.expanduser( binaries_dir = os.path.expanduser(directories.get("binaries", "~/.local/bin"))
directories.get("binaries", "~/.local/bin")
)
# Ensure the merged config actually contains the resolved directories # Ensure the merged config actually contains the resolved directories
config_merged.setdefault("directories", {}) config_merged.setdefault("directories", {})

View File

@@ -135,9 +135,7 @@ def handle_changelog(
target_tag=range_arg, target_tag=range_arg,
) )
if cur_tag is None: if cur_tag is None:
print( print(f"[WARN] Tag {range_arg!r} not found or not a SemVer tag.")
f"[WARN] Tag {range_arg!r} not found or not a SemVer tag."
)
print("[INFO] Falling back to full history.") print("[INFO] Falling back to full history.")
from_ref = None from_ref = None
to_ref = None to_ref = None

View File

@@ -213,9 +213,7 @@ def handle_config(args, ctx: CLIContext) -> None:
) )
if key == mod_key: if key == mod_key:
entry["ignore"] = args.set == "true" entry["ignore"] = args.set == "true"
print( print(f"Set ignore for {key} to {entry['ignore']}")
f"Set ignore for {key} to {entry['ignore']}"
)
save_user_config(user_config, user_config_path) save_user_config(user_config, user_config_path)
return return

View File

@@ -4,7 +4,12 @@ from __future__ import annotations
import sys import sys
from typing import Any, Dict, List from typing import Any, Dict, List
from pkgmgr.actions.mirror import diff_mirrors, list_mirrors, merge_mirrors, setup_mirrors from pkgmgr.actions.mirror import (
diff_mirrors,
list_mirrors,
merge_mirrors,
setup_mirrors,
)
from pkgmgr.cli.context import CLIContext from pkgmgr.cli.context import CLIContext
Repository = Dict[str, Any] Repository = Dict[str, Any]
@@ -56,11 +61,15 @@ def handle_mirror_command(
preview = getattr(args, "preview", False) preview = getattr(args, "preview", False)
if source == target: if source == target:
print("[ERROR] For 'mirror merge', source and target must differ (config vs file).") print(
"[ERROR] For 'mirror merge', source and target must differ (config vs file)."
)
sys.exit(2) sys.exit(2)
explicit_config_path = getattr(args, "config_path", None) explicit_config_path = getattr(args, "config_path", None)
user_config_path = explicit_config_path or getattr(ctx, "user_config_path", None) user_config_path = explicit_config_path or getattr(
ctx, "user_config_path", None
)
merge_mirrors( merge_mirrors(
selected_repos=selected, selected_repos=selected,

View File

@@ -18,7 +18,9 @@ def handle_publish(args, ctx: CLIContext, selected: List[Repository]) -> None:
for repo in selected: for repo in selected:
identifier = get_repo_identifier(repo, ctx.all_repositories) identifier = get_repo_identifier(repo, ctx.all_repositories)
repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo) repo_dir = repo.get("directory") or get_repo_dir(
ctx.repositories_base_dir, repo
)
if not os.path.isdir(repo_dir): if not os.path.isdir(repo_dir):
print(f"[WARN] Skipping {identifier}: directory missing.") print(f"[WARN] Skipping {identifier}: directory missing.")

View File

@@ -36,9 +36,13 @@ def handle_release(
identifier = get_repo_identifier(repo, ctx.all_repositories) identifier = get_repo_identifier(repo, ctx.all_repositories)
try: try:
repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo) repo_dir = repo.get("directory") or get_repo_dir(
ctx.repositories_base_dir, repo
)
except Exception as exc: except Exception as exc:
print(f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}") print(
f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}"
)
continue continue
if not os.path.isdir(repo_dir): if not os.path.isdir(repo_dir):

View File

@@ -32,9 +32,8 @@ def _resolve_repository_directory(repository: Repository, ctx: CLIContext) -> st
if repo_dir: if repo_dir:
return repo_dir return repo_dir
base_dir = ( base_dir = getattr(ctx, "repositories_base_dir", None) or getattr(
getattr(ctx, "repositories_base_dir", None) ctx, "repositories_dir", None
or getattr(ctx, "repositories_dir", None)
) )
if not base_dir: if not base_dir:
raise RuntimeError( raise RuntimeError(

View File

@@ -38,9 +38,9 @@ def _print_pkgmgr_self_version() -> None:
# Common distribution/module naming variants. # Common distribution/module naming variants.
python_candidates = [ python_candidates = [
"package-manager", # PyPI dist name in your project "package-manager", # PyPI dist name in your project
"package_manager", # module-ish variant "package_manager", # module-ish variant
"pkgmgr", # console/alias-ish "pkgmgr", # console/alias-ish
] ]
nix_candidates = [ nix_candidates = [
"pkgmgr", "pkgmgr",

View File

@@ -33,8 +33,7 @@ def add_branch_subparsers(
"name", "name",
nargs="?", nargs="?",
help=( help=(
"Name of the new branch (optional; will be asked interactively " "Name of the new branch (optional; will be asked interactively if omitted)"
"if omitted)"
), ),
) )
branch_open.add_argument( branch_open.add_argument(
@@ -54,8 +53,7 @@ def add_branch_subparsers(
"name", "name",
nargs="?", nargs="?",
help=( help=(
"Name of the branch to close (optional; current branch is used " "Name of the branch to close (optional; current branch is used if omitted)"
"if omitted)"
), ),
) )
branch_close.add_argument( branch_close.add_argument(
@@ -84,8 +82,7 @@ def add_branch_subparsers(
"name", "name",
nargs="?", nargs="?",
help=( help=(
"Name of the branch to drop (optional; current branch is used " "Name of the branch to drop (optional; current branch is used if omitted)"
"if omitted)"
), ),
) )
branch_drop.add_argument( branch_drop.add_argument(

View File

@@ -20,7 +20,9 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
required=True, required=True,
) )
mirror_list = mirror_subparsers.add_parser("list", help="List configured mirrors for repositories") mirror_list = mirror_subparsers.add_parser(
"list", help="List configured mirrors for repositories"
)
add_identifier_arguments(mirror_list) add_identifier_arguments(mirror_list)
mirror_list.add_argument( mirror_list.add_argument(
"--source", "--source",
@@ -29,15 +31,21 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
help="Which mirror source to show.", help="Which mirror source to show.",
) )
mirror_diff = mirror_subparsers.add_parser("diff", help="Show differences between config mirrors and MIRRORS file") mirror_diff = mirror_subparsers.add_parser(
"diff", help="Show differences between config mirrors and MIRRORS file"
)
add_identifier_arguments(mirror_diff) add_identifier_arguments(mirror_diff)
mirror_merge = mirror_subparsers.add_parser( mirror_merge = mirror_subparsers.add_parser(
"merge", "merge",
help="Merge mirrors between config and MIRRORS file (example: pkgmgr mirror merge config file --all)", help="Merge mirrors between config and MIRRORS file (example: pkgmgr mirror merge config file --all)",
) )
mirror_merge.add_argument("source", choices=["config", "file"], help="Source of mirrors.") mirror_merge.add_argument(
mirror_merge.add_argument("target", choices=["config", "file"], help="Target of mirrors.") "source", choices=["config", "file"], help="Source of mirrors."
)
mirror_merge.add_argument(
"target", choices=["config", "file"], help="Target of mirrors."
)
add_identifier_arguments(mirror_merge) add_identifier_arguments(mirror_merge)
mirror_merge.add_argument( mirror_merge.add_argument(
"--config-path", "--config-path",

View File

@@ -48,9 +48,6 @@ def add_navigation_subparsers(
"--command", "--command",
nargs=argparse.REMAINDER, nargs=argparse.REMAINDER,
dest="shell_command", dest="shell_command",
help=( help=("The shell command (and its arguments) to execute in each repository"),
"The shell command (and its arguments) to execute in each "
"repository"
),
default=[], default=[],
) )

View File

@@ -53,10 +53,7 @@ def _add_proxy_identifier_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument( parser.add_argument(
"identifiers", "identifiers",
nargs="*", nargs="*",
help=( help=("Identifier(s) for repositories. Default: Repository of current folder."),
"Identifier(s) for repositories. "
"Default: Repository of current folder."
),
) )
parser.add_argument( parser.add_argument(
"--all", "--all",
@@ -118,12 +115,7 @@ def _proxy_has_explicit_selection(args: argparse.Namespace) -> bool:
string_filter = getattr(args, "string", "") or "" string_filter = getattr(args, "string", "") or ""
# Proxy commands currently do not support --tag, so it is not checked here. # Proxy commands currently do not support --tag, so it is not checked here.
return bool( return bool(use_all or identifiers or categories or string_filter)
use_all
or identifiers
or categories
or string_filter
)
def _select_repo_for_current_directory( def _select_repo_for_current_directory(
@@ -204,9 +196,7 @@ def maybe_handle_proxy(args: argparse.Namespace, ctx: CLIContext) -> bool:
If the top-level command is one of the proxy subcommands If the top-level command is one of the proxy subcommands
(git / docker / docker compose), handle it here and return True. (git / docker / docker compose), handle it here and return True.
""" """
all_proxy_subcommands = { all_proxy_subcommands = {sub for subs in PROXY_COMMANDS.values() for sub in subs}
sub for subs in PROXY_COMMANDS.values() for sub in subs
}
if args.command not in all_proxy_subcommands: if args.command not in all_proxy_subcommands:
return False return False

View File

@@ -22,9 +22,8 @@ def resolve_repository_path(repository: Repository, ctx: CLIContext) -> str:
if value: if value:
return value return value
base_dir = ( base_dir = getattr(ctx, "repositories_base_dir", None) or getattr(
getattr(ctx, "repositories_base_dir", None) ctx, "repositories_dir", None
or getattr(ctx, "repositories_dir", None)
) )
if not base_dir: if not base_dir:
raise RuntimeError( raise RuntimeError(

View File

@@ -57,7 +57,9 @@ def _build_workspace_filename(identifiers: List[str]) -> str:
return "_".join(sorted_identifiers) + ".code-workspace" return "_".join(sorted_identifiers) + ".code-workspace"
def _build_workspace_data(selected: List[Repository], ctx: CLIContext) -> Dict[str, Any]: def _build_workspace_data(
selected: List[Repository], ctx: CLIContext
) -> Dict[str, Any]:
folders = [{"path": resolve_repository_path(repo, ctx)} for repo in selected] folders = [{"path": resolve_repository_path(repo, ctx)} for repo in selected]
return { return {
"folders": folders, "folders": folders,

View File

@@ -2,10 +2,11 @@ import os
import hashlib import hashlib
import re import re
def generate_alias(repo, bin_dir, existing_aliases): def generate_alias(repo, bin_dir, existing_aliases):
""" """
Generate an alias for a repository based on its repository name. Generate an alias for a repository based on its repository name.
Steps: Steps:
1. Keep only consonants from the repository name (letters from BCDFGHJKLMNPQRSTVWXYZ). 1. Keep only consonants from the repository name (letters from BCDFGHJKLMNPQRSTVWXYZ).
2. Collapse consecutive identical consonants. 2. Collapse consecutive identical consonants.
@@ -39,4 +40,4 @@ def generate_alias(repo, bin_dir, existing_aliases):
while conflict(candidate3): while conflict(candidate3):
candidate3 += "x" candidate3 += "x"
candidate3 = candidate3[:12] candidate3 = candidate3[:12]
return candidate3 return candidate3

View File

@@ -98,8 +98,7 @@ def create_ink(
if alias_name == repo_identifier: if alias_name == repo_identifier:
if not quiet: if not quiet:
print( print(
f"Alias '{alias_name}' equals identifier. " f"Alias '{alias_name}' equals identifier. Skipping alias creation."
"Skipping alias creation."
) )
return return

View File

@@ -8,6 +8,7 @@ class CliLayer(str, Enum):
""" """
CLI layer precedence (lower number = stronger layer). CLI layer precedence (lower number = stronger layer).
""" """
OS_PACKAGES = "os-packages" OS_PACKAGES = "os-packages"
NIX = "nix" NIX = "nix"
PYTHON = "python" PYTHON = "python"

View File

@@ -34,11 +34,7 @@ def _nix_binary_candidates(home: str, names: List[str]) -> List[str]:
""" """
Build possible Nix profile binary paths for a list of candidate names. Build possible Nix profile binary paths for a list of candidate names.
""" """
return [ return [os.path.join(home, ".nix-profile", "bin", name) for name in names if name]
os.path.join(home, ".nix-profile", "bin", name)
for name in names
if name
]
def _path_binary_candidates(names: List[str]) -> List[str]: def _path_binary_candidates(names: List[str]) -> List[str]:
@@ -148,7 +144,8 @@ def resolve_command_for_repo(
# c) Nix profile binaries # c) Nix profile binaries
nix_binaries = [ nix_binaries = [
path for path in _nix_binary_candidates(home, candidate_names) path
for path in _nix_binary_candidates(home, candidate_names)
if _is_executable(path) if _is_executable(path)
] ]
nix_binary = nix_binaries[0] if nix_binaries else None nix_binary = nix_binaries[0] if nix_binaries else None

View File

@@ -51,6 +51,7 @@ Repo = Dict[str, Any]
# Hilfsfunktionen # Hilfsfunktionen
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]: def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
""" """
Recursively merge two dictionaries. Recursively merge two dictionaries.
@@ -58,11 +59,7 @@ def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any
Values from `override` win over values in `base`. Values from `override` win over values in `base`.
""" """
for key, value in override.items(): for key, value in override.items():
if ( if key in base and isinstance(base[key], dict) and isinstance(value, dict):
key in base
and isinstance(base[key], dict)
and isinstance(value, dict)
):
_deep_merge(base[key], value) _deep_merge(base[key], value)
else: else:
base[key] = value base[key] = value
@@ -93,9 +90,7 @@ def _merge_repo_lists(
- Wenn category_name gesetzt ist, wird dieser in - Wenn category_name gesetzt ist, wird dieser in
repo["category_files"] eingetragen. repo["category_files"] eingetragen.
""" """
index: Dict[Tuple[str, str, str], Repo] = { index: Dict[Tuple[str, str, str], Repo] = {_repo_key(r): r for r in base_list}
_repo_key(r): r for r in base_list
}
for src in new_list: for src in new_list:
key = _repo_key(src) key = _repo_key(src)
@@ -233,10 +228,12 @@ def _load_defaults_from_package_or_project() -> Dict[str, Any]:
return {"directories": {}, "repositories": []} return {"directories": {}, "repositories": []}
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Hauptfunktion # Hauptfunktion
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
def load_config(user_config_path: str) -> Dict[str, Any]: def load_config(user_config_path: str) -> Dict[str, Any]:
""" """
Load and merge configuration for pkgmgr. Load and merge configuration for pkgmgr.
@@ -289,8 +286,12 @@ def load_config(user_config_path: str) -> Dict[str, Any]:
# repositories # repositories
merged["repositories"] = [] merged["repositories"] = []
_merge_repo_lists(merged["repositories"], defaults["repositories"], category_name=None) _merge_repo_lists(
_merge_repo_lists(merged["repositories"], user_cfg["repositories"], category_name=None) merged["repositories"], defaults["repositories"], category_name=None
)
_merge_repo_lists(
merged["repositories"], user_cfg["repositories"], category_name=None
)
# andere Top-Level-Keys (falls vorhanden) # andere Top-Level-Keys (falls vorhanden)
other_keys = (set(defaults.keys()) | set(user_cfg.keys())) - { other_keys = (set(defaults.keys()) | set(user_cfg.keys())) - {

View File

@@ -1,9 +1,10 @@
import yaml import yaml
import os import os
def save_user_config(user_config,USER_CONFIG_PATH:str):
def save_user_config(user_config, USER_CONFIG_PATH: str):
"""Save the user configuration to USER_CONFIG_PATH.""" """Save the user configuration to USER_CONFIG_PATH."""
os.makedirs(os.path.dirname(USER_CONFIG_PATH), exist_ok=True) os.makedirs(os.path.dirname(USER_CONFIG_PATH), exist_ok=True)
with open(USER_CONFIG_PATH, 'w') as f: with open(USER_CONFIG_PATH, "w") as f:
yaml.dump(user_config, f) yaml.dump(user_config, f)
print(f"User configuration updated in {USER_CONFIG_PATH}.") print(f"User configuration updated in {USER_CONFIG_PATH}.")

View File

@@ -16,7 +16,9 @@ class EnvTokenProvider:
source_name: str = "env" source_name: str = "env"
def get(self, request: TokenRequest) -> Optional[TokenResult]: def get(self, request: TokenRequest) -> Optional[TokenResult]:
for key in env_var_candidates(request.provider_kind, request.host, request.owner): for key in env_var_candidates(
request.provider_kind, request.host, request.owner
):
val = os.environ.get(key) val = os.environ.get(key)
if val: if val:
return TokenResult(token=val.strip(), source=self.source_name) return TokenResult(token=val.strip(), source=self.source_name)

View File

@@ -15,6 +15,7 @@ class GhTokenProvider:
This does NOT persist anything; it only reads what `gh` already knows. This does NOT persist anything; it only reads what `gh` already knows.
""" """
source_name: str = "gh" source_name: str = "gh"
def get(self, request: TokenRequest) -> Optional[TokenResult]: def get(self, request: TokenRequest) -> Optional[TokenResult]:

View File

@@ -21,9 +21,7 @@ def _import_keyring():
try: try:
import keyring # type: ignore import keyring # type: ignore
except Exception as exc: # noqa: BLE001 except Exception as exc: # noqa: BLE001
raise KeyringUnavailableError( raise KeyringUnavailableError("python-keyring is not installed.") from exc
"python-keyring is not installed."
) from exc
# Some environments have keyring installed but no usable backend. # Some environments have keyring installed but no usable backend.
# We do a lightweight "backend sanity check" by attempting to read the backend. # We do a lightweight "backend sanity check" by attempting to read the backend.

View File

@@ -9,7 +9,12 @@ from .providers.env import EnvTokenProvider
from .providers.gh import GhTokenProvider from .providers.gh import GhTokenProvider
from .providers.keyring import KeyringTokenProvider from .providers.keyring import KeyringTokenProvider
from .providers.prompt import PromptTokenProvider from .providers.prompt import PromptTokenProvider
from .types import KeyringUnavailableError, NoCredentialsError, TokenRequest, TokenResult from .types import (
KeyringUnavailableError,
NoCredentialsError,
TokenRequest,
TokenResult,
)
from .validate import validate_token from .validate import validate_token
@@ -55,7 +60,10 @@ class TokenResolver:
print(f" {msg}", file=sys.stderr) print(f" {msg}", file=sys.stderr)
print(" Tokens will NOT be persisted securely.", file=sys.stderr) print(" Tokens will NOT be persisted securely.", file=sys.stderr)
print("", file=sys.stderr) print("", file=sys.stderr)
print(" To enable secure token storage, install python-keyring:", file=sys.stderr) print(
" To enable secure token storage, install python-keyring:",
file=sys.stderr,
)
print(" pip install keyring", file=sys.stderr) print(" pip install keyring", file=sys.stderr)
print("", file=sys.stderr) print("", file=sys.stderr)
print(" Or install via system packages:", file=sys.stderr) print(" Or install via system packages:", file=sys.stderr)

View File

@@ -13,7 +13,9 @@ class KeyringKey:
username: str username: str
def build_keyring_key(provider_kind: str, host: str, owner: Optional[str]) -> KeyringKey: def build_keyring_key(
provider_kind: str, host: str, owner: Optional[str]
) -> KeyringKey:
"""Build a stable keyring key. """Build a stable keyring key.
- service: "pkgmgr:<provider>" - service: "pkgmgr:<provider>"
@@ -21,11 +23,15 @@ def build_keyring_key(provider_kind: str, host: str, owner: Optional[str]) -> Ke
""" """
provider_kind = str(provider_kind).strip().lower() provider_kind = str(provider_kind).strip().lower()
host = str(host).strip() host = str(host).strip()
owner_part = (str(owner).strip() if owner else "-") owner_part = str(owner).strip() if owner else "-"
return KeyringKey(service=f"pkgmgr:{provider_kind}", username=f"{host}|{owner_part}") return KeyringKey(
service=f"pkgmgr:{provider_kind}", username=f"{host}|{owner_part}"
)
def env_var_candidates(provider_kind: str, host: str, owner: Optional[str]) -> list[str]: def env_var_candidates(
provider_kind: str, host: str, owner: Optional[str]
) -> list[str]:
"""Return a list of environment variable names to try. """Return a list of environment variable names to try.
Order is from most specific to most generic. Order is from most specific to most generic.
@@ -44,7 +50,7 @@ def env_var_candidates(provider_kind: str, host: str, owner: Optional[str]) -> l
candidates.append(f"PKGMGR_{kind}_TOKEN") candidates.append(f"PKGMGR_{kind}_TOKEN")
candidates.append(f"PKGMGR_TOKEN_{kind}") candidates.append(f"PKGMGR_TOKEN_{kind}")
candidates.append("PKGMGR_TOKEN") candidates.append("PKGMGR_TOKEN")
return candidates return candidates

View File

@@ -18,4 +18,6 @@ def add_all(*, cwd: str = ".", preview: bool = False) -> None:
try: try:
run(["add", "-A"], cwd=cwd, preview=preview) run(["add", "-A"], cwd=cwd, preview=preview)
except GitRunError as exc: except GitRunError as exc:
raise GitAddAllError("Failed to stage all changes with `git add -A`.", cwd=cwd) from exc raise GitAddAllError(
"Failed to stage all changes with `git add -A`.", cwd=cwd
) from exc

View File

@@ -18,4 +18,6 @@ def branch_move(branch: str, *, cwd: str = ".", preview: bool = False) -> None:
try: try:
run(["branch", "-M", branch], cwd=cwd, preview=preview) run(["branch", "-M", branch], cwd=cwd, preview=preview)
except GitRunError as exc: except GitRunError as exc:
raise GitBranchMoveError(f"Failed to move/rename current branch to {branch!r}.", cwd=cwd) from exc raise GitBranchMoveError(
f"Failed to move/rename current branch to {branch!r}.", cwd=cwd
) from exc

View File

@@ -4,21 +4,26 @@ from __future__ import annotations
class GitBaseError(RuntimeError): class GitBaseError(RuntimeError):
"""Base error raised for Git related failures.""" """Base error raised for Git related failures."""
class GitRunError(GitBaseError): class GitRunError(GitBaseError):
"""Base error raised for Git related failures.""" """Base error raised for Git related failures."""
class GitNotRepositoryError(GitBaseError): class GitNotRepositoryError(GitBaseError):
"""Raised when the current working directory is not a git repository.""" """Raised when the current working directory is not a git repository."""
class GitQueryError(GitRunError): class GitQueryError(GitRunError):
"""Base class for read-only git query failures.""" """Base class for read-only git query failures."""
class GitCommandError(GitRunError): class GitCommandError(GitRunError):
""" """
Base class for state-changing git command failures. Base class for state-changing git command failures.
Use subclasses to provide stable error types for callers. Use subclasses to provide stable error types for callers.
""" """
def __init__(self, message: str, *, cwd: str = ".") -> None: def __init__(self, message: str, *, cwd: str = ".") -> None:
super().__init__(message) super().__init__(message)
if cwd in locals(): if cwd in locals():

View File

@@ -16,6 +16,7 @@ def _is_missing_key_error(exc: GitRunError) -> bool:
# 'git config --get' returns exit code 1 when the key is not set. # 'git config --get' returns exit code 1 when the key is not set.
return "exit code: 1" in msg return "exit code: 1" in msg
def get_config_value(key: str, *, cwd: str = ".") -> Optional[str]: def get_config_value(key: str, *, cwd: str = ".") -> Optional[str]:
""" """
Return a value from `git config --get <key>`, or None if not set. Return a value from `git config --get <key>`, or None if not set.

View File

@@ -15,4 +15,4 @@ def get_current_branch(cwd: str = ".") -> Optional[str]:
output = run(["rev-parse", "--abbrev-ref", "HEAD"], cwd=cwd) output = run(["rev-parse", "--abbrev-ref", "HEAD"], cwd=cwd)
except GitRunError: except GitRunError:
return None return None
return output or None return output or None

View File

@@ -30,4 +30,4 @@ def get_remote_head_commit(
) from exc ) from exc
# minimal parsing: first token is the hash # minimal parsing: first token is the hash
return (out.split()[0].strip() if out else "") return out.split()[0].strip() if out else ""

View File

@@ -4,6 +4,7 @@ from typing import Set
from ..run import run from ..run import run
def get_remote_push_urls(remote: str, cwd: str = ".") -> Set[str]: def get_remote_push_urls(remote: str, cwd: str = ".") -> Set[str]:
""" """
Return all push URLs configured for a remote. Return all push URLs configured for a remote.

View File

@@ -44,9 +44,7 @@ def run(
stderr = exc.stderr or "" stderr = exc.stderr or ""
if _is_not_repo_error(stderr): if _is_not_repo_error(stderr):
raise GitNotRepositoryError( raise GitNotRepositoryError(
f"Not a git repository: {cwd!r}\n" f"Not a git repository: {cwd!r}\nCommand: {cmd_str}\nSTDERR:\n{stderr}"
f"Command: {cmd_str}\n"
f"STDERR:\n{stderr}"
) from exc ) from exc
raise GitRunError( raise GitRunError(

View File

@@ -34,7 +34,15 @@ def get_repo_dir(repositories_base_dir: str, repo: Dict[str, Any]) -> str:
account = repo.get("account") account = repo.get("account")
repository = repo.get("repository") repository = repo.get("repository")
missing = [k for k, v in [("provider", provider), ("account", account), ("repository", repository)] if not v] missing = [
k
for k, v in [
("provider", provider),
("account", account),
("repository", repository),
]
if not v
]
if missing: if missing:
print( print(
"Error: repository entry is missing required keys.\n" "Error: repository entry is missing required keys.\n"

View File

@@ -9,4 +9,4 @@ def get_repo_identifier(repo, all_repos):
if count == 1: if count == 1:
return repo_name return repo_name
else: else:
return f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}' return f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"

View File

@@ -1,3 +1,3 @@
def filter_ignored(repos): def filter_ignored(repos):
"""Filter out repositories that have 'ignore' set to True.""" """Filter out repositories that have 'ignore' set to True."""
return [r for r in repos if not r.get("ignore", False)] return [r for r in repos if not r.get("ignore", False)]

View File

@@ -109,7 +109,9 @@ def resolve_repo_paths(repo_dir: str) -> RepoPaths:
] ]
) )
if rpm_spec is None: if rpm_spec is None:
rpm_spec = _find_first_spec_in_dir(os.path.join(repo_dir, "packaging", "fedora")) rpm_spec = _find_first_spec_in_dir(
os.path.join(repo_dir, "packaging", "fedora")
)
if rpm_spec is None: if rpm_spec is None:
rpm_spec = _find_first_spec_in_dir(repo_dir) rpm_spec = _find_first_spec_in_dir(repo_dir)

View File

@@ -1,5 +1,4 @@
def resolve_repos(identifiers: [], all_repos: []):
def resolve_repos(identifiers:[], all_repos:[]):
""" """
Given a list of identifier strings, return a list of repository configs. Given a list of identifier strings, return a list of repository configs.
The identifier can be: The identifier can be:
@@ -11,7 +10,9 @@ def resolve_repos(identifiers:[], all_repos:[]):
for ident in identifiers: for ident in identifiers:
matches = [] matches = []
for repo in all_repos: for repo in all_repos:
full_id = f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}' full_id = (
f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"
)
if ident == full_id: if ident == full_id:
matches.append(repo) matches.append(repo)
elif ident == repo.get("alias"): elif ident == repo.get("alias"):
@@ -24,4 +25,4 @@ def resolve_repos(identifiers:[], all_repos:[]):
print(f"Identifier '{ident}' did not match any repository in config.") print(f"Identifier '{ident}' did not match any repository in config.")
else: else:
selected.extend(matches) selected.extend(matches)
return selected return selected

View File

@@ -66,18 +66,26 @@ def verify_repository(repo, repo_dir, mode="local", no_verification=False):
if expected_commit: if expected_commit:
if not commit_hash: if not commit_hash:
commit_check_passed = False commit_check_passed = False
error_details.append(f"Expected commit: {expected_commit}, but could not determine current commit.") error_details.append(
f"Expected commit: {expected_commit}, but could not determine current commit."
)
elif commit_hash != expected_commit: elif commit_hash != expected_commit:
commit_check_passed = False commit_check_passed = False
error_details.append(f"Expected commit: {expected_commit}, found: {commit_hash}") error_details.append(
f"Expected commit: {expected_commit}, found: {commit_hash}"
)
if expected_gpg_keys: if expected_gpg_keys:
if not signing_key: if not signing_key:
gpg_check_passed = False gpg_check_passed = False
error_details.append(f"Expected one of GPG keys: {expected_gpg_keys}, but no signing key was found.") error_details.append(
f"Expected one of GPG keys: {expected_gpg_keys}, but no signing key was found."
)
elif signing_key not in expected_gpg_keys: elif signing_key not in expected_gpg_keys:
gpg_check_passed = False gpg_check_passed = False
error_details.append(f"Expected one of GPG keys: {expected_gpg_keys}, found: {signing_key}") error_details.append(
f"Expected one of GPG keys: {expected_gpg_keys}, found: {signing_key}"
)
if expected_commit and expected_gpg_keys: if expected_commit and expected_gpg_keys:
verified_ok = commit_check_passed and gpg_check_passed verified_ok = commit_check_passed and gpg_check_passed

View File

@@ -13,6 +13,7 @@ class InstalledVersion:
""" """
Represents a resolved installed version and the matched name. Represents a resolved installed version and the matched name.
""" """
name: str name: str
version: str version: str

View File

@@ -43,10 +43,14 @@ class SemVer:
minor = int(parts[1]) minor = int(parts[1])
patch = int(parts[2]) patch = int(parts[2])
except ValueError as exc: except ValueError as exc:
raise ValueError(f"Semantic version components must be integers: {value!r}") from exc raise ValueError(
f"Semantic version components must be integers: {value!r}"
) from exc
if major < 0 or minor < 0 or patch < 0: if major < 0 or minor < 0 or patch < 0:
raise ValueError(f"Semantic version components must be non-negative: {value!r}") raise ValueError(
f"Semantic version components must be non-negative: {value!r}"
)
return cls(major=major, minor=minor, patch=patch) return cls(major=major, minor=minor, patch=patch)

View File

@@ -37,9 +37,7 @@ class TestIntegrationBranchCommands(unittest.TestCase):
`pkgmgr branch open feature/test --base develop` must forward `pkgmgr branch open feature/test --base develop` must forward
the name and base branch to open_branch() with cwd=".". the name and base branch to open_branch() with cwd=".".
""" """
self._run_pkgmgr( self._run_pkgmgr(["branch", "open", "feature/test", "--base", "develop"])
["branch", "open", "feature/test", "--base", "develop"]
)
mock_open_branch.assert_called_once() mock_open_branch.assert_called_once()
_, kwargs = mock_open_branch.call_args _, kwargs = mock_open_branch.call_args
@@ -74,9 +72,7 @@ class TestIntegrationBranchCommands(unittest.TestCase):
`pkgmgr branch close feature/test --base develop` must forward `pkgmgr branch close feature/test --base develop` must forward
the name and base branch to close_branch() with cwd=".". the name and base branch to close_branch() with cwd=".".
""" """
self._run_pkgmgr( self._run_pkgmgr(["branch", "close", "feature/test", "--base", "develop"])
["branch", "close", "feature/test", "--base", "develop"]
)
mock_close_branch.assert_called_once() mock_close_branch.assert_called_once()
_, kwargs = mock_close_branch.call_args _, kwargs = mock_close_branch.call_args

View File

@@ -3,15 +3,14 @@ import tempfile
import unittest import unittest
from pathlib import Path from pathlib import Path
class TestMakefileThreeTimes(unittest.TestCase): class TestMakefileThreeTimes(unittest.TestCase):
def test_make_install_three_times(self): def test_make_install_three_times(self):
with tempfile.TemporaryDirectory(prefix="makefile-3x-") as tmp: with tempfile.TemporaryDirectory(prefix="makefile-3x-") as tmp:
repo = Path(tmp) repo = Path(tmp)
# Minimal Makefile with install target # Minimal Makefile with install target
(repo / "Makefile").write_text( (repo / "Makefile").write_text("install:\n\t@echo install >> install.log\n")
"install:\n\t@echo install >> install.log\n"
)
for i in range(1, 4): for i in range(1, 4):
print(f"\n=== RUN {i}/3 ===") print(f"\n=== RUN {i}/3 ===")

View File

@@ -114,7 +114,9 @@ class TestIntegrationInstalPKGMGRShallow(unittest.TestCase):
# Optional XDG override for a fully isolated environment # Optional XDG override for a fully isolated environment
os.environ.setdefault("XDG_CONFIG_HOME", os.path.join(temp_home, ".config")) os.environ.setdefault("XDG_CONFIG_HOME", os.path.join(temp_home, ".config"))
os.environ.setdefault("XDG_CACHE_HOME", os.path.join(temp_home, ".cache")) os.environ.setdefault("XDG_CACHE_HOME", os.path.join(temp_home, ".cache"))
os.environ.setdefault("XDG_DATA_HOME", os.path.join(temp_home, ".local", "share")) os.environ.setdefault(
"XDG_DATA_HOME", os.path.join(temp_home, ".local", "share")
)
# 🔧 IMPORTANT FIX: allow Git to access /src safely # 🔧 IMPORTANT FIX: allow Git to access /src safely
configure_git_safe_directory() configure_git_safe_directory()

View File

@@ -14,17 +14,16 @@ class TestPkgmgrInstallThreeTimesNix(unittest.TestCase):
env["HOME"] = tmp env["HOME"] = tmp
# Ensure nix is found # Ensure nix is found
env["PATH"] = "/nix/var/nix/profiles/default/bin:" + os.environ.get("PATH", "") env["PATH"] = "/nix/var/nix/profiles/default/bin:" + os.environ.get(
"PATH", ""
)
# IMPORTANT: # IMPORTANT:
# nix run uses git+file:///src internally -> Git will reject /src if it's not a safe.directory. # nix run uses git+file:///src internally -> Git will reject /src if it's not a safe.directory.
# Our test sets HOME to a temp dir, so we must provide a temp global gitconfig. # Our test sets HOME to a temp dir, so we must provide a temp global gitconfig.
gitconfig = tmp_path / ".gitconfig" gitconfig = tmp_path / ".gitconfig"
gitconfig.write_text( gitconfig.write_text(
"[safe]\n" "[safe]\n\tdirectory = /src\n\tdirectory = /src/.git\n\tdirectory = *\n"
"\tdirectory = /src\n"
"\tdirectory = /src/.git\n"
"\tdirectory = *\n"
) )
env["GIT_CONFIG_GLOBAL"] = str(gitconfig) env["GIT_CONFIG_GLOBAL"] = str(gitconfig)

Some files were not shown because too many files have changed in this diff Show More