Compare commits
15 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a20814cb37 | ||
|
|
feb5ba267f | ||
|
|
591be4ef35 | ||
|
|
3e6ef0fd68 | ||
|
|
3d5c770def | ||
|
|
f4339a746a | ||
|
|
763f02a9a4 | ||
|
|
2eec873a17 | ||
|
|
17ee947930 | ||
|
|
b989bdd4eb | ||
|
|
c4da8368d8 | ||
|
|
997c265cfb | ||
|
|
955028288f | ||
|
|
866572e252 | ||
|
|
b0a733369e |
4
.github/workflows/test-e2e.yml
vendored
4
.github/workflows/test-e2e.yml
vendored
@@ -11,7 +11,9 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
distro: [arch, debian, ubuntu, fedora, centos]
|
||||
|
||||
env:
|
||||
NIX_CONFIG: |
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
4
.github/workflows/test-env-nix.yml
vendored
4
.github/workflows/test-env-nix.yml
vendored
@@ -12,7 +12,9 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
distro: [arch, debian, ubuntu, fedora, centos]
|
||||
|
||||
env:
|
||||
NIX_CONFIG: |
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
4
.github/workflows/test-env-virtual.yml
vendored
4
.github/workflows/test-env-virtual.yml
vendored
@@ -11,7 +11,9 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
distro: [arch, debian, ubuntu, fedora, centos]
|
||||
|
||||
env:
|
||||
NIX_CONFIG: |
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
4
.github/workflows/test-integration.yml
vendored
4
.github/workflows/test-integration.yml
vendored
@@ -7,7 +7,9 @@ jobs:
|
||||
test-integration:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
|
||||
env:
|
||||
NIX_CONFIG: |
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
4
.github/workflows/test-unit.yml
vendored
4
.github/workflows/test-unit.yml
vendored
@@ -7,7 +7,9 @@ jobs:
|
||||
test-unit:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
|
||||
env:
|
||||
NIX_CONFIG: |
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
15
.github/workflows/test-virgin-root.yml
vendored
15
.github/workflows/test-virgin-root.yml
vendored
@@ -11,7 +11,9 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
distro: [arch, debian, ubuntu, fedora, centos]
|
||||
|
||||
env:
|
||||
NIX_CONFIG: |
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
@@ -19,27 +21,26 @@ jobs:
|
||||
- name: Show Docker version
|
||||
run: docker version
|
||||
|
||||
# 🔹 BUILD virgin image if missing
|
||||
- name: Build virgin container (${{ matrix.distro }})
|
||||
run: |
|
||||
set -euo pipefail
|
||||
PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin
|
||||
|
||||
# 🔹 RUN test inside virgin image
|
||||
- name: Virgin ${{ matrix.distro }} pkgmgr test (root)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
docker run --rm \
|
||||
-v "$PWD":/src \
|
||||
-v "$PWD":/opt/src/pkgmgr \
|
||||
-v pkgmgr_repos:/root/Repositories \
|
||||
-v pkgmgr_pip_cache:/root/.cache/pip \
|
||||
-w /src \
|
||||
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||
-w /opt/src/pkgmgr \
|
||||
"pkgmgr-${{ matrix.distro }}-virgin" \
|
||||
bash -lc '
|
||||
set -euo pipefail
|
||||
|
||||
git config --global --add safe.directory /src
|
||||
git config --global --add safe.directory /opt/src/pkgmgr
|
||||
|
||||
make install
|
||||
make setup
|
||||
@@ -50,5 +51,5 @@ jobs:
|
||||
pkgmgr version pkgmgr
|
||||
|
||||
echo ">>> Running Nix-based: nix run .#pkgmgr -- version pkgmgr"
|
||||
nix run /src#pkgmgr -- version pkgmgr
|
||||
nix run /opt/src/pkgmgr#pkgmgr -- version pkgmgr
|
||||
'
|
||||
|
||||
17
.github/workflows/test-virgin-user.yml
vendored
17
.github/workflows/test-virgin-user.yml
vendored
@@ -11,7 +11,9 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
distro: [arch, debian, ubuntu, fedora, centos]
|
||||
|
||||
env:
|
||||
NIX_CONFIG: |
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
@@ -19,20 +21,19 @@ jobs:
|
||||
- name: Show Docker version
|
||||
run: docker version
|
||||
|
||||
# 🔹 BUILD virgin image if missing
|
||||
- name: Build virgin container (${{ matrix.distro }})
|
||||
run: |
|
||||
set -euo pipefail
|
||||
PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin
|
||||
|
||||
# 🔹 RUN test inside virgin image as non-root
|
||||
- name: Virgin ${{ matrix.distro }} pkgmgr test (user)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
docker run --rm \
|
||||
-v "$PWD":/src \
|
||||
-w /src \
|
||||
-v "$PWD":/opt/src/pkgmgr \
|
||||
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||
-w /opt/src/pkgmgr \
|
||||
"pkgmgr-${{ matrix.distro }}-virgin" \
|
||||
bash -lc '
|
||||
set -euo pipefail
|
||||
@@ -42,7 +43,7 @@ jobs:
|
||||
useradd -m dev
|
||||
echo "dev ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/dev
|
||||
chmod 0440 /etc/sudoers.d/dev
|
||||
chown -R dev:dev /src
|
||||
chown -R dev:dev /opt/src/pkgmgr
|
||||
|
||||
mkdir -p /nix/store /nix/var/nix /nix/var/log/nix /nix/var/nix/profiles
|
||||
chown -R dev:dev /nix
|
||||
@@ -51,7 +52,7 @@ jobs:
|
||||
|
||||
sudo -H -u dev env HOME=/home/dev PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 bash -lc "
|
||||
set -euo pipefail
|
||||
cd /src
|
||||
cd /opt/src/pkgmgr
|
||||
|
||||
make setup-venv
|
||||
. \"\$HOME/.venvs/pkgmgr/bin/activate\"
|
||||
@@ -59,6 +60,6 @@ jobs:
|
||||
pkgmgr version pkgmgr
|
||||
|
||||
export NIX_REMOTE=local
|
||||
nix run /src#pkgmgr -- version pkgmgr
|
||||
nix run /opt/src/pkgmgr#pkgmgr -- version pkgmgr
|
||||
"
|
||||
'
|
||||
|
||||
29
CHANGELOG.md
29
CHANGELOG.md
@@ -1,3 +1,32 @@
|
||||
## [1.8.7] - 2025-12-19
|
||||
|
||||
* * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
|
||||
* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
|
||||
* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
|
||||
|
||||
|
||||
## [1.8.6] - 2025-12-17
|
||||
|
||||
* Prevent Rate Limits during GitHub Nix Setups
|
||||
|
||||
|
||||
## [1.8.5] - 2025-12-17
|
||||
|
||||
* * Clearer Git error handling, especially when a directory is not a Git repository.
|
||||
* More reliable repository verification with improved commit and GPG signature checks.
|
||||
* Better error messages and overall robustness when working with Git-based workflows.
|
||||
|
||||
|
||||
## [1.9.0] - 2025-12-17
|
||||
|
||||
* Automated release.
|
||||
|
||||
|
||||
## [1.8.4] - 2025-12-17
|
||||
|
||||
* * Made pkgmgr’s base-layer role explicit by standardizing the Docker/CI mount path to *`/opt/src/pkgmgr`*.
|
||||
|
||||
|
||||
## [1.8.3] - 2025-12-16
|
||||
|
||||
* MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
|
||||
|
||||
@@ -50,6 +50,6 @@ RUN set -euo pipefail; \
|
||||
# Entry point
|
||||
COPY scripts/docker/entry.sh /usr/local/bin/docker-entry.sh
|
||||
|
||||
WORKDIR /src
|
||||
WORKDIR /opt/src/pkgmgr
|
||||
ENTRYPOINT ["/usr/local/bin/docker-entry.sh"]
|
||||
CMD ["pkgmgr", "--help"]
|
||||
|
||||
4
Makefile
4
Makefile
@@ -10,6 +10,10 @@ DISTROS ?= arch debian ubuntu fedora centos
|
||||
PKGMGR_DISTRO ?= arch
|
||||
export PKGMGR_DISTRO
|
||||
|
||||
# Nix Config Variable (To avoid rate limit)
|
||||
NIX_CONFIG ?=
|
||||
export NIX_CONFIG
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Base images
|
||||
# (kept for documentation/reference; actual build logic is in scripts/build)
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
rec {
|
||||
pkgmgr = pyPkgs.buildPythonApplication {
|
||||
pname = "package-manager";
|
||||
version = "1.8.3";
|
||||
version = "1.8.7";
|
||||
|
||||
# Use the git repo as source
|
||||
src = ./.;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>
|
||||
|
||||
pkgname=package-manager
|
||||
pkgver=1.8.3
|
||||
pkgver=1.8.7
|
||||
pkgrel=1
|
||||
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
|
||||
arch=('any')
|
||||
|
||||
@@ -1,3 +1,37 @@
|
||||
package-manager (1.8.7-1) unstable; urgency=medium
|
||||
|
||||
* * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
|
||||
* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
|
||||
* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Fri, 19 Dec 2025 14:15:47 +0100
|
||||
|
||||
package-manager (1.8.6-1) unstable; urgency=medium
|
||||
|
||||
* Prevent Rate Limits during GitHub Nix Setups
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 23:50:31 +0100
|
||||
|
||||
package-manager (1.8.5-1) unstable; urgency=medium
|
||||
|
||||
* * Clearer Git error handling, especially when a directory is not a Git repository.
|
||||
* More reliable repository verification with improved commit and GPG signature checks.
|
||||
* Better error messages and overall robustness when working with Git-based workflows.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 22:15:48 +0100
|
||||
|
||||
package-manager (1.9.0-1) unstable; urgency=medium
|
||||
|
||||
* Automated release.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 22:10:31 +0100
|
||||
|
||||
package-manager (1.8.4-1) unstable; urgency=medium
|
||||
|
||||
* * Made pkgmgr’s base-layer role explicit by standardizing the Docker/CI mount path to *`/opt/src/pkgmgr`*.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 11:20:16 +0100
|
||||
|
||||
package-manager (1.8.3-1) unstable; urgency=medium
|
||||
|
||||
* MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
Name: package-manager
|
||||
Version: 1.8.3
|
||||
Version: 1.8.7
|
||||
Release: 1%{?dist}
|
||||
Summary: Wrapper that runs Kevin's package-manager via Nix flake
|
||||
|
||||
@@ -74,6 +74,25 @@ echo ">>> package-manager removed. Nix itself was not removed."
|
||||
/usr/lib/package-manager/
|
||||
|
||||
%changelog
|
||||
* Fri Dec 19 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.7-1
|
||||
- * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
|
||||
* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
|
||||
* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
|
||||
|
||||
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.6-1
|
||||
- Prevent Rate Limits during GitHub Nix Setups
|
||||
|
||||
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.5-1
|
||||
- * Clearer Git error handling, especially when a directory is not a Git repository.
|
||||
* More reliable repository verification with improved commit and GPG signature checks.
|
||||
* Better error messages and overall robustness when working with Git-based workflows.
|
||||
|
||||
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.0-1
|
||||
- Automated release.
|
||||
|
||||
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.4-1
|
||||
- * Made pkgmgr’s base-layer role explicit by standardizing the Docker/CI mount path to *`/opt/src/pkgmgr`*.
|
||||
|
||||
* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.3-1
|
||||
- MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "kpmx"
|
||||
version = "1.8.3"
|
||||
version = "1.8.7"
|
||||
description = "Kevin's package-manager tool (pkgmgr)"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.9"
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "[docker] Starting package-manager container"
|
||||
echo "[docker-pkgmgr] Starting package-manager container"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Log distribution info
|
||||
@@ -9,19 +9,19 @@ echo "[docker] Starting package-manager container"
|
||||
if [[ -f /etc/os-release ]]; then
|
||||
# shellcheck disable=SC1091
|
||||
. /etc/os-release
|
||||
echo "[docker] Detected distro: ${ID:-unknown} (like: ${ID_LIKE:-})"
|
||||
echo "[docker-pkgmgr] Detected distro: ${ID:-unknown} (like: ${ID_LIKE:-})"
|
||||
fi
|
||||
|
||||
# Always use /src (mounted from host) as working directory
|
||||
echo "[docker] Using /src as working directory"
|
||||
cd /src
|
||||
# Always use /opt/src/pkgmgr (mounted from host) as working directory
|
||||
echo "[docker-pkgmgr] Using /opt/src/pkgmgr as working directory"
|
||||
cd /opt/src/pkgmgr
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# DEV mode: rebuild package-manager from the mounted /src tree
|
||||
# DEV mode: rebuild package-manager from the mounted /opt/src/pkgmgr tree
|
||||
# ---------------------------------------------------------------------------
|
||||
if [[ "${REINSTALL_PKGMGR:-0}" == "1" ]]; then
|
||||
echo "[docker] DEV mode enabled (REINSTALL_PKGMGR=1)"
|
||||
echo "[docker] Rebuilding package-manager from /src via scripts/installation/package.sh..."
|
||||
echo "[docker-pkgmgr] DEV mode enabled (REINSTALL_PKGMGR=1)"
|
||||
echo "[docker-pkgmgr] Rebuilding package-manager from /opt/src/pkgmgr via scripts/installation/package.sh..."
|
||||
bash scripts/installation/package.sh || exit 1
|
||||
fi
|
||||
|
||||
@@ -29,9 +29,9 @@ fi
|
||||
# Hand off to pkgmgr or arbitrary command
|
||||
# ---------------------------------------------------------------------------
|
||||
if [[ $# -eq 0 ]]; then
|
||||
echo "[docker] No arguments provided. Showing pkgmgr help..."
|
||||
echo "[docker-pkgmgr] No arguments provided. Showing pkgmgr help..."
|
||||
exec pkgmgr --help
|
||||
else
|
||||
echo "[docker] Executing command: $*"
|
||||
echo "[docker-pkgmgr] Executing command: $*"
|
||||
exec "$@"
|
||||
fi
|
||||
|
||||
@@ -6,7 +6,7 @@ echo "[arch/package] Building Arch package (makepkg --nodeps) in an isolated bui
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||
|
||||
# We must not build inside /src (mounted repo). Build in /tmp to avoid permission issues.
|
||||
# We must not build inside /opt/src/pkgmgr (mounted repo). Build in /tmp to avoid permission issues.
|
||||
BUILD_ROOT="/tmp/package-manager-arch-build"
|
||||
PKG_SRC_DIR="${PROJECT_ROOT}/packaging/arch"
|
||||
PKG_BUILD_DIR="${BUILD_ROOT}/packaging/arch"
|
||||
|
||||
@@ -6,12 +6,13 @@ echo ">>> Running E2E tests: $PKGMGR_DISTRO"
|
||||
echo "============================================================"
|
||||
|
||||
docker run --rm \
|
||||
-v "$(pwd):/src" \
|
||||
-v "$(pwd):/opt/src/pkgmgr" \
|
||||
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
||||
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
||||
-e REINSTALL_PKGMGR=1 \
|
||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||
--workdir /src \
|
||||
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||
--workdir /opt/src/pkgmgr \
|
||||
"pkgmgr-${PKGMGR_DISTRO}" \
|
||||
bash -lc '
|
||||
set -euo pipefail
|
||||
@@ -40,14 +41,14 @@ docker run --rm \
|
||||
}
|
||||
|
||||
# Mark the mounted repository as safe to avoid Git ownership errors.
|
||||
# Newer Git (e.g. on Ubuntu) complains about the gitdir (/src/.git),
|
||||
# older versions about the worktree (/src). Nix turns "." into the
|
||||
# flake input "git+file:///src", which then uses Git under the hood.
|
||||
# Newer Git (e.g. on Ubuntu) complains about the gitdir (/opt/src/pkgmgr/.git),
|
||||
# older versions about the worktree (/opt/src/pkgmgr). Nix turns "." into the
|
||||
# flake input "git+file:///opt/src/pkgmgr", which then uses Git under the hood.
|
||||
if command -v git >/dev/null 2>&1; then
|
||||
# Worktree path
|
||||
git config --global --add safe.directory /src || true
|
||||
git config --global --add safe.directory /opt/src/pkgmgr || true
|
||||
# Gitdir path shown in the "dubious ownership" error
|
||||
git config --global --add safe.directory /src/.git || true
|
||||
git config --global --add safe.directory /opt/src/pkgmgr/.git || true
|
||||
# Ephemeral CI containers: allow all paths as a last resort
|
||||
git config --global --add safe.directory "*" || true
|
||||
fi
|
||||
@@ -55,6 +56,6 @@ docker run --rm \
|
||||
# Run the E2E tests inside the Nix development shell
|
||||
nix develop .#default --no-write-lock-file -c \
|
||||
python3 -m unittest discover \
|
||||
-s /src/tests/e2e \
|
||||
-s /opt/src/pkgmgr/tests/e2e \
|
||||
-p "$TEST_PATTERN"
|
||||
'
|
||||
|
||||
@@ -9,18 +9,19 @@ echo ">>> Image: ${IMAGE}"
|
||||
echo "============================================================"
|
||||
|
||||
docker run --rm \
|
||||
-v "$(pwd):/src" \
|
||||
-v "$(pwd):/opt/src/pkgmgr" \
|
||||
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
||||
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
||||
--workdir /src \
|
||||
--workdir /opt/src/pkgmgr \
|
||||
-e REINSTALL_PKGMGR=1 \
|
||||
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||
"${IMAGE}" \
|
||||
bash -lc '
|
||||
set -euo pipefail
|
||||
|
||||
if command -v git >/dev/null 2>&1; then
|
||||
git config --global --add safe.directory /src || true
|
||||
git config --global --add safe.directory /src/.git || true
|
||||
git config --global --add safe.directory /opt/src/pkgmgr || true
|
||||
git config --global --add safe.directory /opt/src/pkgmgr/.git || true
|
||||
git config --global --add safe.directory "*" || true
|
||||
fi
|
||||
|
||||
@@ -38,9 +39,9 @@ docker run --rm \
|
||||
# ------------------------------------------------------------
|
||||
# Retry helper for GitHub API rate-limit (HTTP 403)
|
||||
# ------------------------------------------------------------
|
||||
if [[ -f /src/scripts/nix/lib/retry_403.sh ]]; then
|
||||
if [[ -f /opt/src/pkgmgr/scripts/nix/lib/retry_403.sh ]]; then
|
||||
# shellcheck source=./scripts/nix/lib/retry_403.sh
|
||||
source /src/scripts/nix/lib/retry_403.sh
|
||||
source /opt/src/pkgmgr/scripts/nix/lib/retry_403.sh
|
||||
elif [[ -f ./scripts/nix/lib/retry_403.sh ]]; then
|
||||
# shellcheck source=./scripts/nix/lib/retry_403.sh
|
||||
source ./scripts/nix/lib/retry_403.sh
|
||||
|
||||
@@ -17,8 +17,9 @@ echo
|
||||
# ------------------------------------------------------------
|
||||
if OUTPUT=$(docker run --rm \
|
||||
-e REINSTALL_PKGMGR=1 \
|
||||
-v "$(pwd):/src" \
|
||||
-w /src \
|
||||
-v "$(pwd):/opt/src/pkgmgr" \
|
||||
-w /opt/src/pkgmgr \
|
||||
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||
"${IMAGE}" \
|
||||
bash -lc '
|
||||
set -euo pipefail
|
||||
|
||||
@@ -6,19 +6,20 @@ echo ">>> Running INTEGRATION tests in ${PKGMGR_DISTRO} container"
|
||||
echo "============================================================"
|
||||
|
||||
docker run --rm \
|
||||
-v "$(pwd):/src" \
|
||||
-v "$(pwd):/opt/src/pkgmgr" \
|
||||
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
||||
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
||||
--workdir /src \
|
||||
--workdir /opt/src/pkgmgr \
|
||||
-e REINSTALL_PKGMGR=1 \
|
||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||
"pkgmgr-${PKGMGR_DISTRO}" \
|
||||
bash -lc '
|
||||
set -e;
|
||||
git config --global --add safe.directory /src || true;
|
||||
git config --global --add safe.directory /opt/src/pkgmgr || true;
|
||||
nix develop .#default --no-write-lock-file -c \
|
||||
python3 -m unittest discover \
|
||||
-s tests/integration \
|
||||
-t /src \
|
||||
-t /opt/src/pkgmgr \
|
||||
-p "$TEST_PATTERN";
|
||||
'
|
||||
|
||||
@@ -6,19 +6,20 @@ echo ">>> Running UNIT tests in ${PKGMGR_DISTRO} container"
|
||||
echo "============================================================"
|
||||
|
||||
docker run --rm \
|
||||
-v "$(pwd):/src" \
|
||||
-v "$(pwd):/opt/src/pkgmgr" \
|
||||
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
||||
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
||||
--workdir /src \
|
||||
--workdir /opt/src/pkgmgr \
|
||||
-e REINSTALL_PKGMGR=1 \
|
||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||
"pkgmgr-${PKGMGR_DISTRO}" \
|
||||
bash -lc '
|
||||
set -e;
|
||||
git config --global --add safe.directory /src || true;
|
||||
git config --global --add safe.directory /opt/src/pkgmgr || true;
|
||||
nix develop .#default --no-write-lock-file -c \
|
||||
python3 -m unittest discover \
|
||||
-s tests/unit \
|
||||
-t /src \
|
||||
-t /opt/src/pkgmgr \
|
||||
-p "$TEST_PATTERN";
|
||||
'
|
||||
|
||||
@@ -2,7 +2,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from pkgmgr.core.git.errors import GitError
|
||||
from pkgmgr.core.git.errors import GitRunError
|
||||
from pkgmgr.core.git.queries import get_current_branch
|
||||
from pkgmgr.core.git.commands import (
|
||||
GitDeleteRemoteBranchError,
|
||||
@@ -32,7 +32,7 @@ def close_branch(
|
||||
if not name:
|
||||
try:
|
||||
name = get_current_branch(cwd=cwd)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise RuntimeError(f"Failed to detect current branch: {exc}") from exc
|
||||
|
||||
if not name:
|
||||
@@ -48,14 +48,18 @@ def close_branch(
|
||||
|
||||
# Confirmation
|
||||
if not force:
|
||||
answer = input(
|
||||
answer = (
|
||||
input(
|
||||
f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): "
|
||||
).strip().lower()
|
||||
)
|
||||
.strip()
|
||||
.lower()
|
||||
)
|
||||
if answer != "y":
|
||||
print("Aborted closing branch.")
|
||||
return
|
||||
|
||||
# Execute workflow (commands raise specific GitError subclasses)
|
||||
# Execute workflow (commands raise specific GitRunError subclasses)
|
||||
fetch("origin", cwd=cwd)
|
||||
checkout(target_base, cwd=cwd)
|
||||
pull("origin", target_base, cwd=cwd)
|
||||
|
||||
@@ -2,7 +2,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from pkgmgr.core.git.errors import GitError
|
||||
from pkgmgr.core.git.errors import GitRunError
|
||||
from pkgmgr.core.git.queries import get_current_branch
|
||||
from pkgmgr.core.git.commands import (
|
||||
GitDeleteRemoteBranchError,
|
||||
@@ -26,7 +26,7 @@ def drop_branch(
|
||||
if not name:
|
||||
try:
|
||||
name = get_current_branch(cwd=cwd)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise RuntimeError(f"Failed to detect current branch: {exc}") from exc
|
||||
|
||||
if not name:
|
||||
@@ -41,9 +41,13 @@ def drop_branch(
|
||||
|
||||
# Confirmation
|
||||
if not force:
|
||||
answer = input(
|
||||
answer = (
|
||||
input(
|
||||
f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): "
|
||||
).strip().lower()
|
||||
)
|
||||
.strip()
|
||||
.lower()
|
||||
)
|
||||
if answer != "y":
|
||||
print("Aborted dropping branch.")
|
||||
return
|
||||
|
||||
@@ -30,7 +30,7 @@ def open_branch(
|
||||
|
||||
resolved_base = resolve_base_branch(base_branch, fallback_base, cwd=cwd)
|
||||
|
||||
# Workflow (commands raise specific GitError subclasses)
|
||||
# Workflow (commands raise specific GitBaseError subclasses)
|
||||
fetch("origin", cwd=cwd)
|
||||
checkout(resolved_base, cwd=cwd)
|
||||
pull("origin", resolved_base, cwd=cwd)
|
||||
|
||||
@@ -2,14 +2,17 @@ import yaml
|
||||
import os
|
||||
from pkgmgr.core.config.save import save_user_config
|
||||
|
||||
def interactive_add(config,USER_CONFIG_PATH:str):
|
||||
|
||||
def interactive_add(config, USER_CONFIG_PATH: str):
|
||||
"""Interactively prompt the user to add a new repository entry to the user config."""
|
||||
print("Adding a new repository configuration entry.")
|
||||
new_entry = {}
|
||||
new_entry["provider"] = input("Provider (e.g., github.com): ").strip()
|
||||
new_entry["account"] = input("Account (e.g., yourusername): ").strip()
|
||||
new_entry["repository"] = input("Repository name (e.g., mytool): ").strip()
|
||||
new_entry["command"] = input("Command (optional, leave blank to auto-detect): ").strip()
|
||||
new_entry["command"] = input(
|
||||
"Command (optional, leave blank to auto-detect): "
|
||||
).strip()
|
||||
new_entry["description"] = input("Description (optional): ").strip()
|
||||
new_entry["replacement"] = input("Replacement (optional): ").strip()
|
||||
new_entry["alias"] = input("Alias (optional): ").strip()
|
||||
@@ -25,12 +28,12 @@ def interactive_add(config,USER_CONFIG_PATH:str):
|
||||
confirm = input("Add this entry to user config? (y/N): ").strip().lower()
|
||||
if confirm == "y":
|
||||
if os.path.exists(USER_CONFIG_PATH):
|
||||
with open(USER_CONFIG_PATH, 'r') as f:
|
||||
with open(USER_CONFIG_PATH, "r") as f:
|
||||
user_config = yaml.safe_load(f) or {}
|
||||
else:
|
||||
user_config = {"repositories": []}
|
||||
user_config.setdefault("repositories", [])
|
||||
user_config["repositories"].append(new_entry)
|
||||
save_user_config(user_config,USER_CONFIG_PATH)
|
||||
save_user_config(user_config, USER_CONFIG_PATH)
|
||||
else:
|
||||
print("Entry not added.")
|
||||
@@ -107,11 +107,15 @@ def config_init(
|
||||
# Already known?
|
||||
if key in default_keys:
|
||||
skipped += 1
|
||||
print(f"[SKIP] (defaults) {provider}/{account}/{repo_name}")
|
||||
print(
|
||||
f"[SKIP] (defaults) {provider}/{account}/{repo_name}"
|
||||
)
|
||||
continue
|
||||
if key in existing_keys:
|
||||
skipped += 1
|
||||
print(f"[SKIP] (user-config) {provider}/{account}/{repo_name}")
|
||||
print(
|
||||
f"[SKIP] (user-config) {provider}/{account}/{repo_name}"
|
||||
)
|
||||
continue
|
||||
|
||||
print(f"[ADD] {provider}/{account}/{repo_name}")
|
||||
@@ -121,7 +125,9 @@ def config_init(
|
||||
if verified_commit:
|
||||
print(f"[INFO] Latest commit: {verified_commit}")
|
||||
else:
|
||||
print("[WARN] Could not read commit (not a git repo or no commits).")
|
||||
print(
|
||||
"[WARN] Could not read commit (not a git repo or no commits)."
|
||||
)
|
||||
|
||||
entry: Dict[str, Any] = {
|
||||
"provider": provider,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import yaml
|
||||
from pkgmgr.core.config.load import load_config
|
||||
|
||||
|
||||
def show_config(selected_repos, user_config_path, full_config=False):
|
||||
"""Display configuration for one or more repositories, or the entire merged config."""
|
||||
if full_config:
|
||||
@@ -8,7 +9,9 @@ def show_config(selected_repos, user_config_path, full_config=False):
|
||||
print(yaml.dump(merged, default_flow_style=False))
|
||||
else:
|
||||
for repo in selected_repos:
|
||||
identifier = f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
|
||||
identifier = (
|
||||
f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"
|
||||
)
|
||||
print(f"Repository: {identifier}")
|
||||
for key, value in repo.items():
|
||||
print(f" {key}: {value}")
|
||||
|
||||
@@ -66,10 +66,7 @@ def _ensure_repo_dir(
|
||||
repo_dir = get_repo_dir(repositories_base_dir, repo)
|
||||
|
||||
if not os.path.exists(repo_dir):
|
||||
print(
|
||||
f"Repository directory '{repo_dir}' does not exist. "
|
||||
"Cloning it now..."
|
||||
)
|
||||
print(f"Repository directory '{repo_dir}' does not exist. Cloning it now...")
|
||||
clone_repos(
|
||||
[repo],
|
||||
repositories_base_dir,
|
||||
@@ -79,10 +76,7 @@ def _ensure_repo_dir(
|
||||
clone_mode,
|
||||
)
|
||||
if not os.path.exists(repo_dir):
|
||||
print(
|
||||
f"Cloning failed for repository {identifier}. "
|
||||
"Skipping installation."
|
||||
)
|
||||
print(f"Cloning failed for repository {identifier}. Skipping installation.")
|
||||
return None
|
||||
|
||||
return repo_dir
|
||||
@@ -115,7 +109,9 @@ def _verify_repo(
|
||||
|
||||
if silent:
|
||||
# Non-interactive mode: continue with a warning.
|
||||
print(f"[Warning] Continuing despite verification failure for {identifier} (--silent).")
|
||||
print(
|
||||
f"[Warning] Continuing despite verification failure for {identifier} (--silent)."
|
||||
)
|
||||
else:
|
||||
choice = input("Continue anyway? [y/N]: ").strip().lower()
|
||||
if choice != "y":
|
||||
@@ -232,12 +228,16 @@ def install_repos(
|
||||
code = exc.code if isinstance(exc.code, int) else str(exc.code)
|
||||
failures.append((identifier, f"installer failed (exit={code})"))
|
||||
if not quiet:
|
||||
print(f"[Warning] install: repository {identifier} failed (exit={code}). Continuing...")
|
||||
print(
|
||||
f"[Warning] install: repository {identifier} failed (exit={code}). Continuing..."
|
||||
)
|
||||
continue
|
||||
except Exception as exc:
|
||||
failures.append((identifier, f"unexpected error: {exc}"))
|
||||
if not quiet:
|
||||
print(f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing...")
|
||||
print(
|
||||
f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing..."
|
||||
)
|
||||
continue
|
||||
|
||||
if failures and emit_summary and not quiet:
|
||||
|
||||
@@ -14,6 +14,10 @@ from pkgmgr.actions.install.installers.python import PythonInstaller # noqa: F4
|
||||
from pkgmgr.actions.install.installers.makefile import MakefileInstaller # noqa: F401
|
||||
|
||||
# OS-specific installers
|
||||
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller # noqa: F401
|
||||
from pkgmgr.actions.install.installers.os_packages.debian_control import DebianControlInstaller # noqa: F401
|
||||
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import (
|
||||
ArchPkgbuildInstaller as ArchPkgbuildInstaller,
|
||||
) # noqa: F401
|
||||
from pkgmgr.actions.install.installers.os_packages.debian_control import (
|
||||
DebianControlInstaller as DebianControlInstaller,
|
||||
) # noqa: F401
|
||||
from pkgmgr.actions.install.installers.os_packages.rpm_spec import RpmSpecInstaller # noqa: F401
|
||||
|
||||
@@ -41,7 +41,9 @@ class BaseInstaller(ABC):
|
||||
return caps
|
||||
|
||||
for matcher in CAPABILITY_MATCHERS:
|
||||
if matcher.applies_to_layer(self.layer) and matcher.is_provided(ctx, self.layer):
|
||||
if matcher.applies_to_layer(self.layer) and matcher.is_provided(
|
||||
ctx, self.layer
|
||||
):
|
||||
caps.add(matcher.name)
|
||||
|
||||
return caps
|
||||
|
||||
@@ -16,7 +16,9 @@ class MakefileInstaller(BaseInstaller):
|
||||
def supports(self, ctx: RepoContext) -> bool:
|
||||
if os.environ.get("PKGMGR_DISABLE_MAKEFILE_INSTALLER") == "1":
|
||||
if not ctx.quiet:
|
||||
print("[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 – skipping MakefileInstaller.")
|
||||
print(
|
||||
"[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 – skipping MakefileInstaller."
|
||||
)
|
||||
return False
|
||||
|
||||
makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
|
||||
@@ -46,7 +48,9 @@ class MakefileInstaller(BaseInstaller):
|
||||
return
|
||||
|
||||
if not ctx.quiet:
|
||||
print(f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)")
|
||||
print(
|
||||
f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)"
|
||||
)
|
||||
|
||||
run_command("make install", cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
|
||||
|
||||
@@ -57,7 +57,9 @@ class NixConflictResolver:
|
||||
|
||||
# 3) Fallback: output-name based lookup (also covers nix suggesting: `nix profile remove pkgmgr`)
|
||||
if not tokens:
|
||||
tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
|
||||
tokens = self._profile.find_remove_tokens_for_output(
|
||||
ctx, self._runner, output
|
||||
)
|
||||
|
||||
if tokens:
|
||||
if not quiet:
|
||||
@@ -94,7 +96,9 @@ class NixConflictResolver:
|
||||
continue
|
||||
|
||||
if not quiet:
|
||||
print("[nix] conflict detected but could not resolve profile entries to remove.")
|
||||
print(
|
||||
"[nix] conflict detected but could not resolve profile entries to remove."
|
||||
)
|
||||
return False
|
||||
|
||||
return False
|
||||
|
||||
@@ -75,7 +75,9 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
# Core install path
|
||||
# ---------------------------------------------------------------------
|
||||
|
||||
def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
|
||||
def _install_only(
|
||||
self, ctx: "RepoContext", output: str, allow_failure: bool
|
||||
) -> None:
|
||||
install_cmd = f"nix profile install {self._installable(ctx, output)}"
|
||||
|
||||
if not ctx.quiet:
|
||||
@@ -96,7 +98,9 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
output=output,
|
||||
):
|
||||
if not ctx.quiet:
|
||||
print(f"[nix] output '{output}' successfully installed after conflict cleanup.")
|
||||
print(
|
||||
f"[nix] output '{output}' successfully installed after conflict cleanup."
|
||||
)
|
||||
return
|
||||
|
||||
if not ctx.quiet:
|
||||
@@ -107,20 +111,26 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
|
||||
# If indices are supported, try legacy index-upgrade path.
|
||||
if self._indices_supported is not False:
|
||||
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
|
||||
indices = self._profile.find_installed_indices_for_output(
|
||||
ctx, self._runner, output
|
||||
)
|
||||
|
||||
upgraded = False
|
||||
for idx in indices:
|
||||
if self._upgrade_index(ctx, idx):
|
||||
upgraded = True
|
||||
if not ctx.quiet:
|
||||
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
|
||||
print(
|
||||
f"[nix] output '{output}' successfully upgraded (index {idx})."
|
||||
)
|
||||
|
||||
if upgraded:
|
||||
return
|
||||
|
||||
if indices and not ctx.quiet:
|
||||
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
|
||||
print(
|
||||
f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
|
||||
)
|
||||
|
||||
for idx in indices:
|
||||
self._remove_index(ctx, idx)
|
||||
@@ -139,7 +149,9 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
print(f"[nix] output '{output}' successfully re-installed.")
|
||||
return
|
||||
|
||||
print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})")
|
||||
print(
|
||||
f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})"
|
||||
)
|
||||
if not allow_failure:
|
||||
raise SystemExit(final.returncode)
|
||||
|
||||
@@ -149,7 +161,9 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
# force_update path
|
||||
# ---------------------------------------------------------------------
|
||||
|
||||
def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
|
||||
def _force_upgrade_output(
|
||||
self, ctx: "RepoContext", output: str, allow_failure: bool
|
||||
) -> None:
|
||||
# Prefer token path if indices unsupported (new nix)
|
||||
if self._indices_supported is False:
|
||||
self._remove_tokens_for_output(ctx, output)
|
||||
@@ -158,14 +172,18 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
print(f"[nix] output '{output}' successfully upgraded.")
|
||||
return
|
||||
|
||||
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
|
||||
indices = self._profile.find_installed_indices_for_output(
|
||||
ctx, self._runner, output
|
||||
)
|
||||
|
||||
upgraded_any = False
|
||||
for idx in indices:
|
||||
if self._upgrade_index(ctx, idx):
|
||||
upgraded_any = True
|
||||
if not ctx.quiet:
|
||||
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
|
||||
print(
|
||||
f"[nix] output '{output}' successfully upgraded (index {idx})."
|
||||
)
|
||||
|
||||
if upgraded_any:
|
||||
if not ctx.quiet:
|
||||
@@ -173,7 +191,9 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
return
|
||||
|
||||
if indices and not ctx.quiet:
|
||||
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
|
||||
print(
|
||||
f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
|
||||
)
|
||||
|
||||
for idx in indices:
|
||||
self._remove_index(ctx, idx)
|
||||
@@ -223,7 +243,9 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
return
|
||||
|
||||
if not ctx.quiet:
|
||||
print(f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}")
|
||||
print(
|
||||
f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}"
|
||||
)
|
||||
|
||||
for t in tokens:
|
||||
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
|
||||
|
||||
@@ -101,7 +101,9 @@ class NixProfileInspector:
|
||||
data = self.list_json(ctx, runner)
|
||||
entries = normalize_elements(data)
|
||||
|
||||
tokens: List[str] = [out] # critical: matches nix's own suggestion for conflicts
|
||||
tokens: List[str] = [
|
||||
out
|
||||
] # critical: matches nix's own suggestion for conflicts
|
||||
|
||||
for e in entries:
|
||||
if entry_matches_output(e, out):
|
||||
|
||||
@@ -48,7 +48,9 @@ class NixProfileListReader:
|
||||
|
||||
return uniq
|
||||
|
||||
def indices_matching_store_prefixes(self, ctx: "RepoContext", prefixes: List[str]) -> List[int]:
|
||||
def indices_matching_store_prefixes(
|
||||
self, ctx: "RepoContext", prefixes: List[str]
|
||||
) -> List[int]:
|
||||
prefixes = [self._store_prefix(p) for p in prefixes if p]
|
||||
prefixes = [p for p in prefixes if p]
|
||||
if not prefixes:
|
||||
|
||||
@@ -11,6 +11,7 @@ if TYPE_CHECKING:
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
from .runner import CommandRunner
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class RetryPolicy:
|
||||
max_attempts: int = 7
|
||||
@@ -35,13 +36,19 @@ class GitHubRateLimitRetry:
|
||||
install_cmd: str,
|
||||
) -> RunResult:
|
||||
quiet = bool(getattr(ctx, "quiet", False))
|
||||
delays = list(self._fibonacci_backoff(self._policy.base_delay_seconds, self._policy.max_attempts))
|
||||
delays = list(
|
||||
self._fibonacci_backoff(
|
||||
self._policy.base_delay_seconds, self._policy.max_attempts
|
||||
)
|
||||
)
|
||||
|
||||
last: RunResult | None = None
|
||||
|
||||
for attempt, base_delay in enumerate(delays, start=1):
|
||||
if not quiet:
|
||||
print(f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}")
|
||||
print(
|
||||
f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}"
|
||||
)
|
||||
|
||||
res = runner.run(ctx, install_cmd, allow_failure=True)
|
||||
last = res
|
||||
@@ -56,7 +63,9 @@ class GitHubRateLimitRetry:
|
||||
if attempt >= self._policy.max_attempts:
|
||||
break
|
||||
|
||||
jitter = random.randint(self._policy.jitter_seconds_min, self._policy.jitter_seconds_max)
|
||||
jitter = random.randint(
|
||||
self._policy.jitter_seconds_min, self._policy.jitter_seconds_max
|
||||
)
|
||||
wait_time = base_delay + jitter
|
||||
|
||||
if not quiet:
|
||||
@@ -67,7 +76,11 @@ class GitHubRateLimitRetry:
|
||||
|
||||
time.sleep(wait_time)
|
||||
|
||||
return last if last is not None else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
|
||||
return (
|
||||
last
|
||||
if last is not None
|
||||
else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _is_github_rate_limit_error(text: str) -> bool:
|
||||
|
||||
@@ -9,6 +9,7 @@ from .types import RunResult
|
||||
if TYPE_CHECKING:
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
|
||||
|
||||
class CommandRunner:
|
||||
"""
|
||||
Executes commands (shell=True) inside a repository directory (if provided).
|
||||
@@ -40,7 +41,9 @@ class CommandRunner:
|
||||
raise
|
||||
return RunResult(returncode=1, stdout="", stderr=str(e))
|
||||
|
||||
res = RunResult(returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or "")
|
||||
res = RunResult(
|
||||
returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or ""
|
||||
)
|
||||
|
||||
if res.returncode != 0 and not quiet:
|
||||
self._print_compact_failure(res)
|
||||
|
||||
@@ -20,7 +20,9 @@ class NixConflictTextParser:
|
||||
tokens: List[str] = []
|
||||
for m in pat.finditer(text or ""):
|
||||
t = (m.group(1) or "").strip()
|
||||
if (t.startswith("'") and t.endswith("'")) or (t.startswith('"') and t.endswith('"')):
|
||||
if (t.startswith("'") and t.endswith("'")) or (
|
||||
t.startswith('"') and t.endswith('"')
|
||||
):
|
||||
t = t[1:-1]
|
||||
if t:
|
||||
tokens.append(t)
|
||||
|
||||
@@ -14,7 +14,9 @@ class PythonInstaller(BaseInstaller):
|
||||
|
||||
def supports(self, ctx: RepoContext) -> bool:
|
||||
if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
|
||||
print("[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER.")
|
||||
print(
|
||||
"[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER."
|
||||
)
|
||||
return False
|
||||
|
||||
return os.path.exists(os.path.join(ctx.repo_dir, "pyproject.toml"))
|
||||
|
||||
@@ -132,7 +132,11 @@ class InstallationPipeline:
|
||||
continue
|
||||
|
||||
if not quiet:
|
||||
if ctx.force_update and state.layer is not None and installer_layer == state.layer:
|
||||
if (
|
||||
ctx.force_update
|
||||
and state.layer is not None
|
||||
and installer_layer == state.layer
|
||||
):
|
||||
print(
|
||||
f"[pkgmgr] Running installer {installer.__class__.__name__} "
|
||||
f"for {identifier} in '{repo_dir}' (upgrade requested)..."
|
||||
|
||||
@@ -3,7 +3,7 @@ from __future__ import annotations
|
||||
import os
|
||||
from typing import Optional, Set
|
||||
|
||||
from pkgmgr.core.git.errors import GitError
|
||||
from pkgmgr.core.git.errors import GitRunError
|
||||
from pkgmgr.core.git.commands import (
|
||||
GitAddRemoteError,
|
||||
GitAddRemotePushUrlError,
|
||||
@@ -90,7 +90,7 @@ def determine_primary_remote_url(
|
||||
def has_origin_remote(repo_dir: str) -> bool:
|
||||
try:
|
||||
return "origin" in list_remotes(cwd=repo_dir)
|
||||
except GitError:
|
||||
except GitRunError:
|
||||
return False
|
||||
|
||||
|
||||
@@ -122,7 +122,7 @@ def _ensure_additional_push_urls(
|
||||
|
||||
try:
|
||||
existing = get_remote_push_urls("origin", cwd=repo_dir)
|
||||
except GitError:
|
||||
except GitRunError:
|
||||
existing = set()
|
||||
|
||||
for url in sorted(desired - existing):
|
||||
|
||||
@@ -16,6 +16,7 @@ from .types import MirrorMap, Repository
|
||||
# Helpers
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _repo_key(repo: Repository) -> Tuple[str, str, str]:
|
||||
"""
|
||||
Normalised key for identifying a repository in config files.
|
||||
@@ -47,6 +48,7 @@ def _load_user_config(path: str) -> Dict[str, object]:
|
||||
# Main merge command
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
def merge_mirrors(
|
||||
selected_repos: List[Repository],
|
||||
repositories_base_dir: str,
|
||||
|
||||
@@ -66,7 +66,9 @@ def _setup_remote_mirrors_for_repo(
|
||||
|
||||
# Probe only git URLs (do not try ls-remote against PyPI etc.)
|
||||
# If there are no mirrors at all, probe the primary git URL.
|
||||
git_mirrors = {k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)}
|
||||
git_mirrors = {
|
||||
k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)
|
||||
}
|
||||
|
||||
if not git_mirrors:
|
||||
primary = determine_primary_remote_url(repo, ctx)
|
||||
|
||||
@@ -17,7 +17,7 @@ def hostport_from_git_url(url: str) -> Tuple[str, Optional[str]]:
|
||||
netloc = netloc.split("@", 1)[1]
|
||||
|
||||
if netloc.startswith("[") and "]" in netloc:
|
||||
host = netloc[1:netloc.index("]")]
|
||||
host = netloc[1 : netloc.index("]")]
|
||||
rest = netloc[netloc.index("]") + 1 :]
|
||||
port = rest[1:] if rest.startswith(":") else None
|
||||
return host.strip(), (port.strip() if port else None)
|
||||
@@ -43,7 +43,7 @@ def normalize_provider_host(host: str) -> str:
|
||||
return ""
|
||||
|
||||
if host.startswith("[") and "]" in host:
|
||||
host = host[1:host.index("]")]
|
||||
host = host[1 : host.index("]")]
|
||||
|
||||
if ":" in host and host.count(":") == 1:
|
||||
host = host.rsplit(":", 1)[0]
|
||||
|
||||
@@ -4,7 +4,16 @@ from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.command.run import run_command
|
||||
import sys
|
||||
|
||||
def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir, all_repos, proxy_command: str, extra_args, preview: bool):
|
||||
|
||||
def exec_proxy_command(
|
||||
proxy_prefix: str,
|
||||
selected_repos,
|
||||
repositories_base_dir,
|
||||
all_repos,
|
||||
proxy_command: str,
|
||||
extra_args,
|
||||
preview: bool,
|
||||
):
|
||||
"""Execute a given proxy command with extra arguments for each repository."""
|
||||
error_repos = []
|
||||
max_exit_code = 0
|
||||
@@ -22,7 +31,9 @@ def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir,
|
||||
try:
|
||||
run_command(full_cmd, cwd=repo_dir, preview=preview)
|
||||
except SystemExit as e:
|
||||
print(f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}.")
|
||||
print(
|
||||
f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}."
|
||||
)
|
||||
error_repos.append((repo_identifier, e.code))
|
||||
max_exit_code = max(max_exit_code, e.code)
|
||||
|
||||
|
||||
@@ -1,519 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
File and metadata update helpers for the release workflow.
|
||||
|
||||
Responsibilities:
|
||||
- Update pyproject.toml with the new version.
|
||||
- Update flake.nix, PKGBUILD, RPM spec files where present.
|
||||
- Prepend release entries to CHANGELOG.md.
|
||||
- Maintain distribution-specific changelog files:
|
||||
* debian/changelog
|
||||
* RPM spec %changelog section
|
||||
including maintainer metadata where applicable.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from datetime import date, datetime
|
||||
from typing import Optional, Tuple
|
||||
|
||||
from pkgmgr.core.git.queries import get_config_value
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Editor helper for interactive changelog messages
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
    """
    Open the user's editor ($EDITOR, defaulting to 'nano') to collect a
    changelog message.

    A temporary file is pre-seeded with commented instructions and, when
    given, *initial_message*.  After the editor exits, lines whose stripped
    form starts with '#' are discarded.

    Returns the resulting message, which may be the empty string.
    """
    editor = os.environ.get("EDITOR", "nano")

    with tempfile.NamedTemporaryFile(
        mode="w+",
        delete=False,
        encoding="utf-8",
    ) as tmp:
        tmp_path = tmp.name
        tmp.write(
            "# Write the changelog entry for this release.\n"
            "# Lines starting with '#' will be ignored.\n"
            "# Empty result will fall back to a generic message.\n\n"
        )
        if initial_message:
            tmp.write(initial_message.strip() + "\n")
        tmp.flush()

    try:
        subprocess.call([editor, tmp_path])
    except FileNotFoundError:
        print(
            f"[WARN] Editor {editor!r} not found; proceeding without "
            "interactive changelog message."
        )

    try:
        with open(tmp_path, "r", encoding="utf-8") as handle:
            raw = handle.read()
    finally:
        # Best-effort cleanup of the temp file; ignore races/permissions.
        try:
            os.remove(tmp_path)
        except OSError:
            pass

    kept = [ln for ln in raw.splitlines() if not ln.strip().startswith("#")]
    return "\n".join(kept).strip()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# File update helpers (pyproject + extra packaging + changelog)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def update_pyproject_version(
    pyproject_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Rewrite the ``version = "X.Y.Z"`` assignment in pyproject.toml.

    A missing or unreadable file is skipped without failing the release.
    A readable file that lacks a version line aborts via ``sys.exit(1)``,
    since the release would otherwise ship an unchanged version.
    """
    if not os.path.exists(pyproject_path):
        print(
            f"[INFO] pyproject.toml not found at: {pyproject_path}, "
            "skipping version update."
        )
        return

    try:
        with open(pyproject_path, "r", encoding="utf-8") as handle:
            original = handle.read()
    except OSError as exc:
        print(
            f"[WARN] Could not read pyproject.toml at {pyproject_path}: {exc}. "
            "Skipping version update."
        )
        return

    replaced, hits = re.subn(
        r'^(version\s*=\s*")([^"]+)(")',
        lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
        original,
        flags=re.MULTILINE,
    )

    if hits == 0:
        print("[ERROR] Could not find version line in pyproject.toml")
        sys.exit(1)

    if preview:
        print(f"[PREVIEW] Would update pyproject.toml version to {new_version}")
        return

    with open(pyproject_path, "w", encoding="utf-8") as handle:
        handle.write(replaced)

    print(f"Updated pyproject.toml version to {new_version}")
|
||||
|
||||
|
||||
def update_flake_version(
    flake_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Rewrite ``version = "..."`` assignments in flake.nix, if present.

    Missing or unreadable files, and files with no version assignment,
    are reported and skipped without failing the release.
    """
    if not os.path.exists(flake_path):
        print("[INFO] flake.nix not found, skipping.")
        return

    try:
        with open(flake_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read flake.nix: {exc}")
        return

    # NOTE: the pattern is unanchored, so every version assignment in
    # the file is rewritten (original behavior, preserved).
    text, hits = re.subn(
        r'(version\s*=\s*")([^"]+)(")',
        lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
        text,
    )

    if hits == 0:
        print("[WARN] No version assignment found in flake.nix, skipping.")
        return

    if preview:
        print(f"[PREVIEW] Would update flake.nix version to {new_version}")
        return

    with open(flake_path, "w", encoding="utf-8") as handle:
        handle.write(text)

    print(f"Updated flake.nix version to {new_version}")
|
||||
|
||||
|
||||
def update_pkgbuild_version(
    pkgbuild_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Set ``pkgver`` to *new_version* and reset ``pkgrel`` to 1 in a PKGBUILD.

    Missing files, or files lacking the expected lines, are reported and
    skipped rather than failing the release.
    """
    if not os.path.exists(pkgbuild_path):
        print("[INFO] PKGBUILD not found, skipping.")
        return

    try:
        with open(pkgbuild_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read PKGBUILD: {exc}")
        return

    text, ver_hits = re.subn(
        r"^(pkgver\s*=\s*)(.+)$",
        lambda m: f"{m.group(1)}{new_version}",
        text,
        flags=re.MULTILINE,
    )
    if ver_hits == 0:
        print("[WARN] No pkgver line found in PKGBUILD.")

    # A new upstream version always restarts the package release counter.
    text, rel_hits = re.subn(
        r"^(pkgrel\s*=\s*)(.+)$",
        lambda m: f"{m.group(1)}1",
        text,
        flags=re.MULTILINE,
    )
    if rel_hits == 0:
        print("[WARN] No pkgrel line found in PKGBUILD.")

    if preview:
        print(f"[PREVIEW] Would update PKGBUILD to pkgver={new_version}, pkgrel=1")
        return

    with open(pkgbuild_path, "w", encoding="utf-8") as handle:
        handle.write(text)

    print(f"Updated PKGBUILD to pkgver={new_version}, pkgrel=1")
|
||||
|
||||
|
||||
def update_spec_version(
    spec_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Set ``Version:`` to *new_version* and reset ``Release:`` to 1 in an
    RPM spec file, keeping any release suffix such as ``%{?dist}``.

    Missing files and missing header lines are reported and skipped.
    """
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping.")
        return

    try:
        with open(spec_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file: {exc}")
        return

    text, ver_hits = re.subn(
        r"^(Version:\s*)(.+)$",
        lambda m: f"{m.group(1)}{new_version}",
        text,
        flags=re.MULTILINE,
    )
    if ver_hits == 0:
        print("[WARN] No 'Version:' line found in spec file.")

    def _reset_release(m: "re.Match[str]") -> str:
        # Replace the leading release number with 1 but keep whatever
        # follows it (e.g. "%{?dist}").
        value = m.group(2).strip()
        numbered = re.match(r"^(\d+)(.*)$", value)
        suffix = numbered.group(2) if numbered else ""
        return f"{m.group(1)}1{suffix}"

    text, rel_hits = re.subn(
        r"^(Release:\s*)(.+)$",
        _reset_release,
        text,
        flags=re.MULTILINE,
    )
    if rel_hits == 0:
        print("[WARN] No 'Release:' line found in spec file.")

    if preview:
        print(
            "[PREVIEW] Would update spec file "
            f"{os.path.basename(spec_path)} to Version: {new_version}, Release: 1..."
        )
        return

    with open(spec_path, "w", encoding="utf-8") as handle:
        handle.write(text)

    print(
        f"Updated spec file {os.path.basename(spec_path)} "
        f"to Version: {new_version}, Release: 1..."
    )
|
||||
|
||||
|
||||
def update_changelog(
    changelog_path: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> str:
    """
    Prepend a release section (version, date, message) to CHANGELOG.md.

    When *message* is None an editor is opened (unless previewing), with
    an empty result falling back to a generic message.

    Returns the message that was used.
    """
    today = date.today().isoformat()

    if message is None:
        if preview:
            message = "Automated release."
        else:
            print(
                "\n[INFO] No release message provided, opening editor for "
                "changelog entry...\n"
            )
            # Empty editor result falls back to the generic message.
            message = _open_editor_for_changelog() or "Automated release."

    header = f"## [{new_version}] - {today}\n"
    header += f"\n* {message}\n\n"

    changelog = ""
    if os.path.exists(changelog_path):
        try:
            with open(changelog_path, "r", encoding="utf-8") as handle:
                changelog = handle.read()
        except Exception as exc:
            print(f"[WARN] Could not read existing CHANGELOG.md: {exc}")
            changelog = ""

    new_changelog = header + "\n" + changelog if changelog else header

    print("\n================ CHANGELOG ENTRY ================")
    print(header.rstrip())
    print("=================================================\n")

    if preview:
        print(f"[PREVIEW] Would prepend new entry for {new_version} to CHANGELOG.md")
        return message

    with open(changelog_path, "w", encoding="utf-8") as handle:
        handle.write(new_changelog)

    print(f"Updated CHANGELOG.md with version {new_version}")

    return message
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Debian changelog helpers (with Git config fallback for maintainer)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _get_debian_author() -> Tuple[str, str]:
    """
    Resolve the maintainer name/email used in debian/changelog stanzas.

    Lookup order per field: DEBFULLNAME / DEBEMAIL, then
    GIT_AUTHOR_NAME / GIT_AUTHOR_EMAIL, then git config
    user.name / user.email, and finally generic placeholders.
    """
    name = (
        os.environ.get("DEBFULLNAME")
        or os.environ.get("GIT_AUTHOR_NAME")
        or get_config_value("user.name")
        or "Unknown Maintainer"
    )
    email = (
        os.environ.get("DEBEMAIL")
        or os.environ.get("GIT_AUTHOR_EMAIL")
        or get_config_value("user.email")
        or "unknown@example.com"
    )
    return name, email
|
||||
|
||||
|
||||
def update_debian_changelog(
    debian_changelog_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """
    Prepend a release stanza to debian/changelog, if the file exists.

    The Debian version is ``<new_version>-1``; maintainer identity is
    resolved via _get_debian_author().
    """
    if not os.path.exists(debian_changelog_path):
        print("[INFO] debian/changelog not found, skipping.")
        return

    debian_version = f"{new_version}-1"
    # RFC-2822 style timestamp with the local timezone offset, as
    # required by the debian/changelog format.
    timestamp = datetime.now().astimezone().strftime("%a, %d %b %Y %H:%M:%S %z")

    author_name, author_email = _get_debian_author()

    entry_text = message.strip() if message else f"Automated release {new_version}."
    stanza = (
        f"{package_name} ({debian_version}) unstable; urgency=medium\n\n"
        f" * {entry_text}\n\n"
        f" -- {author_name} <{author_email}> {timestamp}\n\n"
    )

    if preview:
        print(
            "[PREVIEW] Would prepend the following stanza to debian/changelog:\n"
            f"{stanza}"
        )
        return

    try:
        with open(debian_changelog_path, "r", encoding="utf-8") as handle:
            existing = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read debian/changelog: {exc}")
        existing = ""

    with open(debian_changelog_path, "w", encoding="utf-8") as handle:
        handle.write(stanza + existing)

    print(f"Updated debian/changelog with version {debian_version}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fedora / RPM spec %changelog helper
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def update_spec_changelog(
    spec_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """
    Prepend an entry to the %changelog section of an RPM spec file.

    A typical entry looks like:

        * Tue Dec 09 2025 John Doe <john@example.com> - 0.5.1-1
        - Your changelog message

    If no %changelog section exists yet, one is appended to the end of
    the file. Maintainer identity is resolved via _get_debian_author().
    """
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping spec changelog update.")
        return

    try:
        with open(spec_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file for changelog update: {exc}")
        return

    release_version = f"{new_version}-1"
    stamp = datetime.now().astimezone().strftime("%a %b %d %Y")

    # Reuse the Debian maintainer discovery for author name/email.
    author_name, author_email = _get_debian_author()
    entry_text = message.strip() if message else f"Automated release {new_version}."

    stanza = (
        f"* {stamp} {author_name} <{author_email}> - {release_version}\n"
        f"- {entry_text}\n\n"
    )

    marker = "%changelog"
    pos = text.find(marker)
    if pos == -1:
        # No %changelog section yet: append one at the end.
        updated = text.rstrip() + "\n\n%changelog\n" + stanza
    else:
        # Insert the stanza right after the %changelog line.
        head = text[: pos + len(marker)]
        tail = text[pos + len(marker) :]
        updated = head + "\n" + stanza + tail.lstrip("\n")

    if preview:
        print(
            "[PREVIEW] Would update RPM %changelog section with the following "
            "stanza:\n"
            f"{stanza}"
        )
        return

    try:
        with open(spec_path, "w", encoding="utf-8") as handle:
            handle.write(updated)
    except Exception as exc:
        print(f"[WARN] Failed to write updated spec changelog section: {exc}")
        return

    print(
        f"Updated RPM %changelog section in {os.path.basename(spec_path)} "
        f"for {package_name} {release_version}"
    )
|
||||
35
src/pkgmgr/actions/release/files/__init__.py
Normal file
35
src/pkgmgr/actions/release/files/__init__.py
Normal file
@@ -0,0 +1,35 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Backwards-compatible facade for the release file update helpers.
|
||||
|
||||
Implementations live in this package:
|
||||
pkgmgr.actions.release.files.*
|
||||
|
||||
Keep this package stable so existing imports continue to work, e.g.:
|
||||
from pkgmgr.actions.release.files import update_pyproject_version
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .editor import _open_editor_for_changelog
|
||||
from .pyproject import update_pyproject_version
|
||||
from .flake import update_flake_version
|
||||
from .pkgbuild import update_pkgbuild_version
|
||||
from .rpm_spec import update_spec_version
|
||||
from .changelog_md import update_changelog
|
||||
from .debian import _get_debian_author, update_debian_changelog
|
||||
from .rpm_changelog import update_spec_changelog
|
||||
|
||||
__all__ = [
|
||||
"_open_editor_for_changelog",
|
||||
"update_pyproject_version",
|
||||
"update_flake_version",
|
||||
"update_pkgbuild_version",
|
||||
"update_spec_version",
|
||||
"update_changelog",
|
||||
"_get_debian_author",
|
||||
"update_debian_changelog",
|
||||
"update_spec_changelog",
|
||||
]
|
||||
62
src/pkgmgr/actions/release/files/changelog_md.py
Normal file
62
src/pkgmgr/actions/release/files/changelog_md.py
Normal file
@@ -0,0 +1,62 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from datetime import date
|
||||
from typing import Optional
|
||||
|
||||
from .editor import _open_editor_for_changelog
|
||||
|
||||
|
||||
def update_changelog(
    changelog_path: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> str:
    """
    Prepend a release section to CHANGELOG.md and return the message used.

    When *message* is None, an editor is opened interactively (unless
    *preview* is set), falling back to a generic message if the user
    enters nothing. In preview mode the file is never written.
    """
    release_date = date.today().isoformat()

    if message is None:
        if not preview:
            print(
                "\n[INFO] No release message provided, opening editor for changelog entry...\n"
            )
            # Empty editor result falls back to the generic message.
            message = _open_editor_for_changelog() or "Automated release."
        else:
            message = "Automated release."

    entry = f"## [{new_version}] - {release_date}\n" + f"\n* {message}\n\n"

    existing = ""
    if os.path.exists(changelog_path):
        try:
            with open(changelog_path, "r", encoding="utf-8") as fh:
                existing = fh.read()
        except Exception as exc:
            print(f"[WARN] Could not read existing CHANGELOG.md: {exc}")
            existing = ""

    updated = entry + "\n" + existing if existing else entry

    print("\n================ CHANGELOG ENTRY ================")
    print(entry.rstrip())
    print("=================================================\n")

    if preview:
        print(f"[PREVIEW] Would prepend new entry for {new_version} to CHANGELOG.md")
        return message

    with open(changelog_path, "w", encoding="utf-8") as fh:
        fh.write(updated)

    print(f"Updated CHANGELOG.md with version {new_version}")
    return message
|
||||
74
src/pkgmgr/actions/release/files/debian.py
Normal file
74
src/pkgmgr/actions/release/files/debian.py
Normal file
@@ -0,0 +1,74 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
from typing import Optional, Tuple
|
||||
|
||||
from pkgmgr.core.git.queries import get_config_value
|
||||
|
||||
|
||||
def _get_debian_author() -> Tuple[str, str]:
|
||||
name = os.environ.get("DEBFULLNAME")
|
||||
email = os.environ.get("DEBEMAIL")
|
||||
|
||||
if not name:
|
||||
name = os.environ.get("GIT_AUTHOR_NAME")
|
||||
if not email:
|
||||
email = os.environ.get("GIT_AUTHOR_EMAIL")
|
||||
|
||||
if not name:
|
||||
name = get_config_value("user.name")
|
||||
if not email:
|
||||
email = get_config_value("user.email")
|
||||
|
||||
if not name:
|
||||
name = "Unknown Maintainer"
|
||||
if not email:
|
||||
email = "unknown@example.com"
|
||||
|
||||
return name, email
|
||||
|
||||
|
||||
def update_debian_changelog(
    debian_changelog_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """
    Prepend a Debian changelog stanza for *new_version* (packaged as
    "<version>-1"). Missing files are skipped; preview mode only prints
    the stanza that would be written.
    """
    if not os.path.exists(debian_changelog_path):
        print("[INFO] debian/changelog not found, skipping.")
        return

    debian_version = f"{new_version}-1"
    timestamp = datetime.now().astimezone().strftime("%a, %d %b %Y %H:%M:%S %z")
    name, email = _get_debian_author()
    summary = message.strip() if message else f"Automated release {new_version}."

    stanza = (
        f"{package_name} ({debian_version}) unstable; urgency=medium\n\n"
        f" * {summary}\n\n"
        f" -- {name} <{email}> {timestamp}\n\n"
    )

    if preview:
        print(
            "[PREVIEW] Would prepend the following stanza to debian/changelog:\n"
            f"{stanza}"
        )
        return

    try:
        with open(debian_changelog_path, "r", encoding="utf-8") as fh:
            previous = fh.read()
    except Exception as exc:
        print(f"[WARN] Could not read debian/changelog: {exc}")
        previous = ""

    with open(debian_changelog_path, "w", encoding="utf-8") as fh:
        fh.write(stanza + previous)

    print(f"Updated debian/changelog with version {debian_version}")
|
||||
45
src/pkgmgr/actions/release/files/editor.py
Normal file
45
src/pkgmgr/actions/release/files/editor.py
Normal file
@@ -0,0 +1,45 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
|
||||
editor = os.environ.get("EDITOR", "nano")
|
||||
|
||||
with tempfile.NamedTemporaryFile(
|
||||
mode="w+",
|
||||
delete=False,
|
||||
encoding="utf-8",
|
||||
) as tmp:
|
||||
tmp_path = tmp.name
|
||||
tmp.write(
|
||||
"# Write the changelog entry for this release.\n"
|
||||
"# Lines starting with '#' will be ignored.\n"
|
||||
"# Empty result will fall back to a generic message.\n\n"
|
||||
)
|
||||
if initial_message:
|
||||
tmp.write(initial_message.strip() + "\n")
|
||||
tmp.flush()
|
||||
|
||||
try:
|
||||
subprocess.call([editor, tmp_path])
|
||||
except FileNotFoundError:
|
||||
print(
|
||||
f"[WARN] Editor {editor!r} not found; proceeding without "
|
||||
"interactive changelog message."
|
||||
)
|
||||
|
||||
try:
|
||||
with open(tmp_path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
finally:
|
||||
try:
|
||||
os.remove(tmp_path)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
lines = [line for line in content.splitlines() if not line.strip().startswith("#")]
|
||||
return "\n".join(lines).strip()
|
||||
39
src/pkgmgr/actions/release/files/flake.py
Normal file
39
src/pkgmgr/actions/release/files/flake.py
Normal file
@@ -0,0 +1,39 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
|
||||
def update_flake_version(
    flake_path: str, new_version: str, preview: bool = False
) -> None:
    """
    Update every ``version = "..."`` assignment in flake.nix to *new_version*.

    No-op (with an informational message) when the file is missing or no
    version assignment exists. In preview mode nothing is written.
    """
    if not os.path.exists(flake_path):
        print("[INFO] flake.nix not found, skipping.")
        return

    try:
        with open(flake_path, "r", encoding="utf-8") as f:
            content = f.read()
    # Narrowed from `except Exception` to OSError for consistency with
    # update_pyproject_version; unexpected errors should surface.
    except OSError as exc:
        print(f"[WARN] Could not read flake.nix: {exc}")
        return

    # NOTE(review): this rewrites *all* `version = "..."` assignments in the
    # file, not only the package's own — confirm flake.nix never carries
    # unrelated version strings.
    pattern = r'(version\s*=\s*")([^"]+)(")'
    new_content, count = re.subn(
        pattern,
        lambda m: f"{m.group(1)}{new_version}{m.group(3)}",
        content,
    )

    if count == 0:
        print("[WARN] No version found in flake.nix.")
        return

    if preview:
        print(f"[PREVIEW] Would update flake.nix version to {new_version}")
        return

    with open(flake_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(f"Updated flake.nix version to {new_version}")
|
||||
41
src/pkgmgr/actions/release/files/pkgbuild.py
Normal file
41
src/pkgmgr/actions/release/files/pkgbuild.py
Normal file
@@ -0,0 +1,41 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
|
||||
def update_pkgbuild_version(
    pkgbuild_path: str, new_version: str, preview: bool = False
) -> None:
    """
    Set pkgver to *new_version* and reset pkgrel to 1 in a PKGBUILD.

    Missing files are skipped; preview mode only reports what would change.
    """
    if not os.path.exists(pkgbuild_path):
        print("[INFO] PKGBUILD not found, skipping.")
        return

    try:
        with open(pkgbuild_path, "r", encoding="utf-8") as fh:
            text = fh.read()
    except Exception as exc:
        print(f"[WARN] Could not read PKGBUILD: {exc}")
        return

    replacements = (
        (r"^(pkgver\s*=\s*)(.+)$", new_version),
        (r"^(pkgrel\s*=\s*)(.+)$", "1"),
    )
    for pattern, value in replacements:
        # Bind `value` as a default so the lambda is not late-bound.
        text, _ = re.subn(
            pattern,
            lambda m, value=value: f"{m.group(1)}{value}",
            text,
            flags=re.MULTILINE,
        )

    if preview:
        print(f"[PREVIEW] Would update PKGBUILD to pkgver={new_version}, pkgrel=1")
        return

    with open(pkgbuild_path, "w", encoding="utf-8") as fh:
        fh.write(text)

    print(f"Updated PKGBUILD to pkgver={new_version}, pkgrel=1")
|
||||
45
src/pkgmgr/actions/release/files/pyproject.py
Normal file
45
src/pkgmgr/actions/release/files/pyproject.py
Normal file
@@ -0,0 +1,45 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
|
||||
def update_pyproject_version(
    pyproject_path: str, new_version: str, preview: bool = False
) -> None:
    """
    Rewrite the version key inside the [project] table of pyproject.toml.

    Raises RuntimeError when the [project] table or its version key is
    missing. Missing files and read errors are reported and skipped.
    """
    if not os.path.exists(pyproject_path):
        print(f"[INFO] pyproject.toml not found at: {pyproject_path}, skipping.")
        return

    try:
        with open(pyproject_path, "r", encoding="utf-8") as fh:
            text = fh.read()
    except OSError as exc:
        print(f"[WARN] Could not read pyproject.toml: {exc}")
        return

    # Locate the [project] table: from its header up to the next table
    # header or end of file.
    section = re.search(r"(?ms)^\s*\[project\]\s*$.*?(?=^\s*\[|\Z)", text)
    if section is None:
        raise RuntimeError("Missing [project] section in pyproject.toml")

    block = section.group(0)
    replaced, hits = re.subn(
        r'(?m)^(\s*version\s*=\s*")([^"]+)(")\s*$',
        lambda m: f"{m.group(1)}{new_version}{m.group(3)}",
        block,
    )
    if not hits:
        raise RuntimeError("Missing version key in [project] section")

    result = text[: section.start()] + replaced + text[section.end() :]

    if preview:
        print(f"[PREVIEW] Would update pyproject.toml version to {new_version}")
        return

    with open(pyproject_path, "w", encoding="utf-8") as fh:
        fh.write(result)

    print(f"Updated pyproject.toml version to {new_version}")
|
||||
67
src/pkgmgr/actions/release/files/rpm_changelog.py
Normal file
67
src/pkgmgr/actions/release/files/rpm_changelog.py
Normal file
@@ -0,0 +1,67 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from .debian import _get_debian_author
|
||||
|
||||
|
||||
def update_spec_changelog(
    spec_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """
    Insert a new stanza at the top of the %changelog section of a spec file.

    The section is appended to the file when it does not exist yet.
    Missing files are skipped; preview mode only prints the stanza.
    """
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping spec changelog update.")
        return

    try:
        with open(spec_path, "r", encoding="utf-8") as fh:
            text = fh.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file for changelog update: {exc}")
        return

    debian_version = f"{new_version}-1"
    stamp = datetime.now().astimezone().strftime("%a %b %d %Y")
    name, email = _get_debian_author()
    summary = message.strip() if message else f"Automated release {new_version}."

    stanza = (
        f"* {stamp} {name} <{email}> - {debian_version}\n"
        f"- {summary}\n\n"
    )

    marker = "%changelog"
    pos = text.find(marker)
    if pos < 0:
        updated = text.rstrip() + "\n\n%changelog\n" + stanza
    else:
        head = text[: pos + len(marker)]
        tail = text[pos + len(marker) :]
        updated = head + "\n" + stanza + tail.lstrip("\n")

    if preview:
        print(
            "[PREVIEW] Would update RPM %changelog section with the following stanza:\n"
            f"{stanza}"
        )
        return

    try:
        with open(spec_path, "w", encoding="utf-8") as fh:
            fh.write(updated)
    except Exception as exc:
        print(f"[WARN] Failed to write updated spec changelog section: {exc}")
        return

    print(
        f"Updated RPM %changelog section in {os.path.basename(spec_path)} "
        f"for {package_name} {debian_version}"
    )
|
||||
66
src/pkgmgr/actions/release/files/rpm_spec.py
Normal file
66
src/pkgmgr/actions/release/files/rpm_spec.py
Normal file
@@ -0,0 +1,66 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
|
||||
def update_spec_version(
    spec_path: str, new_version: str, preview: bool = False
) -> None:
    """
    Update the version in an RPM spec file, if present.

    Sets the Version: header to *new_version* and resets the numeric part
    of Release: to 1 while keeping any suffix (e.g. %{?dist}). Missing
    headers are warned about; preview mode writes nothing.
    """
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping.")
        return

    try:
        with open(spec_path, "r", encoding="utf-8") as fh:
            text = fh.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file: {exc}")
        return

    text, version_hits = re.subn(
        r"^(Version:\s*)(.+)$",
        lambda m: f"{m.group(1)}{new_version}",
        text,
        flags=re.MULTILINE,
    )
    if version_hits == 0:
        print("[WARN] No 'Version:' line found in spec file.")

    def _reset_release(m: re.Match[str]) -> str:
        # Keep whatever follows the leading digits (e.g. "%{?dist}").
        value = m.group(2).strip()
        numeric = re.match(r"^(\d+)(.*)$", value)
        tail = numeric.group(2) if numeric else ""
        return f"{m.group(1)}1{tail}"

    text, release_hits = re.subn(
        r"^(Release:\s*)(.+)$",
        _reset_release,
        text,
        flags=re.MULTILINE,
    )
    if release_hits == 0:
        print("[WARN] No 'Release:' line found in spec file.")

    if preview:
        print(
            "[PREVIEW] Would update spec file "
            f"{os.path.basename(spec_path)} to Version: {new_version}, Release: 1..."
        )
        return

    with open(spec_path, "w", encoding="utf-8") as fh:
        fh.write(text)

    print(
        f"Updated spec file {os.path.basename(spec_path)} "
        f"to Version: {new_version}, Release: 1..."
    )
|
||||
@@ -80,7 +80,9 @@ def is_highest_version_tag(tag: str) -> bool:
|
||||
return True
|
||||
|
||||
latest = max(parsed_all)
|
||||
print(f"[INFO] Latest tag (parsed): v{'.'.join(map(str, latest))}, Current tag: {tag}")
|
||||
print(
|
||||
f"[INFO] Latest tag (parsed): v{'.'.join(map(str, latest))}, Current tag: {tag}"
|
||||
)
|
||||
return parsed_current >= latest
|
||||
|
||||
|
||||
@@ -93,7 +95,9 @@ def update_latest_tag(new_tag: str, *, preview: bool = False) -> None:
|
||||
- 'latest' is forced (floating tag), therefore the push uses --force.
|
||||
"""
|
||||
target_ref = f"{new_tag}^{{}}"
|
||||
print(f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})...")
|
||||
print(
|
||||
f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})..."
|
||||
)
|
||||
|
||||
tag_force_annotated(
|
||||
name="latest",
|
||||
|
||||
@@ -5,7 +5,7 @@ import sys
|
||||
from typing import Optional
|
||||
|
||||
from pkgmgr.actions.branch import close_branch
|
||||
from pkgmgr.core.git import GitError
|
||||
from pkgmgr.core.git import GitRunError
|
||||
from pkgmgr.core.git.commands import add, commit, push, tag_annotated
|
||||
from pkgmgr.core.git.queries import get_current_branch
|
||||
from pkgmgr.core.repository.paths import resolve_repo_paths
|
||||
@@ -40,7 +40,7 @@ def _release_impl(
|
||||
# Determine current branch early
|
||||
try:
|
||||
branch = get_current_branch() or "main"
|
||||
except GitError:
|
||||
except GitRunError:
|
||||
branch = "main"
|
||||
print(f"Releasing on branch: {branch}")
|
||||
|
||||
@@ -76,7 +76,9 @@ def _release_impl(
|
||||
if paths.arch_pkgbuild:
|
||||
update_pkgbuild_version(paths.arch_pkgbuild, new_ver_str, preview=preview)
|
||||
else:
|
||||
print("[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping.")
|
||||
print(
|
||||
"[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping."
|
||||
)
|
||||
|
||||
if paths.rpm_spec:
|
||||
update_spec_version(paths.rpm_spec, new_ver_str, preview=preview)
|
||||
@@ -123,7 +125,9 @@ def _release_impl(
|
||||
paths.rpm_spec,
|
||||
paths.debian_changelog,
|
||||
]
|
||||
existing_files = [p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)]
|
||||
existing_files = [
|
||||
p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)
|
||||
]
|
||||
|
||||
if preview:
|
||||
add(existing_files, preview=True)
|
||||
@@ -135,13 +139,17 @@ def _release_impl(
|
||||
if is_highest_version_tag(new_tag):
|
||||
update_latest_tag(new_tag, preview=True)
|
||||
else:
|
||||
print(f"[PREVIEW] Skipping 'latest' update (tag {new_tag} is not the highest).")
|
||||
print(
|
||||
f"[PREVIEW] Skipping 'latest' update (tag {new_tag} is not the highest)."
|
||||
)
|
||||
|
||||
if close and branch not in ("main", "master"):
|
||||
if force:
|
||||
print(f"[PREVIEW] Would delete branch {branch} (forced).")
|
||||
else:
|
||||
print(f"[PREVIEW] Would ask whether to delete branch {branch} after release.")
|
||||
print(
|
||||
f"[PREVIEW] Would ask whether to delete branch {branch} after release."
|
||||
)
|
||||
return
|
||||
|
||||
add(existing_files, preview=False)
|
||||
@@ -157,8 +165,10 @@ def _release_impl(
|
||||
if is_highest_version_tag(new_tag):
|
||||
update_latest_tag(new_tag, preview=False)
|
||||
else:
|
||||
print(f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest).")
|
||||
except GitError as exc:
|
||||
print(
|
||||
f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest)."
|
||||
)
|
||||
except GitRunError as exc:
|
||||
print(f"[WARN] Failed to update floating 'latest' tag for {new_tag}: {exc}")
|
||||
print("'latest' tag was not updated.")
|
||||
|
||||
@@ -166,7 +176,9 @@ def _release_impl(
|
||||
|
||||
if close:
|
||||
if branch in ("main", "master"):
|
||||
print(f"[INFO] close=True but current branch is {branch}; skipping branch deletion.")
|
||||
print(
|
||||
f"[INFO] close=True but current branch is {branch}; skipping branch deletion."
|
||||
)
|
||||
return
|
||||
|
||||
if not should_delete_branch(force=force):
|
||||
|
||||
@@ -55,7 +55,9 @@ def clone_repos(
|
||||
|
||||
clone_url = _build_clone_url(repo, clone_mode)
|
||||
if not clone_url:
|
||||
print(f"[WARNING] Cannot build clone URL for '{repo_identifier}'. Skipping.")
|
||||
print(
|
||||
f"[WARNING] Cannot build clone URL for '{repo_identifier}'. Skipping."
|
||||
)
|
||||
continue
|
||||
|
||||
shallow = clone_mode == "shallow"
|
||||
@@ -84,7 +86,11 @@ def clone_repos(
|
||||
continue
|
||||
|
||||
print(f"[WARNING] SSH clone failed for '{repo_identifier}': {exc}")
|
||||
choice = input("Do you want to attempt HTTPS clone instead? (y/N): ").strip().lower()
|
||||
choice = (
|
||||
input("Do you want to attempt HTTPS clone instead? (y/N): ")
|
||||
.strip()
|
||||
.lower()
|
||||
)
|
||||
if choice != "y":
|
||||
print(f"[INFO] HTTPS clone not attempted for '{repo_identifier}'.")
|
||||
continue
|
||||
|
||||
@@ -63,6 +63,4 @@ def _strip_git_suffix(name: str) -> str:
|
||||
|
||||
def _ensure_valid_repo_name(name: str) -> None:
|
||||
if not _NAME_RE.fullmatch(name):
|
||||
raise ValueError(
|
||||
"Repository name must match: lowercase a-z, 0-9, '_' and '-'."
|
||||
)
|
||||
raise ValueError("Repository name must match: lowercase a-z, 0-9, '_' and '-'.")
|
||||
|
||||
@@ -66,9 +66,7 @@ class TemplateRenderer:
|
||||
for root, _, files in os.walk(self.templates_dir):
|
||||
for fn in files:
|
||||
if fn.endswith(".j2"):
|
||||
rel = os.path.relpath(
|
||||
os.path.join(root, fn), self.templates_dir
|
||||
)
|
||||
rel = os.path.relpath(os.path.join(root, fn), self.templates_dir)
|
||||
print(f"[Preview] Would render template: {rel} -> {rel[:-3]}")
|
||||
|
||||
@staticmethod
|
||||
|
||||
@@ -24,9 +24,13 @@ def deinstall_repos(
|
||||
|
||||
# Remove alias link/file (interactive)
|
||||
if os.path.exists(alias_path):
|
||||
confirm = input(
|
||||
confirm = (
|
||||
input(
|
||||
f"Are you sure you want to delete link '{alias_path}' for {repo_identifier}? [y/N]: "
|
||||
).strip().lower()
|
||||
)
|
||||
.strip()
|
||||
.lower()
|
||||
)
|
||||
if confirm == "y":
|
||||
if preview:
|
||||
print(f"[Preview] Would remove link '{alias_path}'.")
|
||||
|
||||
@@ -3,19 +3,30 @@ import os
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
|
||||
|
||||
def delete_repos(selected_repos, repositories_base_dir, all_repos, preview=False):
|
||||
for repo in selected_repos:
|
||||
repo_identifier = get_repo_identifier(repo, all_repos)
|
||||
repo_dir = get_repo_dir(repositories_base_dir, repo)
|
||||
if os.path.exists(repo_dir):
|
||||
confirm = input(f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: ").strip().lower()
|
||||
confirm = (
|
||||
input(
|
||||
f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: "
|
||||
)
|
||||
.strip()
|
||||
.lower()
|
||||
)
|
||||
if confirm == "y":
|
||||
if preview:
|
||||
print(f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}.")
|
||||
print(
|
||||
f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}."
|
||||
)
|
||||
else:
|
||||
try:
|
||||
shutil.rmtree(repo_dir)
|
||||
print(f"Deleted repository directory '{repo_dir}' for {repo_identifier}.")
|
||||
print(
|
||||
f"Deleted repository directory '{repo_dir}' for {repo_identifier}."
|
||||
)
|
||||
except Exception as e:
|
||||
print(f"Error deleting '{repo_dir}' for {repo_identifier}: {e}")
|
||||
else:
|
||||
|
||||
@@ -233,9 +233,7 @@ def list_repositories(
|
||||
categories.append(str(repo["category"]))
|
||||
|
||||
yaml_tags: List[str] = list(map(str, repo.get("tags", [])))
|
||||
display_tags: List[str] = sorted(
|
||||
set(yaml_tags + list(map(str, extra_tags)))
|
||||
)
|
||||
display_tags: List[str] = sorted(set(yaml_tags + list(map(str, extra_tags))))
|
||||
|
||||
rows.append(
|
||||
{
|
||||
@@ -288,13 +286,7 @@ def list_repositories(
|
||||
status_padded = status.ljust(status_width)
|
||||
status_colored = _color_status(status_padded)
|
||||
|
||||
print(
|
||||
f"{ident_col} "
|
||||
f"{status_colored} "
|
||||
f"{cat_col} "
|
||||
f"{tag_col} "
|
||||
f"{dir_col}"
|
||||
)
|
||||
print(f"{ident_col} {status_colored} {cat_col} {tag_col} {dir_col}")
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Detailed section (alias value red, same status coloring)
|
||||
|
||||
@@ -55,12 +55,16 @@ class UpdateManager:
|
||||
code = exc.code if isinstance(exc.code, int) else str(exc.code)
|
||||
failures.append((identifier, f"pull failed (exit={code})"))
|
||||
if not quiet:
|
||||
print(f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing...")
|
||||
print(
|
||||
f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing..."
|
||||
)
|
||||
continue
|
||||
except Exception as exc:
|
||||
failures.append((identifier, f"pull failed: {exc}"))
|
||||
if not quiet:
|
||||
print(f"[Warning] update: pull failed for {identifier}: {exc}. Continuing...")
|
||||
print(
|
||||
f"[Warning] update: pull failed for {identifier}: {exc}. Continuing..."
|
||||
)
|
||||
continue
|
||||
|
||||
try:
|
||||
@@ -82,12 +86,16 @@ class UpdateManager:
|
||||
code = exc.code if isinstance(exc.code, int) else str(exc.code)
|
||||
failures.append((identifier, f"install failed (exit={code})"))
|
||||
if not quiet:
|
||||
print(f"[Warning] update: install failed for {identifier} (exit={code}). Continuing...")
|
||||
print(
|
||||
f"[Warning] update: install failed for {identifier} (exit={code}). Continuing..."
|
||||
)
|
||||
continue
|
||||
except Exception as exc:
|
||||
failures.append((identifier, f"install failed: {exc}"))
|
||||
if not quiet:
|
||||
print(f"[Warning] update: install failed for {identifier}: {exc}. Continuing...")
|
||||
print(
|
||||
f"[Warning] update: install failed for {identifier}: {exc}. Continuing..."
|
||||
)
|
||||
continue
|
||||
|
||||
if failures and not quiet:
|
||||
|
||||
@@ -31,6 +31,7 @@ class OSReleaseInfo:
|
||||
"""
|
||||
Minimal /etc/os-release representation for distro detection.
|
||||
"""
|
||||
|
||||
id: str = ""
|
||||
id_like: str = ""
|
||||
pretty_name: str = ""
|
||||
@@ -63,4 +64,6 @@ class OSReleaseInfo:
|
||||
|
||||
def is_fedora_family(self) -> bool:
|
||||
ids = self.ids()
|
||||
return bool(ids.intersection({"fedora", "rhel", "centos", "rocky", "almalinux"}))
|
||||
return bool(
|
||||
ids.intersection({"fedora", "rhel", "centos", "rocky", "almalinux"})
|
||||
)
|
||||
|
||||
@@ -58,7 +58,9 @@ class SystemUpdater:
|
||||
run_command("sudo pacman -Syu --noconfirm", preview=preview)
|
||||
return
|
||||
|
||||
print("[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman).")
|
||||
print(
|
||||
"[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman)."
|
||||
)
|
||||
|
||||
def _update_debian(self, *, preview: bool) -> None:
|
||||
from pkgmgr.core.command.run import run_command
|
||||
@@ -67,7 +69,9 @@ class SystemUpdater:
|
||||
apt_get = shutil.which("apt-get")
|
||||
|
||||
if not (sudo and apt_get):
|
||||
print("[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get).")
|
||||
print(
|
||||
"[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get)."
|
||||
)
|
||||
return
|
||||
|
||||
env = "DEBIAN_FRONTEND=noninteractive"
|
||||
|
||||
@@ -29,6 +29,7 @@ For details on any command, run:
|
||||
\033[1mpkgmgr <command> --help\033[0m
|
||||
"""
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""
|
||||
Entry point for the pkgmgr CLI.
|
||||
@@ -41,9 +42,7 @@ def main() -> None:
|
||||
repositories_dir = os.path.expanduser(
|
||||
directories.get("repositories", "~/Repositories")
|
||||
)
|
||||
binaries_dir = os.path.expanduser(
|
||||
directories.get("binaries", "~/.local/bin")
|
||||
)
|
||||
binaries_dir = os.path.expanduser(directories.get("binaries", "~/.local/bin"))
|
||||
|
||||
# Ensure the merged config actually contains the resolved directories
|
||||
config_merged.setdefault("directories", {})
|
||||
|
||||
@@ -135,9 +135,7 @@ def handle_changelog(
|
||||
target_tag=range_arg,
|
||||
)
|
||||
if cur_tag is None:
|
||||
print(
|
||||
f"[WARN] Tag {range_arg!r} not found or not a SemVer tag."
|
||||
)
|
||||
print(f"[WARN] Tag {range_arg!r} not found or not a SemVer tag.")
|
||||
print("[INFO] Falling back to full history.")
|
||||
from_ref = None
|
||||
to_ref = None
|
||||
|
||||
@@ -213,9 +213,7 @@ def handle_config(args, ctx: CLIContext) -> None:
|
||||
)
|
||||
if key == mod_key:
|
||||
entry["ignore"] = args.set == "true"
|
||||
print(
|
||||
f"Set ignore for {key} to {entry['ignore']}"
|
||||
)
|
||||
print(f"Set ignore for {key} to {entry['ignore']}")
|
||||
|
||||
save_user_config(user_config, user_config_path)
|
||||
return
|
||||
|
||||
@@ -4,7 +4,12 @@ from __future__ import annotations
|
||||
import sys
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pkgmgr.actions.mirror import diff_mirrors, list_mirrors, merge_mirrors, setup_mirrors
|
||||
from pkgmgr.actions.mirror import (
|
||||
diff_mirrors,
|
||||
list_mirrors,
|
||||
merge_mirrors,
|
||||
setup_mirrors,
|
||||
)
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
@@ -56,11 +61,15 @@ def handle_mirror_command(
|
||||
preview = getattr(args, "preview", False)
|
||||
|
||||
if source == target:
|
||||
print("[ERROR] For 'mirror merge', source and target must differ (config vs file).")
|
||||
print(
|
||||
"[ERROR] For 'mirror merge', source and target must differ (config vs file)."
|
||||
)
|
||||
sys.exit(2)
|
||||
|
||||
explicit_config_path = getattr(args, "config_path", None)
|
||||
user_config_path = explicit_config_path or getattr(ctx, "user_config_path", None)
|
||||
user_config_path = explicit_config_path or getattr(
|
||||
ctx, "user_config_path", None
|
||||
)
|
||||
|
||||
merge_mirrors(
|
||||
selected_repos=selected,
|
||||
|
||||
@@ -18,7 +18,9 @@ def handle_publish(args, ctx: CLIContext, selected: List[Repository]) -> None:
|
||||
|
||||
for repo in selected:
|
||||
identifier = get_repo_identifier(repo, ctx.all_repositories)
|
||||
repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
|
||||
repo_dir = repo.get("directory") or get_repo_dir(
|
||||
ctx.repositories_base_dir, repo
|
||||
)
|
||||
|
||||
if not os.path.isdir(repo_dir):
|
||||
print(f"[WARN] Skipping {identifier}: directory missing.")
|
||||
|
||||
@@ -36,9 +36,13 @@ def handle_release(
|
||||
identifier = get_repo_identifier(repo, ctx.all_repositories)
|
||||
|
||||
try:
|
||||
repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
|
||||
repo_dir = repo.get("directory") or get_repo_dir(
|
||||
ctx.repositories_base_dir, repo
|
||||
)
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}")
|
||||
print(
|
||||
f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}"
|
||||
)
|
||||
continue
|
||||
|
||||
if not os.path.isdir(repo_dir):
|
||||
|
||||
@@ -32,9 +32,8 @@ def _resolve_repository_directory(repository: Repository, ctx: CLIContext) -> st
|
||||
if repo_dir:
|
||||
return repo_dir
|
||||
|
||||
base_dir = (
|
||||
getattr(ctx, "repositories_base_dir", None)
|
||||
or getattr(ctx, "repositories_dir", None)
|
||||
base_dir = getattr(ctx, "repositories_base_dir", None) or getattr(
|
||||
ctx, "repositories_dir", None
|
||||
)
|
||||
if not base_dir:
|
||||
raise RuntimeError(
|
||||
|
||||
@@ -33,8 +33,7 @@ def add_branch_subparsers(
|
||||
"name",
|
||||
nargs="?",
|
||||
help=(
|
||||
"Name of the new branch (optional; will be asked interactively "
|
||||
"if omitted)"
|
||||
"Name of the new branch (optional; will be asked interactively if omitted)"
|
||||
),
|
||||
)
|
||||
branch_open.add_argument(
|
||||
@@ -54,8 +53,7 @@ def add_branch_subparsers(
|
||||
"name",
|
||||
nargs="?",
|
||||
help=(
|
||||
"Name of the branch to close (optional; current branch is used "
|
||||
"if omitted)"
|
||||
"Name of the branch to close (optional; current branch is used if omitted)"
|
||||
),
|
||||
)
|
||||
branch_close.add_argument(
|
||||
@@ -84,8 +82,7 @@ def add_branch_subparsers(
|
||||
"name",
|
||||
nargs="?",
|
||||
help=(
|
||||
"Name of the branch to drop (optional; current branch is used "
|
||||
"if omitted)"
|
||||
"Name of the branch to drop (optional; current branch is used if omitted)"
|
||||
),
|
||||
)
|
||||
branch_drop.add_argument(
|
||||
|
||||
@@ -20,7 +20,9 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
|
||||
required=True,
|
||||
)
|
||||
|
||||
mirror_list = mirror_subparsers.add_parser("list", help="List configured mirrors for repositories")
|
||||
mirror_list = mirror_subparsers.add_parser(
|
||||
"list", help="List configured mirrors for repositories"
|
||||
)
|
||||
add_identifier_arguments(mirror_list)
|
||||
mirror_list.add_argument(
|
||||
"--source",
|
||||
@@ -29,15 +31,21 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
|
||||
help="Which mirror source to show.",
|
||||
)
|
||||
|
||||
mirror_diff = mirror_subparsers.add_parser("diff", help="Show differences between config mirrors and MIRRORS file")
|
||||
mirror_diff = mirror_subparsers.add_parser(
|
||||
"diff", help="Show differences between config mirrors and MIRRORS file"
|
||||
)
|
||||
add_identifier_arguments(mirror_diff)
|
||||
|
||||
mirror_merge = mirror_subparsers.add_parser(
|
||||
"merge",
|
||||
help="Merge mirrors between config and MIRRORS file (example: pkgmgr mirror merge config file --all)",
|
||||
)
|
||||
mirror_merge.add_argument("source", choices=["config", "file"], help="Source of mirrors.")
|
||||
mirror_merge.add_argument("target", choices=["config", "file"], help="Target of mirrors.")
|
||||
mirror_merge.add_argument(
|
||||
"source", choices=["config", "file"], help="Source of mirrors."
|
||||
)
|
||||
mirror_merge.add_argument(
|
||||
"target", choices=["config", "file"], help="Target of mirrors."
|
||||
)
|
||||
add_identifier_arguments(mirror_merge)
|
||||
mirror_merge.add_argument(
|
||||
"--config-path",
|
||||
|
||||
@@ -48,9 +48,6 @@ def add_navigation_subparsers(
|
||||
"--command",
|
||||
nargs=argparse.REMAINDER,
|
||||
dest="shell_command",
|
||||
help=(
|
||||
"The shell command (and its arguments) to execute in each "
|
||||
"repository"
|
||||
),
|
||||
help=("The shell command (and its arguments) to execute in each repository"),
|
||||
default=[],
|
||||
)
|
||||
|
||||
@@ -53,10 +53,7 @@ def _add_proxy_identifier_arguments(parser: argparse.ArgumentParser) -> None:
|
||||
parser.add_argument(
|
||||
"identifiers",
|
||||
nargs="*",
|
||||
help=(
|
||||
"Identifier(s) for repositories. "
|
||||
"Default: Repository of current folder."
|
||||
),
|
||||
help=("Identifier(s) for repositories. Default: Repository of current folder."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--all",
|
||||
@@ -118,12 +115,7 @@ def _proxy_has_explicit_selection(args: argparse.Namespace) -> bool:
|
||||
string_filter = getattr(args, "string", "") or ""
|
||||
|
||||
# Proxy commands currently do not support --tag, so it is not checked here.
|
||||
return bool(
|
||||
use_all
|
||||
or identifiers
|
||||
or categories
|
||||
or string_filter
|
||||
)
|
||||
return bool(use_all or identifiers or categories or string_filter)
|
||||
|
||||
|
||||
def _select_repo_for_current_directory(
|
||||
@@ -204,9 +196,7 @@ def maybe_handle_proxy(args: argparse.Namespace, ctx: CLIContext) -> bool:
|
||||
If the top-level command is one of the proxy subcommands
|
||||
(git / docker / docker compose), handle it here and return True.
|
||||
"""
|
||||
all_proxy_subcommands = {
|
||||
sub for subs in PROXY_COMMANDS.values() for sub in subs
|
||||
}
|
||||
all_proxy_subcommands = {sub for subs in PROXY_COMMANDS.values() for sub in subs}
|
||||
|
||||
if args.command not in all_proxy_subcommands:
|
||||
return False
|
||||
|
||||
@@ -22,9 +22,8 @@ def resolve_repository_path(repository: Repository, ctx: CLIContext) -> str:
|
||||
if value:
|
||||
return value
|
||||
|
||||
base_dir = (
|
||||
getattr(ctx, "repositories_base_dir", None)
|
||||
or getattr(ctx, "repositories_dir", None)
|
||||
base_dir = getattr(ctx, "repositories_base_dir", None) or getattr(
|
||||
ctx, "repositories_dir", None
|
||||
)
|
||||
if not base_dir:
|
||||
raise RuntimeError(
|
||||
|
||||
@@ -57,7 +57,9 @@ def _build_workspace_filename(identifiers: List[str]) -> str:
|
||||
return "_".join(sorted_identifiers) + ".code-workspace"
|
||||
|
||||
|
||||
def _build_workspace_data(selected: List[Repository], ctx: CLIContext) -> Dict[str, Any]:
|
||||
def _build_workspace_data(
|
||||
selected: List[Repository], ctx: CLIContext
|
||||
) -> Dict[str, Any]:
|
||||
folders = [{"path": resolve_repository_path(repo, ctx)} for repo in selected]
|
||||
return {
|
||||
"folders": folders,
|
||||
|
||||
@@ -2,6 +2,7 @@ import os
|
||||
import hashlib
|
||||
import re
|
||||
|
||||
|
||||
def generate_alias(repo, bin_dir, existing_aliases):
|
||||
"""
|
||||
Generate an alias for a repository based on its repository name.
|
||||
|
||||
@@ -98,8 +98,7 @@ def create_ink(
|
||||
if alias_name == repo_identifier:
|
||||
if not quiet:
|
||||
print(
|
||||
f"Alias '{alias_name}' equals identifier. "
|
||||
"Skipping alias creation."
|
||||
f"Alias '{alias_name}' equals identifier. Skipping alias creation."
|
||||
)
|
||||
return
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ class CliLayer(str, Enum):
|
||||
"""
|
||||
CLI layer precedence (lower number = stronger layer).
|
||||
"""
|
||||
|
||||
OS_PACKAGES = "os-packages"
|
||||
NIX = "nix"
|
||||
PYTHON = "python"
|
||||
|
||||
@@ -34,11 +34,7 @@ def _nix_binary_candidates(home: str, names: List[str]) -> List[str]:
|
||||
"""
|
||||
Build possible Nix profile binary paths for a list of candidate names.
|
||||
"""
|
||||
return [
|
||||
os.path.join(home, ".nix-profile", "bin", name)
|
||||
for name in names
|
||||
if name
|
||||
]
|
||||
return [os.path.join(home, ".nix-profile", "bin", name) for name in names if name]
|
||||
|
||||
|
||||
def _path_binary_candidates(names: List[str]) -> List[str]:
|
||||
@@ -148,7 +144,8 @@ def resolve_command_for_repo(
|
||||
|
||||
# c) Nix profile binaries
|
||||
nix_binaries = [
|
||||
path for path in _nix_binary_candidates(home, candidate_names)
|
||||
path
|
||||
for path in _nix_binary_candidates(home, candidate_names)
|
||||
if _is_executable(path)
|
||||
]
|
||||
nix_binary = nix_binaries[0] if nix_binaries else None
|
||||
|
||||
@@ -51,6 +51,7 @@ Repo = Dict[str, Any]
|
||||
# Hilfsfunktionen
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Recursively merge two dictionaries.
|
||||
@@ -58,11 +59,7 @@ def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any
|
||||
Values from `override` win over values in `base`.
|
||||
"""
|
||||
for key, value in override.items():
|
||||
if (
|
||||
key in base
|
||||
and isinstance(base[key], dict)
|
||||
and isinstance(value, dict)
|
||||
):
|
||||
if key in base and isinstance(base[key], dict) and isinstance(value, dict):
|
||||
_deep_merge(base[key], value)
|
||||
else:
|
||||
base[key] = value
|
||||
@@ -93,9 +90,7 @@ def _merge_repo_lists(
|
||||
- Wenn category_name gesetzt ist, wird dieser in
|
||||
repo["category_files"] eingetragen.
|
||||
"""
|
||||
index: Dict[Tuple[str, str, str], Repo] = {
|
||||
_repo_key(r): r for r in base_list
|
||||
}
|
||||
index: Dict[Tuple[str, str, str], Repo] = {_repo_key(r): r for r in base_list}
|
||||
|
||||
for src in new_list:
|
||||
key = _repo_key(src)
|
||||
@@ -233,10 +228,12 @@ def _load_defaults_from_package_or_project() -> Dict[str, Any]:
|
||||
|
||||
return {"directories": {}, "repositories": []}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Hauptfunktion
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def load_config(user_config_path: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Load and merge configuration for pkgmgr.
|
||||
@@ -289,8 +286,12 @@ def load_config(user_config_path: str) -> Dict[str, Any]:
|
||||
|
||||
# repositories
|
||||
merged["repositories"] = []
|
||||
_merge_repo_lists(merged["repositories"], defaults["repositories"], category_name=None)
|
||||
_merge_repo_lists(merged["repositories"], user_cfg["repositories"], category_name=None)
|
||||
_merge_repo_lists(
|
||||
merged["repositories"], defaults["repositories"], category_name=None
|
||||
)
|
||||
_merge_repo_lists(
|
||||
merged["repositories"], user_cfg["repositories"], category_name=None
|
||||
)
|
||||
|
||||
# andere Top-Level-Keys (falls vorhanden)
|
||||
other_keys = (set(defaults.keys()) | set(user_cfg.keys())) - {
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import yaml
|
||||
import os
|
||||
|
||||
def save_user_config(user_config,USER_CONFIG_PATH:str):
|
||||
|
||||
def save_user_config(user_config, USER_CONFIG_PATH: str):
|
||||
"""Save the user configuration to USER_CONFIG_PATH."""
|
||||
os.makedirs(os.path.dirname(USER_CONFIG_PATH), exist_ok=True)
|
||||
with open(USER_CONFIG_PATH, 'w') as f:
|
||||
with open(USER_CONFIG_PATH, "w") as f:
|
||||
yaml.dump(user_config, f)
|
||||
print(f"User configuration updated in {USER_CONFIG_PATH}.")
|
||||
@@ -16,7 +16,9 @@ class EnvTokenProvider:
|
||||
source_name: str = "env"
|
||||
|
||||
def get(self, request: TokenRequest) -> Optional[TokenResult]:
|
||||
for key in env_var_candidates(request.provider_kind, request.host, request.owner):
|
||||
for key in env_var_candidates(
|
||||
request.provider_kind, request.host, request.owner
|
||||
):
|
||||
val = os.environ.get(key)
|
||||
if val:
|
||||
return TokenResult(token=val.strip(), source=self.source_name)
|
||||
|
||||
@@ -15,6 +15,7 @@ class GhTokenProvider:
|
||||
|
||||
This does NOT persist anything; it only reads what `gh` already knows.
|
||||
"""
|
||||
|
||||
source_name: str = "gh"
|
||||
|
||||
def get(self, request: TokenRequest) -> Optional[TokenResult]:
|
||||
|
||||
@@ -21,9 +21,7 @@ def _import_keyring():
|
||||
try:
|
||||
import keyring # type: ignore
|
||||
except Exception as exc: # noqa: BLE001
|
||||
raise KeyringUnavailableError(
|
||||
"python-keyring is not installed."
|
||||
) from exc
|
||||
raise KeyringUnavailableError("python-keyring is not installed.") from exc
|
||||
|
||||
# Some environments have keyring installed but no usable backend.
|
||||
# We do a lightweight "backend sanity check" by attempting to read the backend.
|
||||
|
||||
@@ -9,7 +9,12 @@ from .providers.env import EnvTokenProvider
|
||||
from .providers.gh import GhTokenProvider
|
||||
from .providers.keyring import KeyringTokenProvider
|
||||
from .providers.prompt import PromptTokenProvider
|
||||
from .types import KeyringUnavailableError, NoCredentialsError, TokenRequest, TokenResult
|
||||
from .types import (
|
||||
KeyringUnavailableError,
|
||||
NoCredentialsError,
|
||||
TokenRequest,
|
||||
TokenResult,
|
||||
)
|
||||
from .validate import validate_token
|
||||
|
||||
|
||||
@@ -55,7 +60,10 @@ class TokenResolver:
|
||||
print(f" {msg}", file=sys.stderr)
|
||||
print(" Tokens will NOT be persisted securely.", file=sys.stderr)
|
||||
print("", file=sys.stderr)
|
||||
print(" To enable secure token storage, install python-keyring:", file=sys.stderr)
|
||||
print(
|
||||
" To enable secure token storage, install python-keyring:",
|
||||
file=sys.stderr,
|
||||
)
|
||||
print(" pip install keyring", file=sys.stderr)
|
||||
print("", file=sys.stderr)
|
||||
print(" Or install via system packages:", file=sys.stderr)
|
||||
|
||||
@@ -13,7 +13,9 @@ class KeyringKey:
|
||||
username: str
|
||||
|
||||
|
||||
def build_keyring_key(provider_kind: str, host: str, owner: Optional[str]) -> KeyringKey:
|
||||
def build_keyring_key(
|
||||
provider_kind: str, host: str, owner: Optional[str]
|
||||
) -> KeyringKey:
|
||||
"""Build a stable keyring key.
|
||||
|
||||
- service: "pkgmgr:<provider>"
|
||||
@@ -21,11 +23,15 @@ def build_keyring_key(provider_kind: str, host: str, owner: Optional[str]) -> Ke
|
||||
"""
|
||||
provider_kind = str(provider_kind).strip().lower()
|
||||
host = str(host).strip()
|
||||
owner_part = (str(owner).strip() if owner else "-")
|
||||
return KeyringKey(service=f"pkgmgr:{provider_kind}", username=f"{host}|{owner_part}")
|
||||
owner_part = str(owner).strip() if owner else "-"
|
||||
return KeyringKey(
|
||||
service=f"pkgmgr:{provider_kind}", username=f"{host}|{owner_part}"
|
||||
)
|
||||
|
||||
|
||||
def env_var_candidates(provider_kind: str, host: str, owner: Optional[str]) -> list[str]:
|
||||
def env_var_candidates(
|
||||
provider_kind: str, host: str, owner: Optional[str]
|
||||
) -> list[str]:
|
||||
"""Return a list of environment variable names to try.
|
||||
|
||||
Order is from most specific to most generic.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from .errors import GitError
|
||||
from .errors import GitRunError
|
||||
from .run import run
|
||||
|
||||
"""
|
||||
@@ -12,6 +12,6 @@ details of subprocess handling.
|
||||
"""
|
||||
|
||||
__all__ = [
|
||||
"GitError",
|
||||
"GitRunError",
|
||||
"run",
|
||||
]
|
||||
|
||||
@@ -16,7 +16,7 @@ from .fetch import GitFetchError, fetch
|
||||
from .init import GitInitError, init
|
||||
from .merge_no_ff import GitMergeError, merge_no_ff
|
||||
from .pull import GitPullError, pull
|
||||
from .pull_args import GitPullArgsError, pull_args # <-- add
|
||||
from .pull_args import GitPullArgsError, pull_args
|
||||
from .pull_ff_only import GitPullFfOnlyError, pull_ff_only
|
||||
from .push import GitPushError, push
|
||||
from .push_upstream import GitPushUpstreamError, push_upstream
|
||||
@@ -30,7 +30,7 @@ __all__ = [
|
||||
"fetch",
|
||||
"checkout",
|
||||
"pull",
|
||||
"pull_args", # <-- add
|
||||
"pull_args",
|
||||
"pull_ff_only",
|
||||
"merge_no_ff",
|
||||
"push",
|
||||
@@ -52,7 +52,7 @@ __all__ = [
|
||||
"GitFetchError",
|
||||
"GitCheckoutError",
|
||||
"GitPullError",
|
||||
"GitPullArgsError", # <-- add
|
||||
"GitPullArgsError",
|
||||
"GitPullFfOnlyError",
|
||||
"GitMergeError",
|
||||
"GitPushError",
|
||||
|
||||
@@ -2,7 +2,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Iterable, List, Sequence, Union
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -37,7 +37,7 @@ def add(
|
||||
|
||||
try:
|
||||
run(["add", *normalized], cwd=cwd, preview=preview)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise GitAddError(
|
||||
f"Failed to add paths to staging area: {normalized!r}.",
|
||||
cwd=cwd,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
# src/pkgmgr/core/git/commands/add_all.py
|
||||
from __future__ import annotations
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitCommandError, GitRunError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -18,5 +17,7 @@ def add_all(*, cwd: str = ".", preview: bool = False) -> None:
|
||||
"""
|
||||
try:
|
||||
run(["add", "-A"], cwd=cwd, preview=preview)
|
||||
except GitError as exc:
|
||||
raise GitAddAllError("Failed to stage all changes with `git add -A`.", cwd=cwd) from exc
|
||||
except GitRunError as exc:
|
||||
raise GitAddAllError(
|
||||
"Failed to stage all changes with `git add -A`.", cwd=cwd
|
||||
) from exc
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ def add_remote(
|
||||
cwd=cwd,
|
||||
preview=preview,
|
||||
)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise GitAddRemoteError(
|
||||
f"Failed to add remote {name!r} with URL {url!r}.",
|
||||
cwd=cwd,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ def add_remote_push_url(
|
||||
cwd=cwd,
|
||||
preview=preview,
|
||||
)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise GitAddRemotePushUrlError(
|
||||
f"Failed to add push url {url!r} to remote {remote!r}.",
|
||||
cwd=cwd,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
# src/pkgmgr/core/git/commands/branch_move.py
|
||||
from __future__ import annotations
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -18,5 +17,7 @@ def branch_move(branch: str, *, cwd: str = ".", preview: bool = False) -> None:
|
||||
"""
|
||||
try:
|
||||
run(["branch", "-M", branch], cwd=cwd, preview=preview)
|
||||
except GitError as exc:
|
||||
raise GitBranchMoveError(f"Failed to move/rename current branch to {branch!r}.", cwd=cwd) from exc
|
||||
except GitRunError as exc:
|
||||
raise GitBranchMoveError(
|
||||
f"Failed to move/rename current branch to {branch!r}.", cwd=cwd
|
||||
) from exc
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ class GitCheckoutError(GitCommandError):
|
||||
def checkout(branch: str, cwd: str = ".") -> None:
|
||||
try:
|
||||
run(["checkout", branch], cwd=cwd)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise GitCheckoutError(
|
||||
f"Failed to checkout branch {branch!r}.",
|
||||
cwd=cwd,
|
||||
|
||||
@@ -2,7 +2,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import List
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ def clone(
|
||||
"""
|
||||
try:
|
||||
run(["clone", *args], cwd=cwd, preview=preview)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise GitCloneError(
|
||||
f"Git clone failed with args={args!r}.",
|
||||
cwd=cwd,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user