Compare commits
29 Commits
d1e5a71f77
...
v1.6.4
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3d7d7e9c09 | ||
|
|
328203ccd7 | ||
|
|
ac16378807 | ||
|
|
f7a86bc353 | ||
|
|
06a6a77a48 | ||
|
|
4883e40812 | ||
|
|
031ae5ac69 | ||
|
|
1c4fc531fa | ||
|
|
33dfbf3a4d | ||
|
|
a3aa7b6394 | ||
|
|
724c262a4a | ||
|
|
dcbe16c5f0 | ||
|
|
f63b0a9f08 | ||
|
|
822c418503 | ||
|
|
562a6da291 | ||
|
|
e61b30d9af | ||
|
|
27c0c7c01f | ||
|
|
0d652d995e | ||
|
|
0e03fbbee2 | ||
|
|
7cfd7e8d5c | ||
|
|
84b6c71748 | ||
|
|
db9aaf920e | ||
|
|
69d28a461d | ||
|
|
03e414cc9f | ||
|
|
7674762c9a | ||
|
|
a47de15e42 | ||
|
|
37f3057d31 | ||
|
|
d55c8d3726 | ||
|
|
3990560cd7 |
8
.github/workflows/ci.yml
vendored
8
.github/workflows/ci.yml
vendored
@@ -28,8 +28,8 @@ jobs:
|
|||||||
test-virgin-root:
|
test-virgin-root:
|
||||||
uses: ./.github/workflows/test-virgin-root.yml
|
uses: ./.github/workflows/test-virgin-root.yml
|
||||||
|
|
||||||
codesniffer-shellcheck:
|
linter-shell:
|
||||||
uses: ./.github/workflows/codesniffer-shellcheck.yml
|
uses: ./.github/workflows/linter-shell.yml
|
||||||
|
|
||||||
codesniffer-ruff:
|
linter-python:
|
||||||
uses: ./.github/workflows/codesniffer-ruff.yml
|
uses: ./.github/workflows/linter-python.yml
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ on:
|
|||||||
workflow_call:
|
workflow_call:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
codesniffer-ruff:
|
linter-python:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
@@ -4,7 +4,7 @@ on:
|
|||||||
workflow_call:
|
workflow_call:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
codesniffer-shellcheck:
|
linter-shell:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
12
.github/workflows/mark-stable.yml
vendored
12
.github/workflows/mark-stable.yml
vendored
@@ -29,16 +29,16 @@ jobs:
|
|||||||
test-virgin-root:
|
test-virgin-root:
|
||||||
uses: ./.github/workflows/test-virgin-root.yml
|
uses: ./.github/workflows/test-virgin-root.yml
|
||||||
|
|
||||||
codesniffer-shellcheck:
|
linter-shell:
|
||||||
uses: ./.github/workflows/codesniffer-shellcheck.yml
|
uses: ./.github/workflows/linter-shell.yml
|
||||||
|
|
||||||
codesniffer-ruff:
|
linter-python:
|
||||||
uses: ./.github/workflows/codesniffer-ruff.yml
|
uses: ./.github/workflows/linter-python.yml
|
||||||
|
|
||||||
mark-stable:
|
mark-stable:
|
||||||
needs:
|
needs:
|
||||||
- codesniffer-shellcheck
|
- linter-shell
|
||||||
- codesniffer-ruff
|
- linter-python
|
||||||
- test-unit
|
- test-unit
|
||||||
- test-integration
|
- test-integration
|
||||||
- test-env-nix
|
- test-env-nix
|
||||||
|
|||||||
12
.github/workflows/publish-containers.yml
vendored
12
.github/workflows/publish-containers.yml
vendored
@@ -19,7 +19,6 @@ jobs:
|
|||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
fetch-tags: true
|
|
||||||
|
|
||||||
- name: Checkout workflow_run commit and refresh tags
|
- name: Checkout workflow_run commit and refresh tags
|
||||||
run: |
|
run: |
|
||||||
@@ -35,22 +34,30 @@ jobs:
|
|||||||
SHA="$(git rev-parse HEAD)"
|
SHA="$(git rev-parse HEAD)"
|
||||||
|
|
||||||
V_TAG="$(git tag --points-at "${SHA}" --list 'v*' | sort -V | tail -n1)"
|
V_TAG="$(git tag --points-at "${SHA}" --list 'v*' | sort -V | tail -n1)"
|
||||||
[[ -n "$V_TAG" ]] || { echo "No version tag found"; exit 1; }
|
if [[ -z "${V_TAG}" ]]; then
|
||||||
|
echo "No version tag found for ${SHA}. Skipping publish."
|
||||||
|
echo "should_publish=false" >> "$GITHUB_OUTPUT"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
VERSION="${V_TAG#v}"
|
VERSION="${V_TAG#v}"
|
||||||
|
|
||||||
STABLE_SHA="$(git rev-parse -q --verify refs/tags/stable^{commit} 2>/dev/null || true)"
|
STABLE_SHA="$(git rev-parse -q --verify refs/tags/stable^{commit} 2>/dev/null || true)"
|
||||||
IS_STABLE=false
|
IS_STABLE=false
|
||||||
[[ -n "${STABLE_SHA}" && "${STABLE_SHA}" == "${SHA}" ]] && IS_STABLE=true
|
[[ -n "${STABLE_SHA}" && "${STABLE_SHA}" == "${SHA}" ]] && IS_STABLE=true
|
||||||
|
|
||||||
|
echo "should_publish=true" >> "$GITHUB_OUTPUT"
|
||||||
echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
|
echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
|
||||||
echo "is_stable=${IS_STABLE}" >> "$GITHUB_OUTPUT"
|
echo "is_stable=${IS_STABLE}" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
|
if: ${{ steps.info.outputs.should_publish == 'true' }}
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v3
|
||||||
with:
|
with:
|
||||||
use: true
|
use: true
|
||||||
|
|
||||||
- name: Login to GHCR
|
- name: Login to GHCR
|
||||||
|
if: ${{ steps.info.outputs.should_publish == 'true' }}
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
@@ -58,6 +65,7 @@ jobs:
|
|||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Publish all images
|
- name: Publish all images
|
||||||
|
if: ${{ steps.info.outputs.should_publish == 'true' }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
OWNER="${{ github.repository_owner }}" \
|
OWNER="${{ github.repository_owner }}" \
|
||||||
|
|||||||
43
CHANGELOG.md
43
CHANGELOG.md
@@ -1,3 +1,46 @@
|
|||||||
|
## [1.6.4] - 2025-12-14
|
||||||
|
|
||||||
|
* * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
|
||||||
|
* More stable launcher behavior in packaged and virtual-env setups.
|
||||||
|
* Enhanced mirror and remote handling: repository owner/name are derived from URLs, with smoother provisioning and clearer credential handling.
|
||||||
|
* More reliable releases and artifacts due to safer CI behavior when no version tag is present.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.6.3] - 2025-12-14
|
||||||
|
|
||||||
|
* ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.6.2] - 2025-12-14
|
||||||
|
|
||||||
|
* **pkgmgr version** now also shows the installed pkgmgr version when run outside a repository.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.6.1] - 2025-12-14
|
||||||
|
|
||||||
|
* * Added automatic retry handling for GitHub 403 / rate-limit errors during Nix flake installs (Fibonacci backoff with jitter).
|
||||||
|
|
||||||
|
|
||||||
|
## [1.6.0] - 2025-12-14
|
||||||
|
|
||||||
|
* *** Changed ***
|
||||||
|
- Unified update handling via a single top-level `pkgmgr update` command, removing ambiguous update paths.
|
||||||
|
- Improved update reliability by routing all update logic through a central UpdateManager.
|
||||||
|
- Renamed system update flag from `--system-update` to `--system` for clarity and consistency.
|
||||||
|
- Made mirror handling explicit and safer by separating setup, check, and provision responsibilities.
|
||||||
|
- Improved credential resolution for remote providers (environment → keyring → interactive).
|
||||||
|
|
||||||
|
*** Added ***
|
||||||
|
- Optional system updates via `pkgmgr update --system` (Arch, Debian/Ubuntu, Fedora/RHEL).
|
||||||
|
- `pkgmgr install --update` to force re-running installers and refresh existing installations.
|
||||||
|
- Remote repository provisioning for mirrors on supported providers.
|
||||||
|
- Extended end-to-end test coverage for update and mirror workflows.
|
||||||
|
|
||||||
|
*** Fixed ***
|
||||||
|
- Resolved “Unknown repos command: update” errors after CLI refactoring.
|
||||||
|
- Improved Nix update stability and reduced CI failures caused by transient rate limits.
|
||||||
|
|
||||||
|
|
||||||
## [1.5.0] - 2025-12-13
|
## [1.5.0] - 2025-12-13
|
||||||
|
|
||||||
* - Commands now show live output while running, making long operations easier to follow
|
* - Commands now show live output while running, making long operations easier to follow
|
||||||
|
|||||||
2
MIRRORS
2
MIRRORS
@@ -1,3 +1,3 @@
|
|||||||
git@github.com:kevinveenbirkenbach/package-manager.git
|
git@github.com:kevinveenbirkenbach/package-manager.git
|
||||||
ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git
|
ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git
|
||||||
ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git
|
ssh://git@code.infinito.nexus:2201/kevinveenbirkenbach/pkgmgr.git
|
||||||
|
|||||||
2
Makefile
2
Makefile
@@ -44,7 +44,7 @@ install:
|
|||||||
# ------------------------------------------------------------
|
# ------------------------------------------------------------
|
||||||
|
|
||||||
# Default: keep current auto-detection behavior
|
# Default: keep current auto-detection behavior
|
||||||
setup: setup-nix setup-venv
|
setup: setup-venv
|
||||||
|
|
||||||
# Explicit: developer setup (Python venv + shell RC + install)
|
# Explicit: developer setup (Python venv + shell RC + install)
|
||||||
setup-venv: setup-nix
|
setup-venv: setup-nix
|
||||||
|
|||||||
@@ -32,7 +32,7 @@
|
|||||||
rec {
|
rec {
|
||||||
pkgmgr = pyPkgs.buildPythonApplication {
|
pkgmgr = pyPkgs.buildPythonApplication {
|
||||||
pname = "package-manager";
|
pname = "package-manager";
|
||||||
version = "1.5.0";
|
version = "1.6.4";
|
||||||
|
|
||||||
# Use the git repo as source
|
# Use the git repo as source
|
||||||
src = ./.;
|
src = ./.;
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>
|
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>
|
||||||
|
|
||||||
pkgname=package-manager
|
pkgname=package-manager
|
||||||
pkgver=0.9.1
|
pkgver=1.6.4
|
||||||
pkgrel=1
|
pkgrel=1
|
||||||
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
|
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
|
||||||
arch=('any')
|
arch=('any')
|
||||||
|
|||||||
@@ -1,3 +1,18 @@
|
|||||||
|
package-manager (1.6.4-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
|
||||||
|
* More stable launcher behavior in packaged and virtual-env setups.
|
||||||
|
* Enhanced mirror and remote handling: repository owner/name are derived from URLs, with smoother provisioning and clearer credential handling.
|
||||||
|
* More reliable releases and artifacts due to safer CI behavior when no version tag is present.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 19:33:07 +0100
|
||||||
|
|
||||||
|
package-manager (1.6.3-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 13:39:52 +0100
|
||||||
|
|
||||||
package-manager (0.9.1-1) unstable; urgency=medium
|
package-manager (0.9.1-1) unstable; urgency=medium
|
||||||
|
|
||||||
* * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
|
* * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
Name: package-manager
|
Name: package-manager
|
||||||
Version: 0.9.1
|
Version: 1.6.4
|
||||||
Release: 1%{?dist}
|
Release: 1%{?dist}
|
||||||
Summary: Wrapper that runs Kevin's package-manager via Nix flake
|
Summary: Wrapper that runs Kevin's package-manager via Nix flake
|
||||||
|
|
||||||
@@ -74,6 +74,15 @@ echo ">>> package-manager removed. Nix itself was not removed."
|
|||||||
/usr/lib/package-manager/
|
/usr/lib/package-manager/
|
||||||
|
|
||||||
%changelog
|
%changelog
|
||||||
|
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.6.4-1
|
||||||
|
- * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
|
||||||
|
* More stable launcher behavior in packaged and virtual-env setups.
|
||||||
|
* Enhanced mirror and remote handling: repository owner/name are derived from URLs, with smoother provisioning and clearer credential handling.
|
||||||
|
* More reliable releases and artifacts due to safer CI behavior when no version tag is present.
|
||||||
|
|
||||||
|
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.6.3-1
|
||||||
|
- ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations.
|
||||||
|
|
||||||
* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.9.1-1
|
* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.9.1-1
|
||||||
- * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
|
- * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
|
||||||
* Split virgin tests into root/user workflows; stabilized Nix installer across distros; improved test scripts with dynamic distro selection and isolated Nix stores.
|
* Split virgin tests into root/user workflows; stabilized Nix installer across distros; improved test scripts with dynamic distro selection and isolated Nix stores.
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
|
|||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "package-manager"
|
name = "package-manager"
|
||||||
version = "1.5.0"
|
version = "1.6.4"
|
||||||
description = "Kevin's package-manager tool (pkgmgr)"
|
description = "Kevin's package-manager tool (pkgmgr)"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
requires-python = ">=3.9"
|
requires-python = ">=3.9"
|
||||||
@@ -19,7 +19,8 @@ authors = [
|
|||||||
|
|
||||||
# Base runtime dependencies
|
# Base runtime dependencies
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"PyYAML>=6.0"
|
"PyYAML>=6.0",
|
||||||
|
"tomli; python_version < \"3.11\"",
|
||||||
]
|
]
|
||||||
|
|
||||||
[project.urls]
|
[project.urls]
|
||||||
|
|||||||
@@ -2,6 +2,16 @@
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
FLAKE_DIR="/usr/lib/package-manager"
|
FLAKE_DIR="/usr/lib/package-manager"
|
||||||
|
NIX_LIB_DIR="${FLAKE_DIR}/nix/lib"
|
||||||
|
RETRY_LIB="${NIX_LIB_DIR}/retry_403.sh"
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Hard requirement: retry helper must exist (fail if missing)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
if [[ ! -f "${RETRY_LIB}" ]]; then
|
||||||
|
echo "[launcher] ERROR: Required retry helper not found: ${RETRY_LIB}" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# Try to ensure that "nix" is on PATH (common locations + container user)
|
# Try to ensure that "nix" is on PATH (common locations + container user)
|
||||||
@@ -32,9 +42,13 @@ if ! command -v nix >/dev/null 2>&1; then
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# Primary path: use Nix flake if available
|
# Primary path: use Nix flake if available (with GitHub 403 retry)
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
if command -v nix >/dev/null 2>&1; then
|
if declare -F run_with_github_403_retry >/dev/null; then
|
||||||
|
# shellcheck source=./scripts/nix/lib/retry_403.sh
|
||||||
|
source "${RETRY_LIB}"
|
||||||
|
exec run_with_github_403_retry nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
|
||||||
|
else
|
||||||
exec nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
|
exec nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
|||||||
@@ -1,32 +1,49 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
IMAGE="pkgmgr-$PKGMGR_DISTRO"
|
IMAGE="pkgmgr-${PKGMGR_DISTRO}"
|
||||||
|
|
||||||
echo
|
echo
|
||||||
echo "------------------------------------------------------------"
|
echo "------------------------------------------------------------"
|
||||||
echo ">>> Testing VENV: $IMAGE"
|
echo ">>> Testing VENV: ${IMAGE}"
|
||||||
echo "------------------------------------------------------------"
|
echo "------------------------------------------------------------"
|
||||||
|
|
||||||
echo "[test-env-virtual] Inspect image metadata:"
|
echo "[test-env-virtual] Inspect image metadata:"
|
||||||
docker image inspect "$IMAGE" | sed -n '1,40p'
|
docker image inspect "${IMAGE}" | sed -n '1,40p'
|
||||||
|
|
||||||
echo "[test-env-virtual] Running: docker run --rm --entrypoint pkgmgr $IMAGE --help"
|
|
||||||
echo
|
echo
|
||||||
|
|
||||||
# Run the command and capture the output
|
# ------------------------------------------------------------
|
||||||
|
# Run VENV-based pkgmgr test inside container
|
||||||
|
# ------------------------------------------------------------
|
||||||
if OUTPUT=$(docker run --rm \
|
if OUTPUT=$(docker run --rm \
|
||||||
-e REINSTALL_PKGMGR=1 \
|
-e REINSTALL_PKGMGR=1 \
|
||||||
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
|
||||||
-v "$(pwd):/src" \
|
-v "$(pwd):/src" \
|
||||||
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
-w /src \
|
||||||
"$IMAGE" 2>&1); then
|
"${IMAGE}" \
|
||||||
|
bash -lc '
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
echo "[test-env-virtual] Installing pkgmgr (distro package)..."
|
||||||
|
make install
|
||||||
|
|
||||||
|
echo "[test-env-virtual] Setting up Python venv..."
|
||||||
|
make setup-venv
|
||||||
|
|
||||||
|
echo "[test-env-virtual] Activating venv..."
|
||||||
|
. "$HOME/.venvs/pkgmgr/bin/activate"
|
||||||
|
|
||||||
|
echo "[test-env-virtual] Using pkgmgr from:"
|
||||||
|
command -v pkgmgr
|
||||||
|
pkgmgr --help
|
||||||
|
' 2>&1); then
|
||||||
|
|
||||||
echo "$OUTPUT"
|
echo "$OUTPUT"
|
||||||
echo
|
echo
|
||||||
echo "[test-env-virtual] SUCCESS: $IMAGE responded to 'pkgmgr --help'"
|
echo "[test-env-virtual] SUCCESS: venv-based pkgmgr works in ${IMAGE}"
|
||||||
|
|
||||||
else
|
else
|
||||||
echo "$OUTPUT"
|
echo "$OUTPUT"
|
||||||
echo
|
echo
|
||||||
echo "[test-env-virtual] ERROR: $IMAGE failed to run 'pkgmgr --help'"
|
echo "[test-env-virtual] ERROR: venv-based pkgmgr failed in ${IMAGE}"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
@@ -28,7 +28,7 @@ from pkgmgr.actions.install.installers.os_packages import (
|
|||||||
DebianControlInstaller,
|
DebianControlInstaller,
|
||||||
RpmSpecInstaller,
|
RpmSpecInstaller,
|
||||||
)
|
)
|
||||||
from pkgmgr.actions.install.installers.nix_flake import (
|
from pkgmgr.actions.install.installers.nix import (
|
||||||
NixFlakeInstaller,
|
NixFlakeInstaller,
|
||||||
)
|
)
|
||||||
from pkgmgr.actions.install.installers.python import PythonInstaller
|
from pkgmgr.actions.install.installers.python import PythonInstaller
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ pkgmgr.actions.install.installers.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
from pkgmgr.actions.install.installers.base import BaseInstaller # noqa: F401
|
from pkgmgr.actions.install.installers.base import BaseInstaller # noqa: F401
|
||||||
from pkgmgr.actions.install.installers.nix_flake import NixFlakeInstaller # noqa: F401
|
from pkgmgr.actions.install.installers.nix import NixFlakeInstaller # noqa: F401
|
||||||
from pkgmgr.actions.install.installers.python import PythonInstaller # noqa: F401
|
from pkgmgr.actions.install.installers.python import PythonInstaller # noqa: F401
|
||||||
from pkgmgr.actions.install.installers.makefile import MakefileInstaller # noqa: F401
|
from pkgmgr.actions.install.installers.makefile import MakefileInstaller # noqa: F401
|
||||||
|
|
||||||
|
|||||||
4
src/pkgmgr/actions/install/installers/nix/__init__.py
Normal file
4
src/pkgmgr/actions/install/installers/nix/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
from .installer import NixFlakeInstaller
|
||||||
|
from .retry import RetryPolicy
|
||||||
|
|
||||||
|
__all__ = ["NixFlakeInstaller", "RetryPolicy"]
|
||||||
100
src/pkgmgr/actions/install/installers/nix/conflicts.py
Normal file
100
src/pkgmgr/actions/install/installers/nix/conflicts.py
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING, List
|
||||||
|
|
||||||
|
from .profile import NixProfileInspector
|
||||||
|
from .retry import GitHubRateLimitRetry
|
||||||
|
from .runner import CommandRunner
|
||||||
|
from .textparse import NixConflictTextParser
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from pkgmgr.actions.install.context import RepoContext
|
||||||
|
|
||||||
|
|
||||||
|
class NixConflictResolver:
|
||||||
|
"""
|
||||||
|
Resolves nix profile file conflicts by:
|
||||||
|
1. Parsing conflicting store paths from stderr
|
||||||
|
2. Mapping them to profile remove tokens via `nix profile list --json`
|
||||||
|
3. Removing those tokens deterministically
|
||||||
|
4. Retrying install
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
runner: CommandRunner,
|
||||||
|
retry: GitHubRateLimitRetry,
|
||||||
|
profile: NixProfileInspector,
|
||||||
|
) -> None:
|
||||||
|
self._runner = runner
|
||||||
|
self._retry = retry
|
||||||
|
self._profile = profile
|
||||||
|
self._parser = NixConflictTextParser()
|
||||||
|
|
||||||
|
def resolve(
|
||||||
|
self,
|
||||||
|
ctx: "RepoContext",
|
||||||
|
install_cmd: str,
|
||||||
|
stdout: str,
|
||||||
|
stderr: str,
|
||||||
|
*,
|
||||||
|
output: str,
|
||||||
|
max_rounds: int = 10,
|
||||||
|
) -> bool:
|
||||||
|
quiet = bool(getattr(ctx, "quiet", False))
|
||||||
|
combined = f"{stdout}\n{stderr}"
|
||||||
|
|
||||||
|
for _ in range(max_rounds):
|
||||||
|
# 1) Extract conflicting store prefixes from nix error output
|
||||||
|
store_prefixes = self._parser.existing_store_prefixes(combined)
|
||||||
|
|
||||||
|
# 2) Resolve them to concrete remove tokens
|
||||||
|
tokens: List[str] = self._profile.find_remove_tokens_for_store_prefixes(
|
||||||
|
ctx,
|
||||||
|
self._runner,
|
||||||
|
store_prefixes,
|
||||||
|
)
|
||||||
|
|
||||||
|
# 3) Fallback: output-name based lookup (also covers nix suggesting: `nix profile remove pkgmgr`)
|
||||||
|
if not tokens:
|
||||||
|
tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
|
||||||
|
|
||||||
|
if tokens:
|
||||||
|
if not quiet:
|
||||||
|
print(
|
||||||
|
"[nix] conflict detected; removing existing profile entries: "
|
||||||
|
+ ", ".join(tokens)
|
||||||
|
)
|
||||||
|
|
||||||
|
for t in tokens:
|
||||||
|
# tokens may contain things like "pkgmgr" or "pkgmgr-1" or quoted tokens (we keep raw)
|
||||||
|
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
|
||||||
|
|
||||||
|
res = self._retry.run_with_retry(ctx, self._runner, install_cmd)
|
||||||
|
if res.returncode == 0:
|
||||||
|
return True
|
||||||
|
|
||||||
|
combined = f"{res.stdout}\n{res.stderr}"
|
||||||
|
continue
|
||||||
|
|
||||||
|
# 4) Last-resort fallback: use textual remove tokens from stderr (“nix profile remove X”)
|
||||||
|
tokens = self._parser.remove_tokens(combined)
|
||||||
|
if tokens:
|
||||||
|
if not quiet:
|
||||||
|
print("[nix] fallback remove tokens: " + ", ".join(tokens))
|
||||||
|
|
||||||
|
for t in tokens:
|
||||||
|
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
|
||||||
|
|
||||||
|
res = self._retry.run_with_retry(ctx, self._runner, install_cmd)
|
||||||
|
if res.returncode == 0:
|
||||||
|
return True
|
||||||
|
|
||||||
|
combined = f"{res.stdout}\n{res.stderr}"
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not quiet:
|
||||||
|
print("[nix] conflict detected but could not resolve profile entries to remove.")
|
||||||
|
return False
|
||||||
|
|
||||||
|
return False
|
||||||
229
src/pkgmgr/actions/install/installers/nix/installer.py
Normal file
229
src/pkgmgr/actions/install/installers/nix/installer.py
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
from typing import TYPE_CHECKING, List, Tuple
|
||||||
|
|
||||||
|
from pkgmgr.actions.install.installers.base import BaseInstaller
|
||||||
|
|
||||||
|
from .conflicts import NixConflictResolver
|
||||||
|
from .profile import NixProfileInspector
|
||||||
|
from .retry import GitHubRateLimitRetry, RetryPolicy
|
||||||
|
from .runner import CommandRunner
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from pkgmgr.actions.install.context import RepoContext
|
||||||
|
|
||||||
|
|
||||||
|
class NixFlakeInstaller(BaseInstaller):
|
||||||
|
layer = "nix"
|
||||||
|
FLAKE_FILE = "flake.nix"
|
||||||
|
|
||||||
|
def __init__(self, policy: RetryPolicy | None = None) -> None:
|
||||||
|
self._runner = CommandRunner()
|
||||||
|
self._retry = GitHubRateLimitRetry(policy=policy)
|
||||||
|
self._profile = NixProfileInspector()
|
||||||
|
self._conflicts = NixConflictResolver(self._runner, self._retry, self._profile)
|
||||||
|
|
||||||
|
# Newer nix rejects numeric indices; we learn this at runtime and cache the decision.
|
||||||
|
self._indices_supported: bool | None = None
|
||||||
|
|
||||||
|
def supports(self, ctx: "RepoContext") -> bool:
|
||||||
|
if os.environ.get("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER") == "1":
|
||||||
|
if not ctx.quiet:
|
||||||
|
print(
|
||||||
|
"[INFO] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 – "
|
||||||
|
"skipping NixFlakeInstaller."
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
if shutil.which("nix") is None:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return os.path.exists(os.path.join(ctx.repo_dir, self.FLAKE_FILE))
|
||||||
|
|
||||||
|
def _profile_outputs(self, ctx: "RepoContext") -> List[Tuple[str, bool]]:
|
||||||
|
# (output_name, allow_failure)
|
||||||
|
if ctx.identifier in {"pkgmgr", "package-manager"}:
|
||||||
|
return [("pkgmgr", False), ("default", True)]
|
||||||
|
return [("default", False)]
|
||||||
|
|
||||||
|
def run(self, ctx: "RepoContext") -> None:
|
||||||
|
if not self.supports(ctx):
|
||||||
|
return
|
||||||
|
|
||||||
|
outputs = self._profile_outputs(ctx)
|
||||||
|
|
||||||
|
if not ctx.quiet:
|
||||||
|
msg = (
|
||||||
|
"[nix] flake detected in "
|
||||||
|
f"{ctx.identifier}, ensuring outputs: "
|
||||||
|
+ ", ".join(name for name, _ in outputs)
|
||||||
|
)
|
||||||
|
print(msg)
|
||||||
|
|
||||||
|
for output, allow_failure in outputs:
|
||||||
|
if ctx.force_update:
|
||||||
|
self._force_upgrade_output(ctx, output, allow_failure)
|
||||||
|
else:
|
||||||
|
self._install_only(ctx, output, allow_failure)
|
||||||
|
|
||||||
|
def _installable(self, ctx: "RepoContext", output: str) -> str:
|
||||||
|
return f"{ctx.repo_dir}#{output}"
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------
|
||||||
|
# Core install path
|
||||||
|
# ---------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
|
||||||
|
install_cmd = f"nix profile install {self._installable(ctx, output)}"
|
||||||
|
|
||||||
|
if not ctx.quiet:
|
||||||
|
print(f"[nix] install: {install_cmd}")
|
||||||
|
|
||||||
|
res = self._retry.run_with_retry(ctx, self._runner, install_cmd)
|
||||||
|
if res.returncode == 0:
|
||||||
|
if not ctx.quiet:
|
||||||
|
print(f"[nix] output '{output}' successfully installed.")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Conflict resolver first (handles the common “existing package already provides file” case)
|
||||||
|
if self._conflicts.resolve(
|
||||||
|
ctx,
|
||||||
|
install_cmd,
|
||||||
|
res.stdout,
|
||||||
|
res.stderr,
|
||||||
|
output=output,
|
||||||
|
):
|
||||||
|
if not ctx.quiet:
|
||||||
|
print(f"[nix] output '{output}' successfully installed after conflict cleanup.")
|
||||||
|
return
|
||||||
|
|
||||||
|
if not ctx.quiet:
|
||||||
|
print(
|
||||||
|
f"[nix] install failed for '{output}' (exit {res.returncode}), "
|
||||||
|
"trying upgrade/remove+install..."
|
||||||
|
)
|
||||||
|
|
||||||
|
# If indices are supported, try legacy index-upgrade path.
|
||||||
|
if self._indices_supported is not False:
|
||||||
|
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
|
||||||
|
|
||||||
|
upgraded = False
|
||||||
|
for idx in indices:
|
||||||
|
if self._upgrade_index(ctx, idx):
|
||||||
|
upgraded = True
|
||||||
|
if not ctx.quiet:
|
||||||
|
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
|
||||||
|
|
||||||
|
if upgraded:
|
||||||
|
return
|
||||||
|
|
||||||
|
if indices and not ctx.quiet:
|
||||||
|
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
|
||||||
|
|
||||||
|
for idx in indices:
|
||||||
|
self._remove_index(ctx, idx)
|
||||||
|
|
||||||
|
# If we learned indices are unsupported, immediately fall back below
|
||||||
|
if self._indices_supported is False:
|
||||||
|
self._remove_tokens_for_output(ctx, output)
|
||||||
|
|
||||||
|
else:
|
||||||
|
# indices explicitly unsupported
|
||||||
|
self._remove_tokens_for_output(ctx, output)
|
||||||
|
|
||||||
|
final = self._runner.run(ctx, install_cmd, allow_failure=True)
|
||||||
|
if final.returncode == 0:
|
||||||
|
if not ctx.quiet:
|
||||||
|
print(f"[nix] output '{output}' successfully re-installed.")
|
||||||
|
return
|
||||||
|
|
||||||
|
print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})")
|
||||||
|
if not allow_failure:
|
||||||
|
raise SystemExit(final.returncode)
|
||||||
|
|
||||||
|
print(f"[WARNING] Continuing despite failure of optional output '{output}'.")
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------
|
||||||
|
# force_update path
|
||||||
|
# ---------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
|
||||||
|
# Prefer token path if indices unsupported (new nix)
|
||||||
|
if self._indices_supported is False:
|
||||||
|
self._remove_tokens_for_output(ctx, output)
|
||||||
|
self._install_only(ctx, output, allow_failure)
|
||||||
|
if not ctx.quiet:
|
||||||
|
print(f"[nix] output '{output}' successfully upgraded.")
|
||||||
|
return
|
||||||
|
|
||||||
|
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
|
||||||
|
|
||||||
|
upgraded_any = False
|
||||||
|
for idx in indices:
|
||||||
|
if self._upgrade_index(ctx, idx):
|
||||||
|
upgraded_any = True
|
||||||
|
if not ctx.quiet:
|
||||||
|
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
|
||||||
|
|
||||||
|
if upgraded_any:
|
||||||
|
if not ctx.quiet:
|
||||||
|
print(f"[nix] output '{output}' successfully upgraded.")
|
||||||
|
return
|
||||||
|
|
||||||
|
if indices and not ctx.quiet:
|
||||||
|
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
|
||||||
|
|
||||||
|
for idx in indices:
|
||||||
|
self._remove_index(ctx, idx)
|
||||||
|
|
||||||
|
# If we learned indices are unsupported, also remove by token to actually clear conflicts
|
||||||
|
if self._indices_supported is False:
|
||||||
|
self._remove_tokens_for_output(ctx, output)
|
||||||
|
|
||||||
|
self._install_only(ctx, output, allow_failure)
|
||||||
|
|
||||||
|
if not ctx.quiet:
|
||||||
|
print(f"[nix] output '{output}' successfully upgraded.")
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------
|
||||||
|
# Helpers
|
||||||
|
# ---------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _stderr_says_indices_unsupported(self, stderr: str) -> bool:
|
||||||
|
s = (stderr or "").lower()
|
||||||
|
return "no longer supports indices" in s or "does not support indices" in s
|
||||||
|
|
||||||
|
def _upgrade_index(self, ctx: "RepoContext", idx: int) -> bool:
|
||||||
|
cmd = f"nix profile upgrade --refresh {idx}"
|
||||||
|
res = self._runner.run(ctx, cmd, allow_failure=True)
|
||||||
|
|
||||||
|
if self._stderr_says_indices_unsupported(getattr(res, "stderr", "")):
|
||||||
|
self._indices_supported = False
|
||||||
|
return False
|
||||||
|
|
||||||
|
if self._indices_supported is None:
|
||||||
|
self._indices_supported = True
|
||||||
|
|
||||||
|
return res.returncode == 0
|
||||||
|
|
||||||
|
def _remove_index(self, ctx: "RepoContext", idx: int) -> None:
|
||||||
|
res = self._runner.run(ctx, f"nix profile remove {idx}", allow_failure=True)
|
||||||
|
|
||||||
|
if self._stderr_says_indices_unsupported(getattr(res, "stderr", "")):
|
||||||
|
self._indices_supported = False
|
||||||
|
|
||||||
|
if self._indices_supported is None:
|
||||||
|
self._indices_supported = True
|
||||||
|
|
||||||
|
def _remove_tokens_for_output(self, ctx: "RepoContext", output: str) -> None:
|
||||||
|
tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
|
||||||
|
if not tokens:
|
||||||
|
return
|
||||||
|
|
||||||
|
if not ctx.quiet:
|
||||||
|
print(f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}")
|
||||||
|
|
||||||
|
for t in tokens:
|
||||||
|
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
from .inspector import NixProfileInspector
|
||||||
|
from .models import NixProfileEntry
|
||||||
|
|
||||||
|
__all__ = ["NixProfileInspector", "NixProfileEntry"]
|
||||||
162
src/pkgmgr/actions/install/installers/nix/profile/inspector.py
Normal file
162
src/pkgmgr/actions/install/installers/nix/profile/inspector.py
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any, List, TYPE_CHECKING
|
||||||
|
|
||||||
|
from .matcher import (
|
||||||
|
entry_matches_output,
|
||||||
|
entry_matches_store_path,
|
||||||
|
stable_unique_ints,
|
||||||
|
)
|
||||||
|
from .normalizer import normalize_elements
|
||||||
|
from .parser import parse_profile_list_json
|
||||||
|
from .result import extract_stdout_text
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
# Keep these as TYPE_CHECKING-only to avoid runtime import cycles.
|
||||||
|
from pkgmgr.actions.install.context import RepoContext
|
||||||
|
from pkgmgr.core.command.runner import CommandRunner
|
||||||
|
|
||||||
|
|
||||||
|
class NixProfileInspector:
    """
    Reads and inspects the user's Nix profile list (JSON).

    Public API:
    - list_json()
    - find_installed_indices_for_output() (legacy; may not work on newer nix)
    - find_indices_by_store_path() (legacy; may not work on newer nix)
    - find_remove_tokens_for_output()
    - find_remove_tokens_for_store_prefixes()
    """

    def list_json(self, ctx: "RepoContext", runner: "CommandRunner") -> dict[str, Any]:
        """Run `nix profile list --json` and return the parsed document."""
        result = runner.run(ctx, "nix profile list --json", allow_failure=False)
        return parse_profile_list_json(extract_stdout_text(result))

    # ---------------------------------------------------------------------
    # Legacy index helpers (older nix; newer nix may reject indices)
    # ---------------------------------------------------------------------

    def find_installed_indices_for_output(
        self,
        ctx: "RepoContext",
        runner: "CommandRunner",
        output: str,
    ) -> List[int]:
        """Profile indices of entries matching *output* (order-stable, unique)."""
        entries = normalize_elements(self.list_json(ctx, runner))
        found = [
            entry.index
            for entry in entries
            if entry.index is not None and entry_matches_output(entry, output)
        ]
        return stable_unique_ints(found)

    def find_indices_by_store_path(
        self,
        ctx: "RepoContext",
        runner: "CommandRunner",
        store_path: str,
    ) -> List[int]:
        """Profile indices whose store paths contain *store_path* exactly."""
        needle = (store_path or "").strip()
        if not needle:
            return []

        entries = normalize_elements(self.list_json(ctx, runner))
        found = [
            entry.index
            for entry in entries
            if entry.index is not None and entry_matches_store_path(entry, needle)
        ]
        return stable_unique_ints(found)

    # ---------------------------------------------------------------------
    # Token-based helpers (work with newer nix where indices are rejected)
    # ---------------------------------------------------------------------

    @staticmethod
    def _non_numeric_token(entry: "NixProfileEntry") -> str:
        """Prefer the entry's key, then its name, skipping purely numeric values.

        New nix rejects numeric indices, so numeric-only tokens are unusable.
        """
        for candidate in (entry.key, entry.name):
            candidate = (candidate or "").strip()
            if candidate and not candidate.isdigit():
                return candidate
        return ""

    @staticmethod
    def _stable_unique(tokens: List[str]) -> List[str]:
        """Drop empty tokens and duplicates, preserving first-seen order."""
        seen: set[str] = set()
        unique: List[str] = []
        for token in tokens:
            if token and token not in seen:
                unique.append(token)
                seen.add(token)
        return unique

    def find_remove_tokens_for_output(
        self,
        ctx: "RepoContext",
        runner: "CommandRunner",
        output: str,
    ) -> List[str]:
        """
        Tokens suitable for `nix profile remove` that target *output*.

        The raw output token always comes first because nix itself suggests:
            nix profile remove pkgmgr
        """
        out = (output or "").strip()
        if not out:
            return []

        entries = normalize_elements(self.list_json(ctx, runner))

        tokens: List[str] = [out]  # critical: matches nix's own suggestion for conflicts
        for entry in entries:
            if entry_matches_output(entry, out):
                token = self._non_numeric_token(entry)
                if token:
                    tokens.append(token)

        return self._stable_unique(tokens)

    def find_remove_tokens_for_store_prefixes(
        self,
        ctx: "RepoContext",
        runner: "CommandRunner",
        prefixes: List[str],
    ) -> List[str]:
        """Remove tokens for entries whose store path equals any given prefix."""
        wanted = [(p or "").strip() for p in (prefixes or []) if p]
        wanted = [p for p in wanted if p]
        if not wanted:
            return []

        entries = normalize_elements(self.list_json(ctx, runner))

        tokens: List[str] = []
        for entry in entries:
            if not entry.store_paths:
                continue
            if any(sp == p for sp in entry.store_paths for p in wanted):
                token = self._non_numeric_token(entry)
                if token:
                    tokens.append(token)

        return self._stable_unique(tokens)
|
||||||
62
src/pkgmgr/actions/install/installers/nix/profile/matcher.py
Normal file
62
src/pkgmgr/actions/install/installers/nix/profile/matcher.py
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from .models import NixProfileEntry
|
||||||
|
|
||||||
|
|
||||||
|
def entry_matches_output(entry: NixProfileEntry, output: str) -> bool:
    """
    Heuristically decide whether a profile entry corresponds to *output*.

    *output* is typically a flake output name (e.g. "pkgmgr"); it is compared
    against the entry's name and attrPath using several known patterns.
    """
    wanted = (output or "").strip()
    if not wanted:
        return False

    for candidate in (entry.name, entry.attr_path):
        candidate = (candidate or "").strip()
        if not candidate:
            continue

        if (
            candidate == wanted                      # direct match
            or f"#{wanted}" in candidate             # attrPath contains "#<output>"
            or candidate.endswith(f".{wanted}")      # attrPath ends with ".<output>"
            or candidate.startswith(f"{wanted}-")    # name pattern "<output>-<n>"
        ):
            return True

        # Historical special case: repo is "package-manager" but output is "pkgmgr".
        if wanted == "pkgmgr" and candidate.startswith("package-manager-"):
            return True

    return False
|
||||||
|
|
||||||
|
|
||||||
|
def entry_matches_store_path(entry: NixProfileEntry, store_path: str) -> bool:
    """True when *store_path* (stripped) exactly equals one of the entry's store paths."""
    wanted = (store_path or "").strip()
    return bool(wanted) and any((p or "") == wanted for p in entry.store_paths)
|
||||||
|
|
||||||
|
|
||||||
|
def stable_unique_ints(values: List[int]) -> List[int]:
    """Deduplicate *values* while preserving first-occurrence order."""
    # dict preserves insertion order, so this is an order-stable unique.
    return list(dict.fromkeys(values))
|
||||||
17
src/pkgmgr/actions/install/installers/nix/profile/models.py
Normal file
17
src/pkgmgr/actions/install/installers/nix/profile/models.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class NixProfileEntry:
    """
    Minimal normalized representation of one nix profile element entry.
    """

    key: str                 # raw key in the "elements" JSON mapping
    index: Optional[int]     # numeric profile index, when derivable
    name: str                # entry "name" field ("" when absent)
    attr_path: str           # entry "attrPath" field ("" when absent)
    store_paths: List[str]   # normalized, non-empty store paths
|
||||||
128
src/pkgmgr/actions/install/installers/nix/profile/normalizer.py
Normal file
128
src/pkgmgr/actions/install/installers/nix/profile/normalizer.py
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from typing import Any, Dict, Iterable, List, Optional
|
||||||
|
|
||||||
|
from .models import NixProfileEntry
|
||||||
|
|
||||||
|
|
||||||
|
def coerce_index(key: str, entry: Dict[str, Any]) -> Optional[int]:
    """
    Best-effort extraction of a numeric profile index.

    Nix JSON schema varies: element keys might be "0", "1", ... or names like
    "pkgmgr-1", and some versions carry an explicit index field. Tried in order:
      1. purely numeric key
      2. explicit "index"/"id"/"position" field (int or digit string)
      3. trailing "-<n>" suffix of the key
    Returns None when nothing applies.
    """
    key_text = (key or "").strip()

    # 1) Classic schema: numeric keys.
    if key_text.isdigit():
        try:
            return int(key_text)
        except Exception:
            return None

    # 2) Explicit index fields (schema-dependent).
    for field_name in ("index", "id", "position"):
        value = entry.get(field_name)
        if isinstance(value, int):
            return value
        if isinstance(value, str) and value.strip().isdigit():
            try:
                return int(value.strip())
            except Exception:
                pass

    # 3) Last resort: trailing number from a "<name>-<n>" style key.
    suffix = re.match(r"^.+-(\d+)$", key_text)
    if suffix:
        try:
            return int(suffix.group(1))
        except Exception:
            return None

    return None
|
||||||
|
|
||||||
|
|
||||||
|
def iter_store_paths(entry: Dict[str, Any]) -> Iterable[str]:
    """
    Yield every store path found in a nix profile JSON entry.

    Supported schema variants (nix has shifted formats over time):
    - "storePaths": list of strings
    - "storePaths": a single string (rare)
    - "storePath": a single string (some variants)
    - nested "outputs" dicts carrying a "storePath" (best-effort)

    Non-dict input yields nothing.
    """
    if not isinstance(entry, dict):
        return

    paths = entry.get("storePaths")
    if isinstance(paths, list):
        yield from (p for p in paths if isinstance(p, str))
    elif isinstance(paths, str):
        yield paths

    single = entry.get("storePath")
    if isinstance(single, str):
        yield single

    outputs = entry.get("outputs")
    if isinstance(outputs, dict):
        for value in outputs.values():
            if isinstance(value, dict):
                nested = value.get("storePath")
                if isinstance(nested, str):
                    yield nested
|
||||||
|
|
||||||
|
|
||||||
|
def normalize_store_path(store_path: str) -> str:
    """
    Normalize a store path for matching.

    Currently just strips surrounding whitespace; kept as a hook for any
    future normalization needs.
    """
    return (store_path or "").strip()
|
||||||
|
|
||||||
|
|
||||||
|
def normalize_elements(data: Dict[str, Any]) -> List[NixProfileEntry]:
    """
    Convert `nix profile list --json` data into normalized entries.

    Observed shapes of the "elements" mapping:
    - {"elements": {"0": {...}, "1": {...}}}
    - {"elements": {"pkgmgr-1": {...}, "pkgmgr-2": {...}}}

    A non-dict "elements" value yields []; non-dict entries are skipped.
    """
    elements = data.get("elements")
    if not isinstance(elements, dict):
        return []

    normalized: List[NixProfileEntry] = []

    for raw_key, raw_entry in elements.items():
        if not isinstance(raw_entry, dict):
            continue

        key = str(raw_key)

        # Collect non-empty, whitespace-normalized store paths.
        paths: List[str] = []
        for candidate in iter_store_paths(raw_entry):
            cleaned = normalize_store_path(candidate)
            if cleaned:
                paths.append(cleaned)

        normalized.append(
            NixProfileEntry(
                key=key,
                index=coerce_index(key, raw_entry),
                name=str(raw_entry.get("name", "") or ""),
                attr_path=str(raw_entry.get("attrPath", "") or ""),
                store_paths=paths,
            )
        )

    return normalized
|
||||||
19
src/pkgmgr/actions/install/installers/nix/profile/parser.py
Normal file
19
src/pkgmgr/actions/install/installers/nix/profile/parser.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from typing import Any, Dict
|
||||||
|
|
||||||
|
|
||||||
|
def parse_profile_list_json(raw: str) -> Dict[str, Any]:
    """
    Parse JSON output from `nix profile list --json`.

    Raises SystemExit with a helpful excerpt (first 5000 chars) on parse failure.
    """
    try:
        return json.loads(raw)
    except json.JSONDecodeError as err:
        excerpt = (raw or "")[:5000]
        raise SystemExit(
            f"[nix] Failed to parse `nix profile list --json`: {err}\n{excerpt}"
        ) from err
|
||||||
28
src/pkgmgr/actions/install/installers/nix/profile/result.py
Normal file
28
src/pkgmgr/actions/install/installers/nix/profile/result.py
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
def extract_stdout_text(result: Any) -> str:
    """
    Normalize different runner return types to a stdout string.

    Supported patterns:
    - result is str -> returned as-is
    - result is bytes/bytearray -> decoded UTF-8 (replace errors)
    - result has `.stdout` (str or bytes) -> used
    - fallback: str(result)
    """

    def _as_text(value: Any) -> str | None:
        # Shared str/bytes coercion for both the result and its .stdout.
        if isinstance(value, str):
            return value
        if isinstance(value, (bytes, bytearray)):
            return bytes(value).decode("utf-8", errors="replace")
        return None

    direct = _as_text(result)
    if direct is not None:
        return direct

    via_attr = _as_text(getattr(result, "stdout", None))
    if via_attr is not None:
        return via_attr

    return str(result)
|
||||||
69
src/pkgmgr/actions/install/installers/nix/profile_list.py
Normal file
69
src/pkgmgr/actions/install/installers/nix/profile_list.py
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from typing import TYPE_CHECKING, List, Tuple
|
||||||
|
|
||||||
|
from .runner import CommandRunner
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from pkgmgr.actions.install.context import RepoContext
|
||||||
|
|
||||||
|
|
||||||
|
class NixProfileListReader:
    """Parses plain-text `nix profile list` output into (index, store-prefix) pairs."""

    def __init__(self, runner: CommandRunner) -> None:
        self._runner = runner

    @staticmethod
    def _store_prefix(path: str) -> str:
        """Trim a store path down to its /nix/store/<hash>-<name> prefix."""
        text = (path or "").strip()
        match = re.match(r"^(/nix/store/[0-9a-z]{32}-[^/ \t]+)", text)
        return match.group(1) if match else text

    def entries(self, ctx: "RepoContext") -> List[Tuple[int, str]]:
        """Return unique (index, store-prefix) pairs; [] when listing fails."""
        res = self._runner.run(ctx, "nix profile list", allow_failure=True)
        if res.returncode != 0:
            return []

        # An index column followed (possibly after other columns) by a store path.
        line_re = re.compile(
            r"^\s*(\d+)\s+.*?(/nix/store/[0-9a-z]{32}-[^/ \t]+)",
            re.MULTILINE,
        )

        collected: List[Tuple[int, str]] = []
        for match in line_re.finditer(res.stdout or ""):
            try:
                index = int(match.group(1))
            except Exception:
                continue
            collected.append((index, self._store_prefix(match.group(2))))

        # Keep the first occurrence of each index, preserving order.
        seen: set[int] = set()
        unique: List[Tuple[int, str]] = []
        for index, prefix in collected:
            if index not in seen:
                seen.add(index)
                unique.append((index, prefix))

        return unique

    def indices_matching_store_prefixes(self, ctx: "RepoContext", prefixes: List[str]) -> List[int]:
        """Indices whose store prefix equals one of the given (normalized) prefixes."""
        wanted = [self._store_prefix(p) for p in prefixes if p]
        wanted = [p for p in wanted if p]
        if not wanted:
            return []

        matched = [index for index, prefix in self.entries(ctx) if prefix in wanted]

        seen: set[int] = set()
        unique: List[int] = []
        for index in matched:
            if index not in seen:
                seen.add(index)
                unique.append(index)

        return unique
|
||||||
87
src/pkgmgr/actions/install/installers/nix/retry.py
Normal file
87
src/pkgmgr/actions/install/installers/nix/retry.py
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import random
|
||||||
|
import time
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Iterable, TYPE_CHECKING
|
||||||
|
|
||||||
|
from .types import RunResult
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from pkgmgr.actions.install.context import RepoContext
|
||||||
|
from .runner import CommandRunner
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class RetryPolicy:
    """Backoff configuration for GitHub rate-limit retries."""

    max_attempts: int = 7           # total tries before giving up
    base_delay_seconds: int = 30    # seed for the Fibonacci backoff
    jitter_seconds_min: int = 0     # inclusive lower bound of random jitter
    jitter_seconds_max: int = 60    # inclusive upper bound of random jitter
|
||||||
|
|
||||||
|
|
||||||
|
class GitHubRateLimitRetry:
    """
    Retries nix install commands only when the error looks like a GitHub API
    rate limit (HTTP 403).
    Backoff: Fibonacci(base, base, ...) + random jitter.
    """

    def __init__(self, policy: RetryPolicy | None = None) -> None:
        self._policy = policy or RetryPolicy()

    def run_with_retry(
        self,
        ctx: "RepoContext",
        runner: "CommandRunner",
        install_cmd: str,
    ) -> RunResult:
        """Run *install_cmd*, retrying on rate-limit errors; returns the last result."""
        quiet = bool(getattr(ctx, "quiet", False))
        schedule = list(
            self._fibonacci_backoff(self._policy.base_delay_seconds, self._policy.max_attempts)
        )

        outcome: RunResult | None = None

        for attempt, base_delay in enumerate(schedule, start=1):
            if not quiet:
                print(f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}")

            outcome = runner.run(ctx, install_cmd, allow_failure=True)
            if outcome.returncode == 0:
                return outcome

            # A real failure (not rate limiting) is not worth retrying.
            if not self._is_github_rate_limit_error(f"{outcome.stdout}\n{outcome.stderr}"):
                return outcome

            if attempt >= self._policy.max_attempts:
                break

            jitter = random.randint(self._policy.jitter_seconds_min, self._policy.jitter_seconds_max)
            wait_time = base_delay + jitter

            if not quiet:
                print(
                    "[nix] GitHub rate limit detected (403). "
                    f"Retrying in {wait_time}s (base={base_delay}s, jitter={jitter}s)..."
                )

            time.sleep(wait_time)

        return outcome if outcome is not None else RunResult(returncode=1, stdout="", stderr="nix install retry failed")

    @staticmethod
    def _is_github_rate_limit_error(text: str) -> bool:
        """Match known GitHub 403 / rate-limit phrasings (case-insensitive)."""
        lowered = (text or "").lower()
        needles = (
            "http error 403",
            "rate limit exceeded",
            "github api rate limit",
            "api rate limit exceeded",
        )
        return any(needle in lowered for needle in needles)

    @staticmethod
    def _fibonacci_backoff(base: int, attempts: int) -> Iterable[int]:
        """Yield a Fibonacci-like delay sequence seeded with (base, base)."""
        current, following = base, base
        for _ in range(max(1, attempts)):
            yield current
            current, following = following, current + following
|
||||||
64
src/pkgmgr/actions/install/installers/nix/runner.py
Normal file
64
src/pkgmgr/actions/install/installers/nix/runner.py
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from .types import RunResult
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from pkgmgr.actions.install.context import RepoContext
|
||||||
|
|
||||||
|
class CommandRunner:
    """
    Executes commands (shell=True) inside a repository directory (if provided).
    Supports preview mode and compact failure output logging.
    """

    def run(self, ctx: "RepoContext", cmd: str, allow_failure: bool) -> RunResult:
        """Run *cmd*; raise SystemExit on failure unless *allow_failure* is set."""
        workdir = getattr(ctx, "repo_dir", None) or getattr(ctx, "repo_path", None)
        quiet = bool(getattr(ctx, "quiet", False))

        # Preview mode never executes; it only echoes the command.
        if bool(getattr(ctx, "preview", False)):
            if not quiet:
                print(f"[preview] {cmd}")
            return RunResult(returncode=0, stdout="", stderr="")

        try:
            completed = subprocess.run(
                cmd,
                shell=True,
                cwd=workdir,
                check=False,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
            )
        except Exception as exc:
            if not allow_failure:
                raise
            return RunResult(returncode=1, stdout="", stderr=str(exc))

        outcome = RunResult(
            returncode=completed.returncode,
            stdout=completed.stdout or "",
            stderr=completed.stderr or "",
        )

        if outcome.returncode != 0:
            if not quiet:
                self._print_compact_failure(outcome)
            if not allow_failure:
                raise SystemExit(outcome.returncode)

        return outcome

    @staticmethod
    def _print_compact_failure(res: RunResult) -> None:
        """Print the last lines of stdout (20) and stderr (40) of a failed run."""
        stdout_tail = (res.stdout or "").strip()
        stderr_tail = (res.stderr or "").strip()

        if stdout_tail:
            print("[nix] stdout (last lines):")
            print("\n".join(stdout_tail.splitlines()[-20:]))

        if stderr_tail:
            print("[nix] stderr (last lines):")
            print("\n".join(stderr_tail.splitlines()[-40:]))
|
||||||
76
src/pkgmgr/actions/install/installers/nix/textparse.py
Normal file
76
src/pkgmgr/actions/install/installers/nix/textparse.py
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
|
||||||
|
class NixConflictTextParser:
    """
    Extracts actionable data from nix's file-conflict error text: suggested
    `nix profile remove <token>` lines, and the store paths of the package
    that already provides the conflicting files.
    """

    @staticmethod
    def _store_prefix(path: str) -> str:
        """Trim a store path down to its /nix/store/<hash>-<name> prefix."""
        text = (path or "").strip()
        match = re.match(r"^(/nix/store/[0-9a-z]{32}-[^/ \t]+)", text)
        return match.group(1) if match else text

    def remove_tokens(self, text: str) -> List[str]:
        """Collect unique tokens from suggested `nix profile remove ...` lines."""
        suggestion_re = re.compile(
            r"^\s*nix profile remove\s+([^\s'\"`]+|'[^']+'|\"[^\"]+\")\s*$",
            re.MULTILINE,
        )

        found: List[str] = []
        for match in suggestion_re.finditer(text or ""):
            token = (match.group(1) or "").strip()
            # Drop one pair of matching surrounding quotes.
            if token[:1] == token[-1:] and token[:1] in {"'", '"'}:
                token = token[1:-1]
            if token:
                found.append(token)

        seen: set[str] = set()
        unique: List[str] = []
        for token in found:
            if token not in seen:
                seen.add(token)
                unique.append(token)

        return unique

    def existing_store_prefixes(self, text: str) -> List[str]:
        """
        Store prefixes listed under "An existing package already provides ..."
        in a conflict message, deduplicated in order of appearance.
        """
        store_re = re.compile(r"^\s*(/nix/store/[0-9a-z]{32}-[^ \t]+)")

        collecting = False
        raw_paths: List[str] = []

        for raw_line in (text or "").splitlines():
            stripped = raw_line.strip()

            if "An existing package already provides the following file" in stripped:
                collecting = True
                continue

            # The "new package" section ends the existing-package listing.
            if "This is the conflicting file from the new package" in stripped:
                collecting = False
                continue

            if collecting:
                match = store_re.match(raw_line)
                if match:
                    raw_paths.append(match.group(1))

        seen: set[str] = set()
        unique: List[str] = []
        for path in raw_paths:
            prefix = self._store_prefix(path)
            if prefix and prefix not in seen:
                seen.add(prefix)
                unique.append(prefix)

        return unique
|
||||||
10
src/pkgmgr/actions/install/installers/nix/types.py
Normal file
10
src/pkgmgr/actions/install/installers/nix/types.py
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class RunResult:
    """Immutable outcome of one executed command."""

    returncode: int   # process exit status (0 = success)
    stdout: str       # captured standard output ("" when none)
    stderr: str       # captured standard error ("" when none)
|
||||||
@@ -1,238 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
from typing import TYPE_CHECKING, List, Tuple
|
|
||||||
|
|
||||||
from pkgmgr.actions.install.installers.base import BaseInstaller
|
|
||||||
from pkgmgr.core.command.run import run_command
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from pkgmgr.actions.install.context import RepoContext
|
|
||||||
|
|
||||||
|
|
||||||
class NixFlakeInstaller(BaseInstaller):
|
|
||||||
layer = "nix"
|
|
||||||
FLAKE_FILE = "flake.nix"
|
|
||||||
|
|
||||||
def supports(self, ctx: "RepoContext") -> bool:
|
|
||||||
if os.environ.get("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER") == "1":
|
|
||||||
if not ctx.quiet:
|
|
||||||
print("[INFO] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 – skipping NixFlakeInstaller.")
|
|
||||||
return False
|
|
||||||
|
|
||||||
if shutil.which("nix") is None:
|
|
||||||
return False
|
|
||||||
|
|
||||||
return os.path.exists(os.path.join(ctx.repo_dir, self.FLAKE_FILE))
|
|
||||||
|
|
||||||
def _profile_outputs(self, ctx: "RepoContext") -> List[Tuple[str, bool]]:
|
|
||||||
# (output_name, allow_failure)
|
|
||||||
if ctx.identifier in {"pkgmgr", "package-manager"}:
|
|
||||||
return [("pkgmgr", False), ("default", True)]
|
|
||||||
return [("default", False)]
|
|
||||||
|
|
||||||
def _installable(self, ctx: "RepoContext", output: str) -> str:
|
|
||||||
return f"{ctx.repo_dir}#{output}"
|
|
||||||
|
|
||||||
def _run(self, ctx: "RepoContext", cmd: str, allow_failure: bool = True):
|
|
||||||
return run_command(
|
|
||||||
cmd,
|
|
||||||
cwd=ctx.repo_dir,
|
|
||||||
preview=ctx.preview,
|
|
||||||
allow_failure=allow_failure,
|
|
||||||
)
|
|
||||||
|
|
||||||
def _profile_list_json(self, ctx: "RepoContext") -> dict:
|
|
||||||
"""
|
|
||||||
Read current Nix profile entries as JSON (best-effort).
|
|
||||||
|
|
||||||
NOTE: Nix versions differ:
|
|
||||||
- Newer: {"elements": [ { "index": 0, "attrPath": "...", ... }, ... ]}
|
|
||||||
- Older: {"elements": [ "nixpkgs#hello", ... ]} (strings)
|
|
||||||
|
|
||||||
We return {} on failure or in preview mode.
|
|
||||||
"""
|
|
||||||
if ctx.preview:
|
|
||||||
return {}
|
|
||||||
|
|
||||||
proc = subprocess.run(
|
|
||||||
["nix", "profile", "list", "--json"],
|
|
||||||
check=False,
|
|
||||||
stdout=subprocess.PIPE,
|
|
||||||
stderr=subprocess.STDOUT,
|
|
||||||
text=True,
|
|
||||||
env=os.environ.copy(),
|
|
||||||
)
|
|
||||||
if proc.returncode != 0:
|
|
||||||
return {}
|
|
||||||
|
|
||||||
try:
|
|
||||||
return json.loads(proc.stdout or "{}")
|
|
||||||
except json.JSONDecodeError:
|
|
||||||
return {}
|
|
||||||
|
|
||||||
def _find_installed_indices_for_output(self, ctx: "RepoContext", output: str) -> List[int]:
|
|
||||||
"""
|
|
||||||
Find installed profile indices for a given output.
|
|
||||||
|
|
||||||
Works across Nix JSON variants:
|
|
||||||
- If elements are dicts: we can extract indices.
|
|
||||||
- If elements are strings: we cannot extract indices -> return [].
|
|
||||||
"""
|
|
||||||
data = self._profile_list_json(ctx)
|
|
||||||
elements = data.get("elements", []) or []
|
|
||||||
|
|
||||||
matches: List[int] = []
|
|
||||||
|
|
||||||
for el in elements:
|
|
||||||
# Legacy JSON format: plain strings -> no index information
|
|
||||||
if not isinstance(el, dict):
|
|
||||||
continue
|
|
||||||
|
|
||||||
idx = el.get("index")
|
|
||||||
if idx is None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
attr_path = el.get("attrPath") or el.get("attr_path") or ""
|
|
||||||
pname = el.get("pname") or ""
|
|
||||||
name = el.get("name") or ""
|
|
||||||
|
|
||||||
if attr_path == output:
|
|
||||||
matches.append(int(idx))
|
|
||||||
continue
|
|
||||||
|
|
||||||
if pname == output or name == output:
|
|
||||||
matches.append(int(idx))
|
|
||||||
continue
|
|
||||||
|
|
||||||
if isinstance(attr_path, str) and attr_path.endswith(f".{output}"):
|
|
||||||
matches.append(int(idx))
|
|
||||||
continue
|
|
||||||
|
|
||||||
return matches
|
|
||||||
|
|
||||||
def _upgrade_index(self, ctx: "RepoContext", index: int) -> bool:
    """Run `nix profile upgrade --refresh <index>`; True on exit code 0."""
    command = f"nix profile upgrade --refresh {index}"
    if not ctx.quiet:
        print(f"[nix] upgrade: {command}")
    result = self._run(ctx, command, allow_failure=True)
    return result.returncode == 0
|
|
||||||
|
|
||||||
def _remove_index(self, ctx: "RepoContext", index: int) -> None:
    """Run `nix profile remove <index>`; failures are tolerated."""
    command = f"nix profile remove {index}"
    if not ctx.quiet:
        print(f"[nix] remove: {command}")
    self._run(ctx, command, allow_failure=True)
|
|
||||||
|
|
||||||
def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
    """
    Install *output* into the profile.

    On failure, fall back to index-based recovery: upgrade existing
    entries when indices are known, otherwise remove them and retry
    the install once.
    """
    installable = self._installable(ctx, output)
    install_cmd = f"nix profile install {installable}"

    if not ctx.quiet:
        print(f"[nix] install: {install_cmd}")

    first_try = self._run(ctx, install_cmd, allow_failure=True)
    if first_try.returncode == 0:
        if not ctx.quiet:
            print(f"[nix] output '{output}' successfully installed.")
        return

    if not ctx.quiet:
        print(
            f"[nix] install failed for '{output}' (exit {first_try.returncode}), "
            "trying index-based upgrade/remove+install..."
        )

    indices = self._find_installed_indices_for_output(ctx, output)

    # 1) Prefer upgrading existing entries (only possible on newer JSON format).
    any_upgraded = False
    for index in indices:
        if self._upgrade_index(ctx, index):
            any_upgraded = True
            if not ctx.quiet:
                print(f"[nix] output '{output}' successfully upgraded (index {index}).")
    if any_upgraded:
        return

    # 2) Drop the matching entries and retry the install.
    if indices and not ctx.quiet:
        print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
    for index in indices:
        self._remove_index(ctx, index)

    retry = self._run(ctx, install_cmd, allow_failure=True)
    if retry.returncode == 0:
        if not ctx.quiet:
            print(f"[nix] output '{output}' successfully re-installed.")
        return

    print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {retry.returncode})")

    if not allow_failure:
        raise SystemExit(retry.returncode)

    print(f"[WARNING] Continuing despite failure of optional output '{output}'.")
|
|
||||||
|
|
||||||
def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
    """
    force_update path:
    - Prefer upgrading existing profile entries via their indices
      (when the JSON format exposes them).
    - If no indices exist (legacy JSON) or upgrading fails, fall back
      to remove + install-only logic.
    """
    indices = self._find_installed_indices_for_output(ctx, output)

    any_upgraded = False
    for index in indices:
        if not self._upgrade_index(ctx, index):
            continue
        any_upgraded = True
        if not ctx.quiet:
            print(f"[nix] output '{output}' successfully upgraded (index {index}).")

    if any_upgraded:
        # Make upgrades visible to tests
        print(f"[nix] output '{output}' successfully upgraded.")
        return

    if indices and not ctx.quiet:
        print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
    for index in indices:
        self._remove_index(ctx, index)

    # Ensure installed (includes its own fallback logic).
    self._install_only(ctx, output, allow_failure)

    # Make upgrades visible to tests (semantic: update requested).
    print(f"[nix] output '{output}' successfully upgraded.")
|
|
||||||
|
|
||||||
def run(self, ctx: "RepoContext") -> None:
    """Ensure every configured flake output, honoring ctx.force_update."""
    if not self.supports(ctx):
        return

    outputs = self._profile_outputs(ctx)

    if not ctx.quiet:
        listing = ", ".join(name for name, _ in outputs)
        print(
            "[nix] flake detected in "
            f"{ctx.identifier}, ensuring outputs: "
            + listing
        )

    for output, allow_failure in outputs:
        handler = self._force_upgrade_output if ctx.force_update else self._install_only
        handler(ctx, output, allow_failure)
|
|
||||||
21
src/pkgmgr/actions/mirror/remote_check.py
Normal file
21
src/pkgmgr/actions/mirror/remote_check.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
# src/pkgmgr/actions/mirror/remote_check.py
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Tuple
|
||||||
|
|
||||||
|
from pkgmgr.core.git import GitError, run_git
|
||||||
|
|
||||||
|
|
||||||
|
def probe_mirror(url: str, repo_dir: str) -> Tuple[bool, str]:
    """
    Probe a remote mirror URL using `git ls-remote`.

    Returns:
        (True, "") on success,
        (False, error_message) on failure.
    """
    try:
        run_git(["ls-remote", url], cwd=repo_dir)
    except GitError as exc:
        return False, str(exc)
    return True, ""
|
||||||
70
src/pkgmgr/actions/mirror/remote_provision.py
Normal file
70
src/pkgmgr/actions/mirror/remote_provision.py
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
# src/pkgmgr/actions/mirror/remote_provision.py
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, ensure_remote_repo
|
||||||
|
from pkgmgr.core.remote_provisioning.ensure import EnsureOptions
|
||||||
|
|
||||||
|
from .context import build_context
|
||||||
|
from .git_remote import determine_primary_remote_url
|
||||||
|
from .types import Repository
|
||||||
|
from .url_utils import normalize_provider_host, parse_repo_from_git_url
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_remote_repository(
    repo: Repository,
    repositories_base_dir: str,
    all_repos: List[Repository],
    preview: bool,
) -> None:
    """
    Ensure the remote repository exists via the provider API.

    Derives host/owner/name from the primary remote URL and delegates
    creation to ensure_remote_repo(); failures are reported, never raised.
    """
    ctx = build_context(repo, repositories_base_dir, all_repos)

    primary_url = determine_primary_remote_url(repo, ctx.resolved_mirrors)
    if not primary_url:
        print("[INFO] No remote URL could be derived; skipping remote provisioning.")
        return

    raw_host, owner, name = parse_repo_from_git_url(primary_url)
    host = normalize_provider_host(raw_host)

    if not (host and owner and name):
        print("[WARN] Could not derive host/owner/repository from URL; cannot ensure remote repo.")
        print(f"    url={primary_url!r}")
        print(f"    host={host!r}, owner={owner!r}, repository={name!r}")
        return

    separator = "------------------------------------------------------------"
    print(separator)
    print(f"[REMOTE ENSURE] {ctx.identifier}")
    print(f"[REMOTE ENSURE] host: {host}")
    print(separator)

    spec = RepoSpec(
        host=str(host),
        owner=str(owner),
        name=str(name),
        private=bool(repo.get("private", True)),
        description=str(repo.get("description", "")),
    )

    # repo["provider"] is a provider *kind* (e.g. "github"/"gitea"), not a host.
    provider_kind = str(repo.get("provider", "")).strip().lower() or None

    try:
        result = ensure_remote_repo(
            spec,
            provider_hint=ProviderHint(kind=provider_kind),
            options=EnsureOptions(
                preview=preview,
                interactive=True,
                allow_prompt=True,
                save_prompt_token_to_keyring=True,
            ),
        )
        print(f"[REMOTE ENSURE] {result.status.upper()}: {result.message}")
        if result.url:
            print(f"[REMOTE ENSURE] URL: {result.url}")
    except Exception as exc:  # noqa: BLE001
        print(f"[ERROR] Remote provisioning failed: {exc}")

    print()
|
||||||
@@ -1,131 +1,20 @@
|
|||||||
# src/pkgmgr/actions/mirror/setup_cmd.py
|
# src/pkgmgr/actions/mirror/setup_cmd.py
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import List, Tuple
|
from typing import List
|
||||||
from urllib.parse import urlparse
|
|
||||||
|
|
||||||
from pkgmgr.core.git import GitError, run_git
|
|
||||||
from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, ensure_remote_repo
|
|
||||||
from pkgmgr.core.remote_provisioning.ensure import EnsureOptions
|
|
||||||
|
|
||||||
from .context import build_context
|
from .context import build_context
|
||||||
from .git_remote import determine_primary_remote_url, ensure_origin_remote
|
from .git_remote import ensure_origin_remote, determine_primary_remote_url
|
||||||
|
from .remote_check import probe_mirror
|
||||||
|
from .remote_provision import ensure_remote_repository
|
||||||
from .types import Repository
|
from .types import Repository
|
||||||
|
|
||||||
|
|
||||||
def _probe_mirror(url: str, repo_dir: str) -> Tuple[bool, str]:
|
|
||||||
"""
|
|
||||||
Probe a remote mirror URL using `git ls-remote`.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
(True, "") on success,
|
|
||||||
(False, error_message) on failure.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
run_git(["ls-remote", url], cwd=repo_dir)
|
|
||||||
return True, ""
|
|
||||||
except GitError as exc:
|
|
||||||
return False, str(exc)
|
|
||||||
|
|
||||||
|
|
||||||
def _host_from_git_url(url: str) -> str:
|
|
||||||
url = (url or "").strip()
|
|
||||||
if not url:
|
|
||||||
return ""
|
|
||||||
|
|
||||||
if "://" in url:
|
|
||||||
parsed = urlparse(url)
|
|
||||||
netloc = (parsed.netloc or "").strip()
|
|
||||||
if "@" in netloc:
|
|
||||||
netloc = netloc.split("@", 1)[1]
|
|
||||||
# keep optional :port
|
|
||||||
return netloc
|
|
||||||
|
|
||||||
# scp-like: git@host:owner/repo.git
|
|
||||||
if "@" in url and ":" in url:
|
|
||||||
after_at = url.split("@", 1)[1]
|
|
||||||
host = after_at.split(":", 1)[0]
|
|
||||||
return host.strip()
|
|
||||||
|
|
||||||
return url.split("/", 1)[0].strip()
|
|
||||||
|
|
||||||
def _ensure_remote_repository(
|
|
||||||
repo: Repository,
|
|
||||||
repositories_base_dir: str,
|
|
||||||
all_repos: List[Repository],
|
|
||||||
preview: bool,
|
|
||||||
) -> None:
|
|
||||||
"""
|
|
||||||
Ensure that the remote repository exists using provider APIs.
|
|
||||||
|
|
||||||
This is ONLY called when ensure_remote=True.
|
|
||||||
"""
|
|
||||||
ctx = build_context(repo, repositories_base_dir, all_repos)
|
|
||||||
resolved_mirrors = ctx.resolved_mirrors
|
|
||||||
|
|
||||||
primary_url = determine_primary_remote_url(repo, resolved_mirrors)
|
|
||||||
if not primary_url:
|
|
||||||
print("[INFO] No remote URL could be derived; skipping remote provisioning.")
|
|
||||||
return
|
|
||||||
|
|
||||||
# IMPORTANT:
|
|
||||||
# - repo["provider"] is typically a provider *kind* (e.g. "github" / "gitea"),
|
|
||||||
# NOT a hostname. We derive the actual host from the remote URL.
|
|
||||||
host = _host_from_git_url(primary_url)
|
|
||||||
owner = repo.get("account")
|
|
||||||
name = repo.get("repository")
|
|
||||||
|
|
||||||
if not host or not owner or not name:
|
|
||||||
print("[WARN] Missing host/account/repository; cannot ensure remote repo.")
|
|
||||||
print(f" host={host!r}, account={owner!r}, repository={name!r}")
|
|
||||||
return
|
|
||||||
|
|
||||||
print("------------------------------------------------------------")
|
|
||||||
print(f"[REMOTE ENSURE] {ctx.identifier}")
|
|
||||||
print(f"[REMOTE ENSURE] host: {host}")
|
|
||||||
print("------------------------------------------------------------")
|
|
||||||
|
|
||||||
spec = RepoSpec(
|
|
||||||
host=str(host),
|
|
||||||
owner=str(owner),
|
|
||||||
name=str(name),
|
|
||||||
private=bool(repo.get("private", True)),
|
|
||||||
description=str(repo.get("description", "")),
|
|
||||||
)
|
|
||||||
|
|
||||||
provider_kind = str(repo.get("provider", "")).strip().lower() or None
|
|
||||||
|
|
||||||
try:
|
|
||||||
result = ensure_remote_repo(
|
|
||||||
spec,
|
|
||||||
provider_hint=ProviderHint(kind=provider_kind),
|
|
||||||
options=EnsureOptions(
|
|
||||||
preview=preview,
|
|
||||||
interactive=True,
|
|
||||||
allow_prompt=True,
|
|
||||||
save_prompt_token_to_keyring=True,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
print(f"[REMOTE ENSURE] {result.status.upper()}: {result.message}")
|
|
||||||
if result.url:
|
|
||||||
print(f"[REMOTE ENSURE] URL: {result.url}")
|
|
||||||
except Exception as exc: # noqa: BLE001
|
|
||||||
# Keep action layer resilient
|
|
||||||
print(f"[ERROR] Remote provisioning failed: {exc}")
|
|
||||||
|
|
||||||
print()
|
|
||||||
|
|
||||||
|
|
||||||
def _setup_local_mirrors_for_repo(
|
def _setup_local_mirrors_for_repo(
|
||||||
repo: Repository,
|
repo: Repository,
|
||||||
repositories_base_dir: str,
|
repositories_base_dir: str,
|
||||||
all_repos: List[Repository],
|
all_repos: List[Repository],
|
||||||
preview: bool,
|
preview: bool,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
|
||||||
Local setup:
|
|
||||||
- Ensure 'origin' remote exists and is sane
|
|
||||||
"""
|
|
||||||
ctx = build_context(repo, repositories_base_dir, all_repos)
|
ctx = build_context(repo, repositories_base_dir, all_repos)
|
||||||
|
|
||||||
print("------------------------------------------------------------")
|
print("------------------------------------------------------------")
|
||||||
@@ -144,17 +33,6 @@ def _setup_remote_mirrors_for_repo(
|
|||||||
preview: bool,
|
preview: bool,
|
||||||
ensure_remote: bool,
|
ensure_remote: bool,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
|
||||||
Remote-side setup / validation.
|
|
||||||
|
|
||||||
Default behavior:
|
|
||||||
- Non-destructive checks using `git ls-remote`.
|
|
||||||
|
|
||||||
Optional behavior:
|
|
||||||
- If ensure_remote=True:
|
|
||||||
* Attempt to create missing repositories via provider API
|
|
||||||
* Uses TokenResolver (ENV -> keyring -> prompt)
|
|
||||||
"""
|
|
||||||
ctx = build_context(repo, repositories_base_dir, all_repos)
|
ctx = build_context(repo, repositories_base_dir, all_repos)
|
||||||
resolved_mirrors = ctx.resolved_mirrors
|
resolved_mirrors = ctx.resolved_mirrors
|
||||||
|
|
||||||
@@ -164,7 +42,7 @@ def _setup_remote_mirrors_for_repo(
|
|||||||
print("------------------------------------------------------------")
|
print("------------------------------------------------------------")
|
||||||
|
|
||||||
if ensure_remote:
|
if ensure_remote:
|
||||||
_ensure_remote_repository(
|
ensure_remote_repository(
|
||||||
repo,
|
repo,
|
||||||
repositories_base_dir=repositories_base_dir,
|
repositories_base_dir=repositories_base_dir,
|
||||||
all_repos=all_repos,
|
all_repos=all_repos,
|
||||||
@@ -178,7 +56,7 @@ def _setup_remote_mirrors_for_repo(
|
|||||||
print()
|
print()
|
||||||
return
|
return
|
||||||
|
|
||||||
ok, error_message = _probe_mirror(primary_url, ctx.repo_dir)
|
ok, error_message = probe_mirror(primary_url, ctx.repo_dir)
|
||||||
if ok:
|
if ok:
|
||||||
print(f"[OK] primary: {primary_url}")
|
print(f"[OK] primary: {primary_url}")
|
||||||
else:
|
else:
|
||||||
@@ -190,7 +68,7 @@ def _setup_remote_mirrors_for_repo(
|
|||||||
return
|
return
|
||||||
|
|
||||||
for name, url in sorted(resolved_mirrors.items()):
|
for name, url in sorted(resolved_mirrors.items()):
|
||||||
ok, error_message = _probe_mirror(url, ctx.repo_dir)
|
ok, error_message = probe_mirror(url, ctx.repo_dir)
|
||||||
if ok:
|
if ok:
|
||||||
print(f"[OK] {name}: {url}")
|
print(f"[OK] {name}: {url}")
|
||||||
else:
|
else:
|
||||||
@@ -210,19 +88,6 @@ def setup_mirrors(
|
|||||||
remote: bool = True,
|
remote: bool = True,
|
||||||
ensure_remote: bool = False,
|
ensure_remote: bool = False,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
|
||||||
Setup mirrors for the selected repositories.
|
|
||||||
|
|
||||||
local:
|
|
||||||
- Configure local Git remotes (ensure 'origin' exists).
|
|
||||||
|
|
||||||
remote:
|
|
||||||
- Non-destructive remote checks using `git ls-remote`.
|
|
||||||
|
|
||||||
ensure_remote:
|
|
||||||
- If True, attempt to create missing remote repositories via provider APIs.
|
|
||||||
- This is explicit and NEVER enabled implicitly.
|
|
||||||
"""
|
|
||||||
for repo in selected_repos:
|
for repo in selected_repos:
|
||||||
if local:
|
if local:
|
||||||
_setup_local_mirrors_for_repo(
|
_setup_local_mirrors_for_repo(
|
||||||
|
|||||||
111
src/pkgmgr/actions/mirror/url_utils.py
Normal file
111
src/pkgmgr/actions/mirror/url_utils.py
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
# src/pkgmgr/actions/mirror/url_utils.py
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
from typing import Optional, Tuple
|
||||||
|
|
||||||
|
|
||||||
|
def hostport_from_git_url(url: str) -> Tuple[str, Optional[str]]:
    """Extract (host, port) from a Git remote URL; port is None when absent."""
    cleaned = (url or "").strip()
    if not cleaned:
        return "", None

    if "://" in cleaned:
        netloc = (urlparse(cleaned).netloc or "").strip()
        if "@" in netloc:
            _, _, netloc = netloc.partition("@")

        # Bracketed IPv6 literal, optionally followed by :port.
        if netloc.startswith("[") and "]" in netloc:
            close = netloc.index("]")
            host = netloc[1:close]
            tail = netloc[close + 1:]
            port = tail[1:] if tail.startswith(":") else None
            return host.strip(), (port.strip() if port else None)

        if ":" in netloc:
            host, _, port = netloc.rpartition(":")
            return host.strip(), (port.strip() or None)

        return netloc.strip(), None

    # SCP-like form: git@host:owner/repo.git
    if "@" in cleaned and ":" in cleaned:
        tail = cleaned.split("@", 1)[1]
        return tail.split(":", 1)[0].strip(), None

    return cleaned.split("/", 1)[0].strip(), None
|
||||||
|
|
||||||
|
|
||||||
|
def normalize_provider_host(host: str) -> str:
    """Normalize a host for provider lookup: unwrap brackets, drop port, lowercase."""
    value = (host or "").strip()
    if not value:
        return ""

    # Unwrap a bracketed IPv6 literal.
    if value.startswith("[") and "]" in value:
        value = value[1:value.index("]")]

    # Drop a single trailing :port (an unbracketed IPv6 has several colons).
    if value.count(":") == 1:
        value, _, _ = value.partition(":")

    return value.strip().lower()
|
||||||
|
|
||||||
|
|
||||||
|
def _strip_dot_git(name: str) -> str:
|
||||||
|
n = (name or "").strip()
|
||||||
|
if n.lower().endswith(".git"):
|
||||||
|
return n[:-4]
|
||||||
|
return n
|
||||||
|
|
||||||
|
|
||||||
|
def parse_repo_from_git_url(url: str) -> Tuple[str, Optional[str], Optional[str]]:
    """
    Parse (host, owner, repo_name) from common Git remote URLs.

    Supports:
    - ssh://git@host:2201/owner/repo.git
    - https://host/owner/repo.git
    - git@host:owner/repo.git
    - host/owner/repo(.git)   (best-effort)

    Returns:
        (host, owner, repo_name) with owner/repo possibly None if not derivable.

    Fix: the SCP-like branch previously did `after_at.split(":", 1)[1]`,
    which raised IndexError when the only ':' preceded the '@'
    (e.g. "user:pass@host"); `partition` degrades gracefully instead.
    """

    def _clean_name(raw: str) -> str:
        # Same behavior as _strip_dot_git: trim whitespace, drop ".git".
        n = (raw or "").strip()
        return n[:-4] if n.lower().endswith(".git") else n

    def _owner_repo(path: str) -> Tuple[Optional[str], Optional[str]]:
        # Split "owner/repo[/...]" and keep the first two segments.
        segments = [seg for seg in path.split("/") if seg]
        if len(segments) < 2:
            return None, None
        return segments[0], _clean_name(segments[1])

    candidate = (url or "").strip()
    if not candidate:
        return "", None, None

    # URL-style (ssh://, https://, http://)
    if "://" in candidate:
        parsed = urlparse(candidate)
        host = (parsed.hostname or "").strip()
        owner, repo_name = _owner_repo((parsed.path or "").strip("/"))
        return host, owner, repo_name

    # SCP-like: git@host:owner/repo.git
    if "@" in candidate and ":" in candidate:
        after_at = candidate.split("@", 1)[1]
        # partition never raises when ':' is absent after the '@'.
        host, _, path = after_at.partition(":")
        owner, repo_name = _owner_repo(path.strip("/"))
        return host.strip(), owner, repo_name

    # Fallback: host/owner/repo.git
    host, _, rest = candidate.partition("/")
    owner, repo_name = _owner_repo(rest.strip("/"))
    return host.strip(), owner, repo_name
|
||||||
@@ -1,10 +1,13 @@
|
|||||||
|
# src/pkgmgr/actions/release/workflow.py
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
from typing import Optional
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from pkgmgr.actions.branch import close_branch
|
from pkgmgr.actions.branch import close_branch
|
||||||
from pkgmgr.core.git import get_current_branch, GitError
|
from pkgmgr.core.git import get_current_branch, GitError
|
||||||
|
from pkgmgr.core.repository.paths import resolve_repo_paths
|
||||||
|
|
||||||
from .files import (
|
from .files import (
|
||||||
update_changelog,
|
update_changelog,
|
||||||
@@ -55,8 +58,12 @@ def _release_impl(
|
|||||||
print(f"New version: {new_ver_str} ({release_type})")
|
print(f"New version: {new_ver_str} ({release_type})")
|
||||||
|
|
||||||
repo_root = os.path.dirname(os.path.abspath(pyproject_path))
|
repo_root = os.path.dirname(os.path.abspath(pyproject_path))
|
||||||
|
paths = resolve_repo_paths(repo_root)
|
||||||
|
|
||||||
|
# --- Update versioned files ------------------------------------------------
|
||||||
|
|
||||||
update_pyproject_version(pyproject_path, new_ver_str, preview=preview)
|
update_pyproject_version(pyproject_path, new_ver_str, preview=preview)
|
||||||
|
|
||||||
changelog_message = update_changelog(
|
changelog_message = update_changelog(
|
||||||
changelog_path,
|
changelog_path,
|
||||||
new_ver_str,
|
new_ver_str,
|
||||||
@@ -64,38 +71,46 @@ def _release_impl(
|
|||||||
preview=preview,
|
preview=preview,
|
||||||
)
|
)
|
||||||
|
|
||||||
flake_path = os.path.join(repo_root, "flake.nix")
|
update_flake_version(paths.flake_nix, new_ver_str, preview=preview)
|
||||||
update_flake_version(flake_path, new_ver_str, preview=preview)
|
|
||||||
|
|
||||||
pkgbuild_path = os.path.join(repo_root, "PKGBUILD")
|
if paths.arch_pkgbuild:
|
||||||
update_pkgbuild_version(pkgbuild_path, new_ver_str, preview=preview)
|
update_pkgbuild_version(paths.arch_pkgbuild, new_ver_str, preview=preview)
|
||||||
|
else:
|
||||||
|
print("[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping.")
|
||||||
|
|
||||||
spec_path = os.path.join(repo_root, "package-manager.spec")
|
if paths.rpm_spec:
|
||||||
update_spec_version(spec_path, new_ver_str, preview=preview)
|
update_spec_version(paths.rpm_spec, new_ver_str, preview=preview)
|
||||||
|
else:
|
||||||
|
print("[INFO] No RPM spec file found. Skipping spec version update.")
|
||||||
|
|
||||||
effective_message: Optional[str] = message
|
effective_message: Optional[str] = message
|
||||||
if effective_message is None and isinstance(changelog_message, str):
|
if effective_message is None and isinstance(changelog_message, str):
|
||||||
if changelog_message.strip():
|
if changelog_message.strip():
|
||||||
effective_message = changelog_message.strip()
|
effective_message = changelog_message.strip()
|
||||||
|
|
||||||
debian_changelog_path = os.path.join(repo_root, "debian", "changelog")
|
|
||||||
package_name = os.path.basename(repo_root) or "package-manager"
|
package_name = os.path.basename(repo_root) or "package-manager"
|
||||||
|
|
||||||
|
if paths.debian_changelog:
|
||||||
update_debian_changelog(
|
update_debian_changelog(
|
||||||
debian_changelog_path,
|
paths.debian_changelog,
|
||||||
|
package_name=package_name,
|
||||||
|
new_version=new_ver_str,
|
||||||
|
message=effective_message,
|
||||||
|
preview=preview,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
print("[INFO] No debian changelog found. Skipping debian/changelog update.")
|
||||||
|
|
||||||
|
if paths.rpm_spec:
|
||||||
|
update_spec_changelog(
|
||||||
|
spec_path=paths.rpm_spec,
|
||||||
package_name=package_name,
|
package_name=package_name,
|
||||||
new_version=new_ver_str,
|
new_version=new_ver_str,
|
||||||
message=effective_message,
|
message=effective_message,
|
||||||
preview=preview,
|
preview=preview,
|
||||||
)
|
)
|
||||||
|
|
||||||
update_spec_changelog(
|
# --- Git commit / tag / push ----------------------------------------------
|
||||||
spec_path=spec_path,
|
|
||||||
package_name=package_name,
|
|
||||||
new_version=new_ver_str,
|
|
||||||
message=effective_message,
|
|
||||||
preview=preview,
|
|
||||||
)
|
|
||||||
|
|
||||||
commit_msg = f"Release version {new_ver_str}"
|
commit_msg = f"Release version {new_ver_str}"
|
||||||
tag_msg = effective_message or commit_msg
|
tag_msg = effective_message or commit_msg
|
||||||
@@ -103,12 +118,12 @@ def _release_impl(
|
|||||||
files_to_add = [
|
files_to_add = [
|
||||||
pyproject_path,
|
pyproject_path,
|
||||||
changelog_path,
|
changelog_path,
|
||||||
flake_path,
|
paths.flake_nix,
|
||||||
pkgbuild_path,
|
paths.arch_pkgbuild,
|
||||||
spec_path,
|
paths.rpm_spec,
|
||||||
debian_changelog_path,
|
paths.debian_changelog,
|
||||||
]
|
]
|
||||||
existing_files = [p for p in files_to_add if p and os.path.exists(p)]
|
existing_files = [p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)]
|
||||||
|
|
||||||
if preview:
|
if preview:
|
||||||
for path in existing_files:
|
for path in existing_files:
|
||||||
|
|||||||
@@ -9,8 +9,13 @@ from pkgmgr.core.repository.dir import get_repo_dir
|
|||||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||||
from pkgmgr.core.git import get_tags
|
from pkgmgr.core.git import get_tags
|
||||||
from pkgmgr.core.version.semver import SemVer, find_latest_version
|
from pkgmgr.core.version.semver import SemVer, find_latest_version
|
||||||
|
from pkgmgr.core.version.installed import (
|
||||||
|
get_installed_python_version,
|
||||||
|
get_installed_nix_profile_version,
|
||||||
|
)
|
||||||
from pkgmgr.core.version.source import (
|
from pkgmgr.core.version.source import (
|
||||||
read_pyproject_version,
|
read_pyproject_version,
|
||||||
|
read_pyproject_project_name,
|
||||||
read_flake_version,
|
read_flake_version,
|
||||||
read_pkgbuild_version,
|
read_pkgbuild_version,
|
||||||
read_debian_changelog_version,
|
read_debian_changelog_version,
|
||||||
@@ -18,10 +23,54 @@ from pkgmgr.core.version.source import (
|
|||||||
read_ansible_galaxy_version,
|
read_ansible_galaxy_version,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
Repository = Dict[str, Any]
|
Repository = Dict[str, Any]
|
||||||
|
|
||||||
|
|
||||||
|
def _print_pkgmgr_self_version() -> None:
    """
    Print version information for pkgmgr itself (installed env + nix profile),
    used when no repository is selected (e.g. user is not inside a repo).
    """

    def _report(label: str, tag: str, info) -> None:
        # Render one "Installed (...)" line; info may be None.
        if info:
            print(f"Installed ({label}): {info.version} ({tag}: {info.name})")
        else:
            print(f"Installed ({label}): <not installed>")

    print("pkgmgr version info")
    print("====================")
    print("\nRepository: <pkgmgr self>")
    print("----------------------------------------")

    # Common distribution/module naming variants.
    dist_names = [
        "package-manager",  # PyPI dist name in your project
        "package_manager",  # module-ish variant
        "pkgmgr",           # console/alias-ish
    ]
    profile_names = [
        "pkgmgr",
        "package-manager",
    ]

    _report("Python env", "dist", get_installed_python_version(*dist_names))
    _report("Nix profile", "match", get_installed_nix_profile_version(*profile_names))

    # Helpful context for debugging "why do versions differ?"
    print(f"Python executable: {sys.executable}")
    print(f"Python prefix: {sys.prefix}")
||||||
|
|
||||||
|
|
||||||
def handle_version(
|
def handle_version(
|
||||||
args,
|
args,
|
||||||
ctx: CLIContext,
|
ctx: CLIContext,
|
||||||
@@ -30,20 +79,39 @@ def handle_version(
|
|||||||
"""
|
"""
|
||||||
Handle the 'version' command.
|
Handle the 'version' command.
|
||||||
|
|
||||||
Shows version information from various sources (git tags, pyproject,
|
Shows version information from:
|
||||||
flake.nix, PKGBUILD, debian, spec, Ansible Galaxy).
|
- Git tags
|
||||||
"""
|
- packaging metadata
|
||||||
|
- installed Python environment
|
||||||
|
- installed Nix profile
|
||||||
|
|
||||||
repo_list = selected
|
Special case:
|
||||||
if not repo_list:
|
- If no repositories are selected (e.g. not in a repo and no identifiers),
|
||||||
print("No repositories selected for version.")
|
print pkgmgr's own installed versions instead of exiting with an error.
|
||||||
sys.exit(1)
|
"""
|
||||||
|
if not selected:
|
||||||
|
_print_pkgmgr_self_version()
|
||||||
|
return
|
||||||
|
|
||||||
print("pkgmgr version info")
|
print("pkgmgr version info")
|
||||||
print("====================")
|
print("====================")
|
||||||
|
|
||||||
for repo in repo_list:
|
for repo in selected:
|
||||||
# Resolve repository directory
|
identifier = get_repo_identifier(repo, ctx.all_repositories)
|
||||||
|
|
||||||
|
python_candidates: list[str] = []
|
||||||
|
nix_candidates: list[str] = [identifier]
|
||||||
|
|
||||||
|
for key in ("pypi", "pip", "python_package", "distribution", "package"):
|
||||||
|
val = repo.get(key)
|
||||||
|
if isinstance(val, str) and val.strip():
|
||||||
|
python_candidates.append(val.strip())
|
||||||
|
|
||||||
|
python_candidates.append(identifier)
|
||||||
|
|
||||||
|
installed_python = get_installed_python_version(*python_candidates)
|
||||||
|
installed_nix = get_installed_nix_profile_version(*nix_candidates)
|
||||||
|
|
||||||
repo_dir = repo.get("directory")
|
repo_dir = repo.get("directory")
|
||||||
if not repo_dir:
|
if not repo_dir:
|
||||||
try:
|
try:
|
||||||
@@ -51,51 +119,79 @@ def handle_version(
|
|||||||
except Exception:
|
except Exception:
|
||||||
repo_dir = None
|
repo_dir = None
|
||||||
|
|
||||||
# If no local clone exists, skip gracefully with info message
|
|
||||||
if not repo_dir or not os.path.isdir(repo_dir):
|
if not repo_dir or not os.path.isdir(repo_dir):
|
||||||
identifier = get_repo_identifier(repo, ctx.all_repositories)
|
|
||||||
print(f"\nRepository: {identifier}")
|
print(f"\nRepository: {identifier}")
|
||||||
print("----------------------------------------")
|
print("----------------------------------------")
|
||||||
print(
|
print(
|
||||||
"[INFO] Skipped: repository directory does not exist "
|
"[INFO] Skipped: repository directory does not exist locally, "
|
||||||
"locally, version detection is not possible."
|
"version detection is not possible."
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if installed_python:
|
||||||
|
print(
|
||||||
|
f"Installed (Python env): {installed_python.version} "
|
||||||
|
f"(dist: {installed_python.name})"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
print("Installed (Python env): <not installed>")
|
||||||
|
|
||||||
|
if installed_nix:
|
||||||
|
print(
|
||||||
|
f"Installed (Nix profile): {installed_nix.version} "
|
||||||
|
f"(match: {installed_nix.name})"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
print("Installed (Nix profile): <not installed>")
|
||||||
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
print(f"\nRepository: {repo_dir}")
|
print(f"\nRepository: {repo_dir}")
|
||||||
print("----------------------------------------")
|
print("----------------------------------------")
|
||||||
|
|
||||||
# 1) Git tags (SemVer)
|
|
||||||
try:
|
try:
|
||||||
tags = get_tags(cwd=repo_dir)
|
tags = get_tags(cwd=repo_dir)
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
print(f"[ERROR] Could not read git tags: {exc}")
|
print(f"[ERROR] Could not read git tags: {exc}")
|
||||||
tags = []
|
tags = []
|
||||||
|
|
||||||
latest_tag_info: Optional[Tuple[str, SemVer]]
|
latest_tag_info: Optional[Tuple[str, SemVer]] = (
|
||||||
latest_tag_info = find_latest_version(tags) if tags else None
|
find_latest_version(tags) if tags else None
|
||||||
|
)
|
||||||
|
|
||||||
if latest_tag_info is None:
|
if latest_tag_info:
|
||||||
latest_tag_str = None
|
tag, ver = latest_tag_info
|
||||||
latest_ver = None
|
print(f"Git (latest SemVer tag): {tag} (parsed: {ver})")
|
||||||
else:
|
else:
|
||||||
latest_tag_str, latest_ver = latest_tag_info
|
print("Git (latest SemVer tag): <none found>")
|
||||||
|
|
||||||
# 2) Packaging / metadata sources
|
|
||||||
pyproject_version = read_pyproject_version(repo_dir)
|
pyproject_version = read_pyproject_version(repo_dir)
|
||||||
|
pyproject_name = read_pyproject_project_name(repo_dir)
|
||||||
flake_version = read_flake_version(repo_dir)
|
flake_version = read_flake_version(repo_dir)
|
||||||
pkgbuild_version = read_pkgbuild_version(repo_dir)
|
pkgbuild_version = read_pkgbuild_version(repo_dir)
|
||||||
debian_version = read_debian_changelog_version(repo_dir)
|
debian_version = read_debian_changelog_version(repo_dir)
|
||||||
spec_version = read_spec_version(repo_dir)
|
spec_version = read_spec_version(repo_dir)
|
||||||
ansible_version = read_ansible_galaxy_version(repo_dir)
|
ansible_version = read_ansible_galaxy_version(repo_dir)
|
||||||
|
|
||||||
# 3) Print version summary
|
if pyproject_name:
|
||||||
if latest_ver is not None:
|
installed_python = get_installed_python_version(
|
||||||
|
pyproject_name, *python_candidates
|
||||||
|
)
|
||||||
|
|
||||||
|
if installed_python:
|
||||||
print(
|
print(
|
||||||
f"Git (latest SemVer tag): {latest_tag_str} (parsed: {latest_ver})"
|
f"Installed (Python env): {installed_python.version} "
|
||||||
|
f"(dist: {installed_python.name})"
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
print("Git (latest SemVer tag): <none found>")
|
print("Installed (Python env): <not installed>")
|
||||||
|
|
||||||
|
if installed_nix:
|
||||||
|
print(
|
||||||
|
f"Installed (Nix profile): {installed_nix.version} "
|
||||||
|
f"(match: {installed_nix.name})"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
print("Installed (Nix profile): <not installed>")
|
||||||
|
|
||||||
print(f"pyproject.toml: {pyproject_version or '<not found>'}")
|
print(f"pyproject.toml: {pyproject_version or '<not found>'}")
|
||||||
print(f"flake.nix: {flake_version or '<not found>'}")
|
print(f"flake.nix: {flake_version or '<not found>'}")
|
||||||
@@ -104,15 +200,16 @@ def handle_version(
|
|||||||
print(f"package-manager.spec: {spec_version or '<not found>'}")
|
print(f"package-manager.spec: {spec_version or '<not found>'}")
|
||||||
print(f"Ansible Galaxy meta: {ansible_version or '<not found>'}")
|
print(f"Ansible Galaxy meta: {ansible_version or '<not found>'}")
|
||||||
|
|
||||||
# 4) Consistency hint (Git tag vs. pyproject)
|
if latest_tag_info and pyproject_version:
|
||||||
if latest_ver is not None and pyproject_version is not None:
|
|
||||||
try:
|
try:
|
||||||
file_ver = SemVer.parse(pyproject_version)
|
file_ver = SemVer.parse(pyproject_version)
|
||||||
if file_ver != latest_ver:
|
if file_ver != latest_tag_info[1]:
|
||||||
print(
|
print(
|
||||||
f"[WARN] Version mismatch: Git={latest_ver}, pyproject={file_ver}"
|
f"[WARN] Version mismatch: "
|
||||||
|
f"Git={latest_tag_info[1]}, pyproject={file_ver}"
|
||||||
)
|
)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
print(
|
print(
|
||||||
f"[WARN] pyproject version {pyproject_version!r} is not valid SemVer."
|
f"[WARN] pyproject version {pyproject_version!r} "
|
||||||
|
f"is not valid SemVer."
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -9,15 +9,33 @@ from ..types import KeyringUnavailableError, TokenRequest, TokenResult
|
|||||||
|
|
||||||
|
|
||||||
def _import_keyring():
|
def _import_keyring():
|
||||||
|
"""
|
||||||
|
Import python-keyring.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
KeyringUnavailableError if:
|
||||||
|
- library is missing
|
||||||
|
- no backend is configured / usable
|
||||||
|
- import fails for any reason
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
import keyring # type: ignore
|
import keyring # type: ignore
|
||||||
|
|
||||||
return keyring
|
|
||||||
except Exception as exc: # noqa: BLE001
|
except Exception as exc: # noqa: BLE001
|
||||||
raise KeyringUnavailableError(
|
raise KeyringUnavailableError(
|
||||||
"python-keyring is not available or no backend is configured."
|
"python-keyring is not installed."
|
||||||
) from exc
|
) from exc
|
||||||
|
|
||||||
|
# Some environments have keyring installed but no usable backend.
|
||||||
|
# We do a lightweight "backend sanity check" by attempting to read the backend.
|
||||||
|
try:
|
||||||
|
_ = keyring.get_keyring()
|
||||||
|
except Exception as exc: # noqa: BLE001
|
||||||
|
raise KeyringUnavailableError(
|
||||||
|
"python-keyring is installed but no usable keyring backend is configured."
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
return keyring
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
@dataclass(frozen=True)
|
||||||
class KeyringTokenProvider:
|
class KeyringTokenProvider:
|
||||||
|
|||||||
@@ -9,6 +9,37 @@ from typing import Optional
|
|||||||
from ..types import TokenRequest, TokenResult
|
from ..types import TokenRequest, TokenResult
|
||||||
|
|
||||||
|
|
||||||
|
def _token_help_url(provider_kind: str, host: str) -> Optional[str]:
|
||||||
|
"""
|
||||||
|
Return a provider-specific URL where a user can create/get an API token.
|
||||||
|
|
||||||
|
Keep this conservative and stable:
|
||||||
|
- GitHub: official token settings URL
|
||||||
|
- Gitea/Forgejo: common settings path on the given host
|
||||||
|
- GitLab: common personal access token path
|
||||||
|
"""
|
||||||
|
kind = (provider_kind or "").strip().lower()
|
||||||
|
h = (host or "").strip()
|
||||||
|
|
||||||
|
# GitHub (cloud)
|
||||||
|
if kind == "github":
|
||||||
|
return "https://github.com/settings/tokens"
|
||||||
|
|
||||||
|
# Gitea / Forgejo (self-hosted)
|
||||||
|
if kind in ("gitea", "forgejo"):
|
||||||
|
# Typical UI path: Settings -> Applications -> Access Tokens
|
||||||
|
# In many installations this is available at /user/settings/applications
|
||||||
|
base = f"https://{h}".rstrip("/")
|
||||||
|
return f"{base}/user/settings/applications"
|
||||||
|
|
||||||
|
# GitLab (cloud or self-hosted)
|
||||||
|
if kind == "gitlab":
|
||||||
|
base = "https://gitlab.com" if not h else f"https://{h}".rstrip("/")
|
||||||
|
return f"{base}/-/profile/personal_access_tokens"
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
@dataclass(frozen=True)
|
||||||
class PromptTokenProvider:
|
class PromptTokenProvider:
|
||||||
"""Interactively prompt for a token.
|
"""Interactively prompt for a token.
|
||||||
@@ -25,6 +56,11 @@ class PromptTokenProvider:
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
owner_info = f" (owner: {request.owner})" if request.owner else ""
|
owner_info = f" (owner: {request.owner})" if request.owner else ""
|
||||||
|
help_url = _token_help_url(request.provider_kind, request.host)
|
||||||
|
|
||||||
|
if help_url:
|
||||||
|
print(f"[INFO] Create/get your token here: {help_url}")
|
||||||
|
|
||||||
prompt = f"Enter API token for {request.provider_kind} on {request.host}{owner_info}: "
|
prompt = f"Enter API token for {request.provider_kind} on {request.host}{owner_info}: "
|
||||||
token = (getpass(prompt) or "").strip()
|
token = (getpass(prompt) or "").strip()
|
||||||
if not token:
|
if not token:
|
||||||
|
|||||||
@@ -1,13 +1,14 @@
|
|||||||
# src/pkgmgr/core/credentials/resolver.py
|
# src/pkgmgr/core/credentials/resolver.py
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import sys
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from .providers.env import EnvTokenProvider
|
from .providers.env import EnvTokenProvider
|
||||||
from .providers.keyring import KeyringTokenProvider
|
from .providers.keyring import KeyringTokenProvider
|
||||||
from .providers.prompt import PromptTokenProvider
|
from .providers.prompt import PromptTokenProvider
|
||||||
from .types import NoCredentialsError, TokenRequest, TokenResult
|
from .types import KeyringUnavailableError, NoCredentialsError, TokenRequest, TokenResult
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
@dataclass(frozen=True)
|
||||||
@@ -26,6 +27,26 @@ class TokenResolver:
|
|||||||
self._env = EnvTokenProvider()
|
self._env = EnvTokenProvider()
|
||||||
self._keyring = KeyringTokenProvider()
|
self._keyring = KeyringTokenProvider()
|
||||||
self._prompt = PromptTokenProvider()
|
self._prompt = PromptTokenProvider()
|
||||||
|
self._warned_keyring: bool = False
|
||||||
|
|
||||||
|
def _warn_keyring_unavailable(self, exc: Exception) -> None:
|
||||||
|
if self._warned_keyring:
|
||||||
|
return
|
||||||
|
self._warned_keyring = True
|
||||||
|
|
||||||
|
msg = str(exc).strip() or "Keyring is unavailable."
|
||||||
|
print("[WARN] Keyring support is not available.", file=sys.stderr)
|
||||||
|
print(f" {msg}", file=sys.stderr)
|
||||||
|
print(" Tokens will NOT be persisted securely.", file=sys.stderr)
|
||||||
|
print("", file=sys.stderr)
|
||||||
|
print(" To enable secure token storage, install python-keyring:", file=sys.stderr)
|
||||||
|
print(" pip install keyring", file=sys.stderr)
|
||||||
|
print("", file=sys.stderr)
|
||||||
|
print(" Or install via system packages:", file=sys.stderr)
|
||||||
|
print(" sudo apt install python3-keyring", file=sys.stderr)
|
||||||
|
print(" sudo pacman -S python-keyring", file=sys.stderr)
|
||||||
|
print(" sudo dnf install python3-keyring", file=sys.stderr)
|
||||||
|
print("", file=sys.stderr)
|
||||||
|
|
||||||
def get_token(
|
def get_token(
|
||||||
self,
|
self,
|
||||||
@@ -47,9 +68,11 @@ class TokenResolver:
|
|||||||
kr_res = self._keyring.get(request)
|
kr_res = self._keyring.get(request)
|
||||||
if kr_res:
|
if kr_res:
|
||||||
return kr_res
|
return kr_res
|
||||||
|
except KeyringUnavailableError as exc:
|
||||||
|
# Show a helpful warning once, then continue (prompt fallback).
|
||||||
|
self._warn_keyring_unavailable(exc)
|
||||||
except Exception:
|
except Exception:
|
||||||
# Keyring missing/unavailable: ignore to allow prompt (workstations)
|
# Unknown keyring errors: do not block prompting; still avoid hard crash.
|
||||||
# or to fail cleanly below (headless CI without prompt).
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# 3) Prompt (optional)
|
# 3) Prompt (optional)
|
||||||
@@ -59,6 +82,8 @@ class TokenResolver:
|
|||||||
if opts.save_prompt_token_to_keyring:
|
if opts.save_prompt_token_to_keyring:
|
||||||
try:
|
try:
|
||||||
self._keyring.set(request, prompt_res.token)
|
self._keyring.set(request, prompt_res.token)
|
||||||
|
except KeyringUnavailableError as exc:
|
||||||
|
self._warn_keyring_unavailable(exc)
|
||||||
except Exception:
|
except Exception:
|
||||||
# If keyring cannot store, still use token for this run.
|
# If keyring cannot store, still use token for this run.
|
||||||
pass
|
pass
|
||||||
|
|||||||
@@ -64,10 +64,12 @@ def ensure_remote_repo(
|
|||||||
provider = reg.resolve(spec.host)
|
provider = reg.resolve(spec.host)
|
||||||
if provider_hint and provider_hint.kind:
|
if provider_hint and provider_hint.kind:
|
||||||
forced = provider_hint.kind.strip().lower()
|
forced = provider_hint.kind.strip().lower()
|
||||||
provider = next(
|
forced_provider = next(
|
||||||
(p for p in reg.providers if getattr(p, "kind", "").lower() == forced),
|
(p for p in reg.providers if getattr(p, "kind", "").lower() == forced),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
|
if forced_provider is not None:
|
||||||
|
provider = forced_provider
|
||||||
|
|
||||||
if provider is None:
|
if provider is None:
|
||||||
raise UnsupportedProviderError(f"No provider matched host: {spec.host}")
|
raise UnsupportedProviderError(f"No provider matched host: {spec.host}")
|
||||||
|
|||||||
124
src/pkgmgr/core/repository/paths.py
Normal file
124
src/pkgmgr/core/repository/paths.py
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
"""
|
||||||
|
Central repository path resolver.
|
||||||
|
|
||||||
|
Goal:
|
||||||
|
- Provide ONE place to define where packaging / changelog / metadata files live.
|
||||||
|
- Prefer modern layout (packaging/*) but stay backwards-compatible with legacy
|
||||||
|
root-level paths.
|
||||||
|
|
||||||
|
Both:
|
||||||
|
- readers (pkgmgr.core.version.source)
|
||||||
|
- writers (pkgmgr.actions.release.workflow)
|
||||||
|
|
||||||
|
should use this module instead of hardcoding paths.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Iterable, Optional
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class RepoPaths:
|
||||||
|
repo_dir: str
|
||||||
|
|
||||||
|
pyproject_toml: str
|
||||||
|
flake_nix: str
|
||||||
|
|
||||||
|
# Human changelog (typically Markdown)
|
||||||
|
changelog_md: Optional[str]
|
||||||
|
|
||||||
|
# Packaging-related files
|
||||||
|
arch_pkgbuild: Optional[str]
|
||||||
|
debian_changelog: Optional[str]
|
||||||
|
rpm_spec: Optional[str]
|
||||||
|
|
||||||
|
|
||||||
|
def _first_existing(candidates: Iterable[str]) -> Optional[str]:
|
||||||
|
for p in candidates:
|
||||||
|
if p and os.path.isfile(p):
|
||||||
|
return p
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _find_first_spec_in_dir(dir_path: str) -> Optional[str]:
|
||||||
|
if not os.path.isdir(dir_path):
|
||||||
|
return None
|
||||||
|
try:
|
||||||
|
for fn in sorted(os.listdir(dir_path)):
|
||||||
|
if fn.endswith(".spec"):
|
||||||
|
p = os.path.join(dir_path, fn)
|
||||||
|
if os.path.isfile(p):
|
||||||
|
return p
|
||||||
|
except OSError:
|
||||||
|
return None
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_repo_paths(repo_dir: str) -> RepoPaths:
|
||||||
|
"""
|
||||||
|
Resolve canonical file locations for a repository.
|
||||||
|
|
||||||
|
Preferences (new layout first, legacy fallback second):
|
||||||
|
- PKGBUILD: packaging/arch/PKGBUILD -> PKGBUILD
|
||||||
|
- Debian changelog: packaging/debian/changelog -> debian/changelog
|
||||||
|
- RPM spec: packaging/fedora/package-manager.spec
|
||||||
|
-> first *.spec in packaging/fedora
|
||||||
|
-> first *.spec in repo root
|
||||||
|
- CHANGELOG.md: CHANGELOG.md -> packaging/CHANGELOG.md (optional fallback)
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- This resolver only returns paths; it does not read/parse files.
|
||||||
|
- Callers should treat Optional paths as "may not exist".
|
||||||
|
"""
|
||||||
|
repo_dir = os.path.abspath(repo_dir)
|
||||||
|
|
||||||
|
pyproject_toml = os.path.join(repo_dir, "pyproject.toml")
|
||||||
|
flake_nix = os.path.join(repo_dir, "flake.nix")
|
||||||
|
|
||||||
|
changelog_md = _first_existing(
|
||||||
|
[
|
||||||
|
os.path.join(repo_dir, "CHANGELOG.md"),
|
||||||
|
os.path.join(repo_dir, "packaging", "CHANGELOG.md"),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
arch_pkgbuild = _first_existing(
|
||||||
|
[
|
||||||
|
os.path.join(repo_dir, "packaging", "arch", "PKGBUILD"),
|
||||||
|
os.path.join(repo_dir, "PKGBUILD"),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
debian_changelog = _first_existing(
|
||||||
|
[
|
||||||
|
os.path.join(repo_dir, "packaging", "debian", "changelog"),
|
||||||
|
os.path.join(repo_dir, "debian", "changelog"),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# RPM spec: prefer the canonical file, else first spec in packaging/fedora, else first spec in repo root.
|
||||||
|
rpm_spec = _first_existing(
|
||||||
|
[
|
||||||
|
os.path.join(repo_dir, "packaging", "fedora", "package-manager.spec"),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
if rpm_spec is None:
|
||||||
|
rpm_spec = _find_first_spec_in_dir(os.path.join(repo_dir, "packaging", "fedora"))
|
||||||
|
if rpm_spec is None:
|
||||||
|
rpm_spec = _find_first_spec_in_dir(repo_dir)
|
||||||
|
|
||||||
|
return RepoPaths(
|
||||||
|
repo_dir=repo_dir,
|
||||||
|
pyproject_toml=pyproject_toml,
|
||||||
|
flake_nix=flake_nix,
|
||||||
|
changelog_md=changelog_md,
|
||||||
|
arch_pkgbuild=arch_pkgbuild,
|
||||||
|
debian_changelog=debian_changelog,
|
||||||
|
rpm_spec=rpm_spec,
|
||||||
|
)
|
||||||
168
src/pkgmgr/core/version/installed.py
Normal file
168
src/pkgmgr/core/version/installed.py
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Iterable, Optional, Tuple
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class InstalledVersion:
|
||||||
|
"""
|
||||||
|
Represents a resolved installed version and the matched name.
|
||||||
|
"""
|
||||||
|
name: str
|
||||||
|
version: str
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize(name: str) -> str:
|
||||||
|
return re.sub(r"[-_.]+", "-", (name or "").strip()).lower()
|
||||||
|
|
||||||
|
|
||||||
|
def _unique_candidates(names: Iterable[str]) -> list[str]:
|
||||||
|
seen: set[str] = set()
|
||||||
|
out: list[str] = []
|
||||||
|
for n in names:
|
||||||
|
if not n:
|
||||||
|
continue
|
||||||
|
key = _normalize(n)
|
||||||
|
if key in seen:
|
||||||
|
continue
|
||||||
|
seen.add(key)
|
||||||
|
out.append(n)
|
||||||
|
return out
|
||||||
|
|
||||||
|
|
||||||
|
def get_installed_python_version(*candidates: str) -> Optional[InstalledVersion]:
|
||||||
|
"""
|
||||||
|
Detect installed Python package version in the CURRENT Python environment.
|
||||||
|
|
||||||
|
Strategy:
|
||||||
|
1) Exact normalized match using importlib.metadata.version()
|
||||||
|
2) Substring fallback by scanning installed distributions
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
from importlib import metadata as importlib_metadata
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
|
||||||
|
candidates = _unique_candidates(candidates)
|
||||||
|
|
||||||
|
expanded: list[str] = []
|
||||||
|
for c in candidates:
|
||||||
|
n = _normalize(c)
|
||||||
|
expanded.extend([c, n, n.replace("-", "_"), n.replace("-", ".")])
|
||||||
|
expanded = _unique_candidates(expanded)
|
||||||
|
|
||||||
|
# 1) Direct queries first (fast path)
|
||||||
|
for name in expanded:
|
||||||
|
try:
|
||||||
|
version = importlib_metadata.version(name)
|
||||||
|
return InstalledVersion(name=name, version=version)
|
||||||
|
except Exception:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# 2) Fallback: scan distributions (last resort)
|
||||||
|
try:
|
||||||
|
dists = importlib_metadata.distributions()
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
|
||||||
|
norm_candidates = {_normalize(c) for c in candidates}
|
||||||
|
|
||||||
|
for dist in dists:
|
||||||
|
dist_name = dist.metadata.get("Name", "") or ""
|
||||||
|
norm_dist = _normalize(dist_name)
|
||||||
|
for c in norm_candidates:
|
||||||
|
if c and (c in norm_dist or norm_dist in c):
|
||||||
|
ver = getattr(dist, "version", None)
|
||||||
|
if ver:
|
||||||
|
return InstalledVersion(name=dist_name, version=ver)
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _run_nix(args: list[str]) -> Tuple[int, str, str]:
|
||||||
|
p = subprocess.run(
|
||||||
|
args,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.PIPE,
|
||||||
|
text=True,
|
||||||
|
check=False,
|
||||||
|
)
|
||||||
|
return p.returncode, p.stdout or "", p.stderr or ""
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_version_from_store_path(path: str) -> Optional[str]:
|
||||||
|
if not path:
|
||||||
|
return None
|
||||||
|
base = path.rstrip("/").split("/")[-1]
|
||||||
|
if "-" not in base:
|
||||||
|
return None
|
||||||
|
tail = base.split("-")[-1]
|
||||||
|
if re.match(r"\d+(\.\d+){0,3}([a-z0-9+._-]*)?$", tail, re.I):
|
||||||
|
return tail
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_installed_nix_profile_version(*candidates: str) -> Optional[InstalledVersion]:
|
||||||
|
"""
|
||||||
|
Detect installed version from the current Nix profile.
|
||||||
|
|
||||||
|
Strategy:
|
||||||
|
1) JSON output (exact normalized match)
|
||||||
|
2) Text fallback (substring)
|
||||||
|
"""
|
||||||
|
if shutil.which("nix") is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
candidates = _unique_candidates(candidates)
|
||||||
|
if not candidates:
|
||||||
|
return None
|
||||||
|
|
||||||
|
norm_candidates = {_normalize(c) for c in candidates}
|
||||||
|
|
||||||
|
# Preferred: JSON output
|
||||||
|
rc, out, _ = _run_nix(["nix", "profile", "list", "--json"])
|
||||||
|
if rc == 0 and out.strip():
|
||||||
|
try:
|
||||||
|
data = json.loads(out)
|
||||||
|
elements = data.get("elements") or data.get("items") or {}
|
||||||
|
if isinstance(elements, dict):
|
||||||
|
for elem in elements.values():
|
||||||
|
if not isinstance(elem, dict):
|
||||||
|
continue
|
||||||
|
name = (elem.get("name") or elem.get("pname") or "").strip()
|
||||||
|
version = (elem.get("version") or "").strip()
|
||||||
|
norm_name = _normalize(name)
|
||||||
|
|
||||||
|
if norm_name in norm_candidates:
|
||||||
|
if version:
|
||||||
|
return InstalledVersion(name=name, version=version)
|
||||||
|
for sp in elem.get("storePaths", []) or []:
|
||||||
|
guess = _extract_version_from_store_path(sp)
|
||||||
|
if guess:
|
||||||
|
return InstalledVersion(name=name, version=guess)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Fallback: text mode
|
||||||
|
rc, out, _ = _run_nix(["nix", "profile", "list"])
|
||||||
|
if rc != 0:
|
||||||
|
return None
|
||||||
|
|
||||||
|
for line in out.splitlines():
|
||||||
|
norm_line = _normalize(line)
|
||||||
|
for c in norm_candidates:
|
||||||
|
if c in norm_line:
|
||||||
|
m = re.search(r"\b\d+(\.\d+){0,3}[a-z0-9+._-]*\b", line, re.I)
|
||||||
|
if m:
|
||||||
|
return InstalledVersion(name=c, version=m.group(0))
|
||||||
|
if "/nix/store/" in line:
|
||||||
|
guess = _extract_version_from_store_path(line.split()[-1])
|
||||||
|
if guess:
|
||||||
|
return InstalledVersion(name=c, version=guess)
|
||||||
|
|
||||||
|
return None
|
||||||
@@ -1,21 +1,4 @@
|
|||||||
#!/usr/bin/env python3
|
# src/pkgmgr/core/version/source.py
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
"""
|
|
||||||
Helpers to extract version information from various packaging files.
|
|
||||||
|
|
||||||
All functions take a repository directory and return either a version
|
|
||||||
string or None if the corresponding file or version field is missing.
|
|
||||||
|
|
||||||
Supported sources:
|
|
||||||
- pyproject.toml (PEP 621, [project].version)
|
|
||||||
- flake.nix (version = "X.Y.Z";)
|
|
||||||
- PKGBUILD (pkgver / pkgrel)
|
|
||||||
- debian/changelog (first entry line: package (version) ...)
|
|
||||||
- RPM spec file (package-manager.spec: Version / Release)
|
|
||||||
- Ansible Galaxy (galaxy.yml or meta/main.yml)
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
@@ -24,39 +7,59 @@ from typing import Optional
|
|||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
|
from pkgmgr.core.repository.paths import resolve_repo_paths
|
||||||
|
|
||||||
|
|
||||||
def read_pyproject_version(repo_dir: str) -> Optional[str]:
|
def read_pyproject_version(repo_dir: str) -> Optional[str]:
|
||||||
"""
|
"""
|
||||||
Read the version from pyproject.toml in repo_dir, if present.
|
Read the version from pyproject.toml in repo_dir, if present.
|
||||||
|
|
||||||
Expects a PEP 621-style [project] table with a 'version' field.
|
Expects a PEP 621-style [project] table with a 'version' field.
|
||||||
Returns the version string or None.
|
|
||||||
"""
|
"""
|
||||||
path = os.path.join(repo_dir, "pyproject.toml")
|
paths = resolve_repo_paths(repo_dir)
|
||||||
if not os.path.exists(path):
|
path = paths.pyproject_toml
|
||||||
|
if not os.path.isfile(path):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
|
||||||
try:
|
try:
|
||||||
import tomllib # Python 3.11+
|
import tomllib # Python 3.11+
|
||||||
except ModuleNotFoundError: # pragma: no cover
|
except Exception:
|
||||||
tomllib = None
|
import tomli as tomllib # type: ignore
|
||||||
|
|
||||||
if tomllib is None:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
try:
|
||||||
with open(path, "rb") as f:
|
with open(path, "rb") as f:
|
||||||
data = tomllib.load(f)
|
data = tomllib.load(f)
|
||||||
|
project = data.get("project") or {}
|
||||||
project = data.get("project", {})
|
|
||||||
if isinstance(project, dict):
|
|
||||||
version = project.get("version")
|
version = project.get("version")
|
||||||
if isinstance(version, str):
|
return str(version).strip() if version else None
|
||||||
return version.strip() or None
|
|
||||||
except Exception:
|
except Exception:
|
||||||
# Intentionally swallow errors and fall back to None.
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def read_pyproject_project_name(repo_dir: str) -> Optional[str]:
|
||||||
|
"""
|
||||||
|
Read distribution name from pyproject.toml ([project].name).
|
||||||
|
|
||||||
|
This is required to correctly resolve installed Python package
|
||||||
|
versions via importlib.metadata.
|
||||||
|
"""
|
||||||
|
paths = resolve_repo_paths(repo_dir)
|
||||||
|
path = paths.pyproject_toml
|
||||||
|
if not os.path.isfile(path):
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
import tomllib # Python 3.11+
|
||||||
|
except Exception:
|
||||||
|
import tomli as tomllib # type: ignore
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(path, "rb") as f:
|
||||||
|
data = tomllib.load(f)
|
||||||
|
project = data.get("project") or {}
|
||||||
|
name = project.get("name")
|
||||||
|
return str(name).strip() if name else None
|
||||||
|
except Exception:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
@@ -64,12 +67,12 @@ def read_flake_version(repo_dir: str) -> Optional[str]:
|
|||||||
"""
|
"""
|
||||||
Read the version from flake.nix in repo_dir, if present.
|
Read the version from flake.nix in repo_dir, if present.
|
||||||
|
|
||||||
Looks for a line like:
|
Looks for:
|
||||||
version = "1.2.3";
|
version = "X.Y.Z";
|
||||||
and returns the string inside the quotes.
|
|
||||||
"""
|
"""
|
||||||
path = os.path.join(repo_dir, "flake.nix")
|
paths = resolve_repo_paths(repo_dir)
|
||||||
if not os.path.exists(path):
|
path = paths.flake_nix
|
||||||
|
if not os.path.isfile(path):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -81,22 +84,22 @@ def read_flake_version(repo_dir: str) -> Optional[str]:
|
|||||||
match = re.search(r'version\s*=\s*"([^"]+)"', text)
|
match = re.search(r'version\s*=\s*"([^"]+)"', text)
|
||||||
if not match:
|
if not match:
|
||||||
return None
|
return None
|
||||||
version = match.group(1).strip()
|
|
||||||
return version or None
|
return match.group(1).strip() or None
|
||||||
|
|
||||||
|
|
||||||
def read_pkgbuild_version(repo_dir: str) -> Optional[str]:
|
def read_pkgbuild_version(repo_dir: str) -> Optional[str]:
|
||||||
"""
|
"""
|
||||||
Read the version from PKGBUILD in repo_dir, if present.
|
Read the version from PKGBUILD (preferring packaging/arch/PKGBUILD).
|
||||||
|
|
||||||
Expects:
|
Combines pkgver and pkgrel if both exist:
|
||||||
pkgver=1.2.3
|
pkgver=1.2.3
|
||||||
pkgrel=1
|
pkgrel=1
|
||||||
|
-> 1.2.3-1
|
||||||
Returns either "1.2.3-1" (if both are present) or just "1.2.3".
|
|
||||||
"""
|
"""
|
||||||
path = os.path.join(repo_dir, "PKGBUILD")
|
paths = resolve_repo_paths(repo_dir)
|
||||||
if not os.path.exists(path):
|
path = paths.arch_pkgbuild
|
||||||
|
if not path or not os.path.isfile(path):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -121,15 +124,19 @@ def read_pkgbuild_version(repo_dir: str) -> Optional[str]:
|
|||||||
|
|
||||||
def read_debian_changelog_version(repo_dir: str) -> Optional[str]:
|
def read_debian_changelog_version(repo_dir: str) -> Optional[str]:
|
||||||
"""
|
"""
|
||||||
Read the latest Debian version from debian/changelog in repo_dir, if present.
|
Read the latest version from debian changelog.
|
||||||
|
|
||||||
The first non-empty line typically looks like:
|
Preferred path:
|
||||||
package-name (1.2.3-1) unstable; urgency=medium
|
packaging/debian/changelog
|
||||||
|
Fallback:
|
||||||
|
debian/changelog
|
||||||
|
|
||||||
We extract the text inside the first parentheses.
|
Expected format:
|
||||||
|
package (1.2.3-1) unstable; urgency=medium
|
||||||
"""
|
"""
|
||||||
path = os.path.join(repo_dir, "debian", "changelog")
|
paths = resolve_repo_paths(repo_dir)
|
||||||
if not os.path.exists(path):
|
path = paths.debian_changelog
|
||||||
|
if not path or not os.path.isfile(path):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -140,8 +147,7 @@ def read_debian_changelog_version(repo_dir: str) -> Optional[str]:
|
|||||||
continue
|
continue
|
||||||
match = re.search(r"\(([^)]+)\)", line)
|
match = re.search(r"\(([^)]+)\)", line)
|
||||||
if match:
|
if match:
|
||||||
version = match.group(1).strip()
|
return match.group(1).strip() or None
|
||||||
return version or None
|
|
||||||
break
|
break
|
||||||
except Exception:
|
except Exception:
|
||||||
return None
|
return None
|
||||||
@@ -151,19 +157,21 @@ def read_debian_changelog_version(repo_dir: str) -> Optional[str]:
|
|||||||
|
|
||||||
def read_spec_version(repo_dir: str) -> Optional[str]:
|
def read_spec_version(repo_dir: str) -> Optional[str]:
|
||||||
"""
|
"""
|
||||||
Read the version from a RPM spec file.
|
Read the version from an RPM spec file.
|
||||||
|
|
||||||
For now, we assume a fixed file name 'package-manager.spec'
|
Preferred paths:
|
||||||
in repo_dir with lines like:
|
packaging/fedora/package-manager.spec
|
||||||
|
packaging/fedora/*.spec
|
||||||
|
repo_root/*.spec
|
||||||
|
|
||||||
|
Combines:
|
||||||
Version: 1.2.3
|
Version: 1.2.3
|
||||||
Release: 1%{?dist}
|
Release: 1%{?dist}
|
||||||
|
-> 1.2.3-1
|
||||||
Returns either "1.2.3-1" (if Release is present) or "1.2.3".
|
|
||||||
Any RPM macro suffix like '%{?dist}' is stripped from the release.
|
|
||||||
"""
|
"""
|
||||||
path = os.path.join(repo_dir, "package-manager.spec")
|
paths = resolve_repo_paths(repo_dir)
|
||||||
if not os.path.exists(path):
|
path = paths.rpm_spec
|
||||||
|
if not path or not os.path.isfile(path):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -180,10 +188,7 @@ def read_spec_version(repo_dir: str) -> Optional[str]:
|
|||||||
rel_match = re.search(r"^Release:\s*(.+)$", text, re.MULTILINE)
|
rel_match = re.search(r"^Release:\s*(.+)$", text, re.MULTILINE)
|
||||||
if rel_match:
|
if rel_match:
|
||||||
release_raw = rel_match.group(1).strip()
|
release_raw = rel_match.group(1).strip()
|
||||||
# Strip common RPM macro suffix like %... (e.g. 1%{?dist})
|
release = release_raw.split("%", 1)[0].split(" ", 1)[0].strip()
|
||||||
release = release_raw.split("%", 1)[0].strip()
|
|
||||||
# Also strip anything after first whitespace, just in case
|
|
||||||
release = release.split(" ", 1)[0].strip()
|
|
||||||
if release:
|
if release:
|
||||||
return f"{version}-{release}"
|
return f"{version}-{release}"
|
||||||
|
|
||||||
@@ -192,40 +197,35 @@ def read_spec_version(repo_dir: str) -> Optional[str]:
|
|||||||
|
|
||||||
def read_ansible_galaxy_version(repo_dir: str) -> Optional[str]:
|
def read_ansible_galaxy_version(repo_dir: str) -> Optional[str]:
|
||||||
"""
|
"""
|
||||||
Read the version from Ansible Galaxy metadata, if present.
|
Read the version from Ansible Galaxy metadata.
|
||||||
|
|
||||||
Supported locations:
|
Supported:
|
||||||
- galaxy.yml (preferred for modern roles/collections)
|
- galaxy.yml
|
||||||
- meta/main.yml (legacy style roles; uses galaxy_info.version or version)
|
- meta/main.yml (galaxy_info.version or version)
|
||||||
"""
|
"""
|
||||||
# 1) galaxy.yml in repo root
|
galaxy_yml = os.path.join(repo_dir, "galaxy.yml")
|
||||||
galaxy_path = os.path.join(repo_dir, "galaxy.yml")
|
if os.path.isfile(galaxy_yml):
|
||||||
if os.path.exists(galaxy_path):
|
|
||||||
try:
|
try:
|
||||||
with open(galaxy_path, "r", encoding="utf-8") as f:
|
with open(galaxy_yml, "r", encoding="utf-8") as f:
|
||||||
data = yaml.safe_load(f) or {}
|
data = yaml.safe_load(f) or {}
|
||||||
version = data.get("version")
|
version = data.get("version")
|
||||||
if isinstance(version, str) and version.strip():
|
if isinstance(version, str) and version.strip():
|
||||||
return version.strip()
|
return version.strip()
|
||||||
except Exception:
|
except Exception:
|
||||||
# Ignore parse errors and fall through to meta/main.yml
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# 2) meta/main.yml (classic Ansible role)
|
meta_yml = os.path.join(repo_dir, "meta", "main.yml")
|
||||||
meta_path = os.path.join(repo_dir, "meta", "main.yml")
|
if os.path.isfile(meta_yml):
|
||||||
if os.path.exists(meta_path):
|
|
||||||
try:
|
try:
|
||||||
with open(meta_path, "r", encoding="utf-8") as f:
|
with open(meta_yml, "r", encoding="utf-8") as f:
|
||||||
data = yaml.safe_load(f) or {}
|
data = yaml.safe_load(f) or {}
|
||||||
|
|
||||||
# Preferred: galaxy_info.version
|
|
||||||
galaxy_info = data.get("galaxy_info") or {}
|
galaxy_info = data.get("galaxy_info") or {}
|
||||||
if isinstance(galaxy_info, dict):
|
if isinstance(galaxy_info, dict):
|
||||||
version = galaxy_info.get("version")
|
version = galaxy_info.get("version")
|
||||||
if isinstance(version, str) and version.strip():
|
if isinstance(version, str) and version.strip():
|
||||||
return version.strip()
|
return version.strip()
|
||||||
|
|
||||||
# Fallback: top-level 'version'
|
|
||||||
version = data.get("version")
|
version = data.get("version")
|
||||||
if isinstance(version, str) and version.strip():
|
if isinstance(version, str) and version.strip():
|
||||||
return version.strip()
|
return version.strip()
|
||||||
|
|||||||
@@ -1,164 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
"""
|
|
||||||
E2E integration tests for the `pkgmgr mirror` command family.
|
|
||||||
|
|
||||||
Covered commands:
|
|
||||||
|
|
||||||
- pkgmgr mirror --help
|
|
||||||
- pkgmgr mirror list --preview --all
|
|
||||||
- pkgmgr mirror diff --preview --all
|
|
||||||
- pkgmgr mirror merge config file --preview --all
|
|
||||||
- pkgmgr mirror setup --preview --all
|
|
||||||
- pkgmgr mirror check --preview --all
|
|
||||||
- pkgmgr mirror provision --preview --all
|
|
||||||
|
|
||||||
All commands are executed via the real CLI entry point (main module).
|
|
||||||
With --preview enabled, all operations are non-destructive and safe
|
|
||||||
to run inside CI containers.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import io
|
|
||||||
import runpy
|
|
||||||
import sys
|
|
||||||
import unittest
|
|
||||||
from contextlib import redirect_stdout, redirect_stderr
|
|
||||||
|
|
||||||
|
|
||||||
class TestIntegrationMirrorCommands(unittest.TestCase):
|
|
||||||
"""
|
|
||||||
End-to-end tests for `pkgmgr mirror` commands.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# ------------------------------------------------------------
|
|
||||||
# Helper
|
|
||||||
# ------------------------------------------------------------
|
|
||||||
def _run_pkgmgr(self, args):
|
|
||||||
"""
|
|
||||||
Execute pkgmgr with the given arguments and return captured output.
|
|
||||||
|
|
||||||
- Treat SystemExit(0) or SystemExit(None) as success.
|
|
||||||
- Any other exit code is considered a test failure.
|
|
||||||
"""
|
|
||||||
original_argv = list(sys.argv)
|
|
||||||
buffer = io.StringIO()
|
|
||||||
cmd_repr = "pkgmgr " + " ".join(args)
|
|
||||||
|
|
||||||
try:
|
|
||||||
sys.argv = ["pkgmgr"] + list(args)
|
|
||||||
|
|
||||||
try:
|
|
||||||
with redirect_stdout(buffer), redirect_stderr(buffer):
|
|
||||||
runpy.run_module("pkgmgr", run_name="__main__")
|
|
||||||
except SystemExit as exc:
|
|
||||||
code = exc.code if isinstance(exc.code, int) else None
|
|
||||||
if code not in (0, None):
|
|
||||||
raise AssertionError(
|
|
||||||
"%r failed with exit code %r.\n\nOutput:\n%s"
|
|
||||||
% (cmd_repr, exc.code, buffer.getvalue())
|
|
||||||
)
|
|
||||||
|
|
||||||
return buffer.getvalue()
|
|
||||||
|
|
||||||
finally:
|
|
||||||
sys.argv = original_argv
|
|
||||||
|
|
||||||
# ------------------------------------------------------------
|
|
||||||
# Tests
|
|
||||||
# ------------------------------------------------------------
|
|
||||||
|
|
||||||
def test_mirror_help(self):
|
|
||||||
"""
|
|
||||||
`pkgmgr mirror --help` should run without error and print usage info.
|
|
||||||
"""
|
|
||||||
output = self._run_pkgmgr(["mirror", "--help"])
|
|
||||||
self.assertIn("usage:", output)
|
|
||||||
self.assertIn("pkgmgr mirror", output)
|
|
||||||
|
|
||||||
def test_mirror_list_preview_all(self):
|
|
||||||
"""
|
|
||||||
`pkgmgr mirror list --preview --all`
|
|
||||||
"""
|
|
||||||
output = self._run_pkgmgr(
|
|
||||||
["mirror", "list", "--preview", "--all"]
|
|
||||||
)
|
|
||||||
self.assertTrue(
|
|
||||||
output.strip(),
|
|
||||||
"Expected output from mirror list",
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_mirror_diff_preview_all(self):
|
|
||||||
"""
|
|
||||||
`pkgmgr mirror diff --preview --all`
|
|
||||||
"""
|
|
||||||
output = self._run_pkgmgr(
|
|
||||||
["mirror", "diff", "--preview", "--all"]
|
|
||||||
)
|
|
||||||
self.assertTrue(
|
|
||||||
output.strip(),
|
|
||||||
"Expected output from mirror diff",
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_mirror_merge_config_to_file_preview_all(self):
|
|
||||||
"""
|
|
||||||
`pkgmgr mirror merge config file --preview --all`
|
|
||||||
"""
|
|
||||||
output = self._run_pkgmgr(
|
|
||||||
[
|
|
||||||
"mirror",
|
|
||||||
"merge",
|
|
||||||
"config",
|
|
||||||
"file",
|
|
||||||
"--preview",
|
|
||||||
"--all",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
self.assertTrue(
|
|
||||||
output.strip(),
|
|
||||||
"Expected output from mirror merge (config -> file)",
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_mirror_setup_preview_all(self):
|
|
||||||
"""
|
|
||||||
`pkgmgr mirror setup --preview --all`
|
|
||||||
"""
|
|
||||||
output = self._run_pkgmgr(
|
|
||||||
["mirror", "setup", "--preview", "--all"]
|
|
||||||
)
|
|
||||||
self.assertTrue(
|
|
||||||
output.strip(),
|
|
||||||
"Expected output from mirror setup",
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_mirror_check_preview_all(self):
|
|
||||||
"""
|
|
||||||
`pkgmgr mirror check --preview --all`
|
|
||||||
|
|
||||||
Performs non-destructive remote checks (git ls-remote).
|
|
||||||
"""
|
|
||||||
output = self._run_pkgmgr(
|
|
||||||
["mirror", "check", "--preview", "--all"]
|
|
||||||
)
|
|
||||||
self.assertTrue(
|
|
||||||
output.strip(),
|
|
||||||
"Expected output from mirror check",
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_mirror_provision_preview_all(self):
|
|
||||||
"""
|
|
||||||
`pkgmgr mirror provision --preview --all`
|
|
||||||
|
|
||||||
In preview mode this MUST NOT create remote repositories.
|
|
||||||
"""
|
|
||||||
output = self._run_pkgmgr(
|
|
||||||
["mirror", "provision", "--preview", "--all"]
|
|
||||||
)
|
|
||||||
self.assertTrue(
|
|
||||||
output.strip(),
|
|
||||||
"Expected output from mirror provision (preview)",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
unittest.main()
|
|
||||||
172
tests/integration/test_mirror_commands.py
Normal file
172
tests/integration/test_mirror_commands.py
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
"""
|
||||||
|
CLI integration tests for `pkgmgr mirror`.
|
||||||
|
|
||||||
|
These tests validate:
|
||||||
|
- CLI argument parsing
|
||||||
|
- command dispatch
|
||||||
|
- command orchestration
|
||||||
|
|
||||||
|
All side effects (git, network, remote provisioning, filesystem writes)
|
||||||
|
are patched to keep tests deterministic and CI-safe.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import importlib
|
||||||
|
import io
|
||||||
|
import os
|
||||||
|
import runpy
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
from contextlib import ExitStack, redirect_stderr, redirect_stdout
|
||||||
|
from typing import Dict, List, Optional
|
||||||
|
from unittest.mock import MagicMock, PropertyMock, patch
|
||||||
|
|
||||||
|
|
||||||
|
class TestIntegrationMirrorCommands(unittest.TestCase):
|
||||||
|
"""
|
||||||
|
Integration tests for `pkgmgr mirror` commands.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _run_pkgmgr(self, args: List[str], extra_env: Optional[Dict[str, str]] = None) -> str:
|
||||||
|
"""
|
||||||
|
Execute pkgmgr with the given arguments and return captured output.
|
||||||
|
|
||||||
|
- Treat SystemExit(0) or SystemExit(None) as success.
|
||||||
|
- Any other exit code is considered a test failure.
|
||||||
|
- Mirror commands are patched to avoid network/destructive operations.
|
||||||
|
"""
|
||||||
|
original_argv = list(sys.argv)
|
||||||
|
original_env = dict(os.environ)
|
||||||
|
buffer = io.StringIO()
|
||||||
|
cmd_repr = "pkgmgr " + " ".join(args)
|
||||||
|
|
||||||
|
# Shared dummy context used by multiple mirror commands
|
||||||
|
dummy_ctx = MagicMock()
|
||||||
|
dummy_ctx.identifier = "dummy-repo"
|
||||||
|
dummy_ctx.repo_dir = "/tmp/dummy-repo"
|
||||||
|
dummy_ctx.config_mirrors = {"origin": "git@github.com:alice/repo.git"}
|
||||||
|
dummy_ctx.file_mirrors = {"backup": "ssh://git@git.example:2201/alice/repo.git"}
|
||||||
|
type(dummy_ctx).resolved_mirrors = PropertyMock(
|
||||||
|
return_value={
|
||||||
|
"origin": "git@github.com:alice/repo.git",
|
||||||
|
"backup": "ssh://git@git.example:2201/alice/repo.git",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Helper: patch with create=True so missing symbols don't explode.
|
||||||
|
# IMPORTANT: patch() does not auto-import submodules when resolving dotted names.
|
||||||
|
def _p(target: str, **kwargs):
|
||||||
|
module_name = target.rsplit(".", 1)[0]
|
||||||
|
try:
|
||||||
|
importlib.import_module(module_name)
|
||||||
|
except ModuleNotFoundError:
|
||||||
|
# If the module truly doesn't exist, create=True may still allow patching
|
||||||
|
# in some cases, but dotted resolution can still fail. Best-effort.
|
||||||
|
pass
|
||||||
|
return patch(target, create=True, **kwargs)
|
||||||
|
|
||||||
|
# Fake result for remote provisioning (preview-safe)
|
||||||
|
def _fake_ensure_remote_repo(spec, provider_hint=None, options=None):
|
||||||
|
# Safety: E2E should only ever call this in preview mode
|
||||||
|
if options is not None and getattr(options, "preview", False) is not True:
|
||||||
|
raise AssertionError(
|
||||||
|
f"{cmd_repr} attempted ensure_remote_repo without preview=True in E2E."
|
||||||
|
)
|
||||||
|
r = MagicMock()
|
||||||
|
r.status = "preview"
|
||||||
|
r.message = "Preview mode (E2E patched): no remote provisioning performed."
|
||||||
|
r.url = None
|
||||||
|
return r
|
||||||
|
|
||||||
|
try:
|
||||||
|
sys.argv = ["pkgmgr"] + list(args)
|
||||||
|
if extra_env:
|
||||||
|
os.environ.update(extra_env)
|
||||||
|
|
||||||
|
with ExitStack() as stack:
|
||||||
|
# build_context is imported directly in these modules:
|
||||||
|
stack.enter_context(_p("pkgmgr.actions.mirror.list_cmd.build_context", return_value=dummy_ctx))
|
||||||
|
stack.enter_context(_p("pkgmgr.actions.mirror.diff_cmd.build_context", return_value=dummy_ctx))
|
||||||
|
stack.enter_context(_p("pkgmgr.actions.mirror.merge_cmd.build_context", return_value=dummy_ctx))
|
||||||
|
stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.build_context", return_value=dummy_ctx))
|
||||||
|
stack.enter_context(_p("pkgmgr.actions.mirror.remote_provision.build_context", return_value=dummy_ctx))
|
||||||
|
|
||||||
|
# Deterministic remote probing (covers setup + likely check implementations)
|
||||||
|
stack.enter_context(_p("pkgmgr.actions.mirror.remote_check.probe_mirror", return_value=(True, "")))
|
||||||
|
stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.probe_mirror", return_value=(True, "")))
|
||||||
|
stack.enter_context(_p("pkgmgr.actions.mirror.git_remote.is_remote_reachable", return_value=True))
|
||||||
|
|
||||||
|
# setup_cmd imports ensure_origin_remote directly:
|
||||||
|
stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.ensure_origin_remote", return_value=None))
|
||||||
|
# Extra safety: if any code calls git_remote.ensure_origin_remote directly
|
||||||
|
stack.enter_context(_p("pkgmgr.actions.mirror.git_remote.ensure_origin_remote", return_value=None))
|
||||||
|
|
||||||
|
# remote provisioning: remote_provision imports ensure_remote_repo directly from core:
|
||||||
|
stack.enter_context(
|
||||||
|
_p(
|
||||||
|
"pkgmgr.actions.mirror.remote_provision.ensure_remote_repo",
|
||||||
|
side_effect=_fake_ensure_remote_repo,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Extra safety: if anything calls remote_check.run_git directly, make it inert
|
||||||
|
stack.enter_context(_p("pkgmgr.actions.mirror.remote_check.run_git", return_value="dummy"))
|
||||||
|
|
||||||
|
with redirect_stdout(buffer), redirect_stderr(buffer):
|
||||||
|
try:
|
||||||
|
runpy.run_module("pkgmgr", run_name="__main__")
|
||||||
|
except SystemExit as exc:
|
||||||
|
code = exc.code if isinstance(exc.code, int) else None
|
||||||
|
if code not in (0, None):
|
||||||
|
raise AssertionError(
|
||||||
|
"%r failed with exit code %r.\n\nOutput:\n%s"
|
||||||
|
% (cmd_repr, exc.code, buffer.getvalue())
|
||||||
|
)
|
||||||
|
|
||||||
|
return buffer.getvalue()
|
||||||
|
|
||||||
|
finally:
|
||||||
|
sys.argv = original_argv
|
||||||
|
os.environ.clear()
|
||||||
|
os.environ.update(original_env)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
# Tests
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
|
||||||
|
def test_mirror_help(self) -> None:
|
||||||
|
output = self._run_pkgmgr(["mirror", "--help"])
|
||||||
|
self.assertIn("usage:", output.lower())
|
||||||
|
self.assertIn("mirror", output.lower())
|
||||||
|
|
||||||
|
def test_mirror_list_preview_all(self) -> None:
|
||||||
|
output = self._run_pkgmgr(["mirror", "list", "--preview", "--all"])
|
||||||
|
self.assertTrue(output.strip(), "Expected output from mirror list")
|
||||||
|
|
||||||
|
def test_mirror_diff_preview_all(self) -> None:
|
||||||
|
output = self._run_pkgmgr(["mirror", "diff", "--preview", "--all"])
|
||||||
|
self.assertTrue(output.strip(), "Expected output from mirror diff")
|
||||||
|
|
||||||
|
def test_mirror_merge_config_to_file_preview_all(self) -> None:
|
||||||
|
output = self._run_pkgmgr(["mirror", "merge", "config", "file", "--preview", "--all"])
|
||||||
|
self.assertTrue(output.strip(), "Expected output from mirror merge (config -> file)")
|
||||||
|
|
||||||
|
def test_mirror_setup_preview_all(self) -> None:
|
||||||
|
output = self._run_pkgmgr(["mirror", "setup", "--preview", "--all"])
|
||||||
|
self.assertTrue(output.strip(), "Expected output from mirror setup")
|
||||||
|
|
||||||
|
def test_mirror_check_preview_all(self) -> None:
|
||||||
|
output = self._run_pkgmgr(["mirror", "check", "--preview", "--all"])
|
||||||
|
self.assertTrue(output.strip(), "Expected output from mirror check")
|
||||||
|
|
||||||
|
def test_mirror_provision_preview_all(self) -> None:
|
||||||
|
output = self._run_pkgmgr(["mirror", "provision", "--preview", "--all"])
|
||||||
|
self.assertTrue(output.strip(), "Expected output from mirror provision (preview)")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
63
tests/integration/test_nix_profile_list_json.py
Normal file
63
tests/integration/test_nix_profile_list_json.py
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import unittest
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class FakeRunResult:
|
||||||
|
"""
|
||||||
|
Mimics your runner returning a structured result object.
|
||||||
|
"""
|
||||||
|
returncode: int
|
||||||
|
stdout: str
|
||||||
|
stderr: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
class FakeRunner:
|
||||||
|
"""
|
||||||
|
Minimal runner stub: returns exactly what we configure.
|
||||||
|
"""
|
||||||
|
def __init__(self, result):
|
||||||
|
self._result = result
|
||||||
|
|
||||||
|
def run(self, ctx, cmd: str, allow_failure: bool = False):
|
||||||
|
return self._result
|
||||||
|
|
||||||
|
|
||||||
|
class TestE2ENixProfileListJsonParsing(unittest.TestCase):
|
||||||
|
"""
|
||||||
|
This test verifies that NixProfileInspector can parse `nix profile list --json`
|
||||||
|
regardless of whether the CommandRunner returns:
|
||||||
|
- raw stdout string, OR
|
||||||
|
- a RunResult-like object with a `.stdout` attribute.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def test_list_json_accepts_raw_string(self) -> None:
|
||||||
|
from pkgmgr.actions.install.installers.nix.profile import NixProfileInspector
|
||||||
|
|
||||||
|
payload = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}
|
||||||
|
raw = json.dumps(payload)
|
||||||
|
|
||||||
|
runner = FakeRunner(raw)
|
||||||
|
inspector = NixProfileInspector()
|
||||||
|
|
||||||
|
data = inspector.list_json(ctx=None, runner=runner)
|
||||||
|
self.assertEqual(data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")
|
||||||
|
|
||||||
|
def test_list_json_accepts_runresult_object(self) -> None:
|
||||||
|
from pkgmgr.actions.install.installers.nix.profile import NixProfileInspector
|
||||||
|
|
||||||
|
payload = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}
|
||||||
|
raw = json.dumps(payload)
|
||||||
|
|
||||||
|
runner = FakeRunner(FakeRunResult(returncode=0, stdout=raw))
|
||||||
|
inspector = NixProfileInspector()
|
||||||
|
|
||||||
|
data = inspector.list_json(ctx=None, runner=runner)
|
||||||
|
self.assertEqual(data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
@@ -23,7 +23,7 @@ from unittest.mock import patch
|
|||||||
import pkgmgr.actions.install as install_mod
|
import pkgmgr.actions.install as install_mod
|
||||||
from pkgmgr.actions.install import install_repos
|
from pkgmgr.actions.install import install_repos
|
||||||
from pkgmgr.actions.install.installers.makefile import MakefileInstaller
|
from pkgmgr.actions.install.installers.makefile import MakefileInstaller
|
||||||
from pkgmgr.actions.install.installers.nix_flake import NixFlakeInstaller
|
from pkgmgr.actions.install.installers.nix import NixFlakeInstaller
|
||||||
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import (
|
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import (
|
||||||
ArchPkgbuildInstaller,
|
ArchPkgbuildInstaller,
|
||||||
)
|
)
|
||||||
|
|||||||
65
tests/integration/test_repository_paths_exist.py
Normal file
65
tests/integration/test_repository_paths_exist.py
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import unittest
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from pkgmgr.core.repository.paths import resolve_repo_paths
|
||||||
|
|
||||||
|
|
||||||
|
def _find_repo_root() -> Path:
|
||||||
|
"""
|
||||||
|
Locate the pkgmgr repository root from the test location.
|
||||||
|
|
||||||
|
Assumes:
|
||||||
|
repo_root/
|
||||||
|
src/pkgmgr/...
|
||||||
|
tests/integration/...
|
||||||
|
"""
|
||||||
|
here = Path(__file__).resolve()
|
||||||
|
for parent in here.parents:
|
||||||
|
if (parent / "pyproject.toml").is_file() and (parent / "src" / "pkgmgr").is_dir():
|
||||||
|
return parent
|
||||||
|
raise RuntimeError("Could not determine repository root for pkgmgr integration test")
|
||||||
|
|
||||||
|
|
||||||
|
class TestRepositoryPathsExist(unittest.TestCase):
|
||||||
|
"""
|
||||||
|
Integration test: pkgmgr is the TEMPLATE repository.
|
||||||
|
All canonical paths resolved for pkgmgr must exist.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def test_pkgmgr_repository_paths_exist(self) -> None:
|
||||||
|
repo_root = _find_repo_root()
|
||||||
|
paths = resolve_repo_paths(str(repo_root))
|
||||||
|
|
||||||
|
missing: list[str] = []
|
||||||
|
|
||||||
|
def require(path: str | None, description: str) -> None:
|
||||||
|
if not path:
|
||||||
|
missing.append(f"{description}: <not resolved>")
|
||||||
|
return
|
||||||
|
if not os.path.isfile(path):
|
||||||
|
missing.append(f"{description}: {path} (missing)")
|
||||||
|
|
||||||
|
# Core metadata
|
||||||
|
require(paths.pyproject_toml, "pyproject.toml")
|
||||||
|
require(paths.flake_nix, "flake.nix")
|
||||||
|
|
||||||
|
# Human changelog
|
||||||
|
require(paths.changelog_md, "CHANGELOG.md")
|
||||||
|
|
||||||
|
# Packaging files (pkgmgr defines the template)
|
||||||
|
require(paths.arch_pkgbuild, "Arch PKGBUILD")
|
||||||
|
require(paths.debian_changelog, "Debian changelog")
|
||||||
|
require(paths.rpm_spec, "RPM spec file")
|
||||||
|
|
||||||
|
if missing:
|
||||||
|
self.fail(
|
||||||
|
"pkgmgr repository does not satisfy the canonical repository layout:\n"
|
||||||
|
+ "\n".join(f" - {item}" for item in missing)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
44
tests/unit/pkgmgr/actions/install/installers/nix/_fakes.py
Normal file
44
tests/unit/pkgmgr/actions/install/installers/nix/_fakes.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class FakeRunResult:
|
||||||
|
returncode: int
|
||||||
|
stdout: str = ""
|
||||||
|
stderr: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
class FakeRunner:
|
||||||
|
"""
|
||||||
|
Minimal runner stub compatible with:
|
||||||
|
- CommandRunner.run(ctx, cmd, allow_failure=...)
|
||||||
|
- Generic runner.run(ctx, cmd, allow_failure=...)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, mapping: Optional[dict[str, Any]] = None, default: Any = None):
|
||||||
|
self.mapping = mapping or {}
|
||||||
|
self.default = default if default is not None else FakeRunResult(0, "", "")
|
||||||
|
self.calls: list[tuple[Any, str, bool]] = []
|
||||||
|
|
||||||
|
def run(self, ctx, cmd: str, allow_failure: bool = False):
|
||||||
|
self.calls.append((ctx, cmd, allow_failure))
|
||||||
|
return self.mapping.get(cmd, self.default)
|
||||||
|
|
||||||
|
|
||||||
|
class FakeRetry:
|
||||||
|
"""
|
||||||
|
Mimics GitHubRateLimitRetry.run_with_retry(ctx, runner, cmd)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, results: list[FakeRunResult]):
|
||||||
|
self._results = list(results)
|
||||||
|
self.calls: list[str] = []
|
||||||
|
|
||||||
|
def run_with_retry(self, ctx, runner, cmd: str):
|
||||||
|
self.calls.append(cmd)
|
||||||
|
if self._results:
|
||||||
|
return self._results.pop(0)
|
||||||
|
return FakeRunResult(0, "", "")
|
||||||
@@ -0,0 +1,58 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from pkgmgr.actions.install.installers.nix.conflicts import NixConflictResolver
|
||||||
|
from ._fakes import FakeRunResult, FakeRunner, FakeRetry
|
||||||
|
|
||||||
|
|
||||||
|
class DummyCtx:
|
||||||
|
quiet = True
|
||||||
|
|
||||||
|
|
||||||
|
class TestNixConflictResolver(unittest.TestCase):
|
||||||
|
def test_resolve_removes_tokens_and_retries_success(self) -> None:
|
||||||
|
ctx = DummyCtx()
|
||||||
|
install_cmd = "nix profile install /repo#default"
|
||||||
|
|
||||||
|
stderr = '''
|
||||||
|
error: An existing package already provides the following file:
|
||||||
|
/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr/bin/pkgmgr
|
||||||
|
'''
|
||||||
|
|
||||||
|
runner = FakeRunner(mapping={
|
||||||
|
"nix profile remove pkgmgr": FakeRunResult(0, "", ""),
|
||||||
|
})
|
||||||
|
retry = FakeRetry(results=[FakeRunResult(0, "", "")])
|
||||||
|
|
||||||
|
class FakeProfile:
|
||||||
|
def find_remove_tokens_for_store_prefixes(self, ctx, runner, prefixes):
|
||||||
|
return []
|
||||||
|
def find_remove_tokens_for_output(self, ctx, runner, output):
|
||||||
|
return ["pkgmgr"]
|
||||||
|
|
||||||
|
resolver = NixConflictResolver(runner=runner, retry=retry, profile=FakeProfile())
|
||||||
|
ok = resolver.resolve(ctx, install_cmd, stdout="", stderr=stderr, output="pkgmgr", max_rounds=2)
|
||||||
|
self.assertTrue(ok)
|
||||||
|
self.assertIn("nix profile remove pkgmgr", [c[1] for c in runner.calls])
|
||||||
|
|
||||||
|
def test_resolve_uses_textual_remove_tokens_last_resort(self) -> None:
|
||||||
|
ctx = DummyCtx()
|
||||||
|
install_cmd = "nix profile install /repo#default"
|
||||||
|
|
||||||
|
stderr = "hint: try:\n nix profile remove 'pkgmgr-1'\n"
|
||||||
|
runner = FakeRunner(mapping={
|
||||||
|
"nix profile remove pkgmgr-1": FakeRunResult(0, "", ""),
|
||||||
|
})
|
||||||
|
retry = FakeRetry(results=[FakeRunResult(0, "", "")])
|
||||||
|
|
||||||
|
class FakeProfile:
|
||||||
|
def find_remove_tokens_for_store_prefixes(self, ctx, runner, prefixes):
|
||||||
|
return []
|
||||||
|
def find_remove_tokens_for_output(self, ctx, runner, output):
|
||||||
|
return []
|
||||||
|
|
||||||
|
resolver = NixConflictResolver(runner=runner, retry=retry, profile=FakeProfile())
|
||||||
|
ok = resolver.resolve(ctx, install_cmd, stdout="", stderr=stderr, output="pkgmgr", max_rounds=2)
|
||||||
|
self.assertTrue(ok)
|
||||||
|
self.assertIn("nix profile remove pkgmgr-1", [c[1] for c in runner.calls])
|
||||||
@@ -0,0 +1,62 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from pkgmgr.actions.install.installers.nix.profile import NixProfileInspector
|
||||||
|
from ._fakes import FakeRunResult, FakeRunner
|
||||||
|
|
||||||
|
|
||||||
|
class TestNixProfileInspector(unittest.TestCase):
|
||||||
|
def test_list_json_accepts_raw_string(self) -> None:
|
||||||
|
payload = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}
|
||||||
|
raw = json.dumps(payload)
|
||||||
|
runner = FakeRunner(default=raw)
|
||||||
|
insp = NixProfileInspector()
|
||||||
|
data = insp.list_json(ctx=None, runner=runner)
|
||||||
|
self.assertEqual(data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")
|
||||||
|
|
||||||
|
def test_list_json_accepts_result_object(self) -> None:
|
||||||
|
payload = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}
|
||||||
|
raw = json.dumps(payload)
|
||||||
|
runner = FakeRunner(default=FakeRunResult(0, stdout=raw))
|
||||||
|
insp = NixProfileInspector()
|
||||||
|
data = insp.list_json(ctx=None, runner=runner)
|
||||||
|
self.assertEqual(data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")
|
||||||
|
|
||||||
|
def test_find_remove_tokens_for_output_includes_output_first(self) -> None:
|
||||||
|
payload = {
|
||||||
|
"elements": {
|
||||||
|
"pkgmgr-1": {"name": "pkgmgr-1", "attrPath": "packages.x86_64-linux.pkgmgr"},
|
||||||
|
"default-1": {"name": "default-1", "attrPath": "packages.x86_64-linux.default"},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
raw = json.dumps(payload)
|
||||||
|
runner = FakeRunner(default=FakeRunResult(0, stdout=raw))
|
||||||
|
insp = NixProfileInspector()
|
||||||
|
tokens = insp.find_remove_tokens_for_output(ctx=None, runner=runner, output="pkgmgr")
|
||||||
|
self.assertEqual(tokens[0], "pkgmgr")
|
||||||
|
self.assertIn("pkgmgr-1", tokens)
|
||||||
|
|
||||||
|
def test_find_remove_tokens_for_store_prefixes(self) -> None:
|
||||||
|
payload = {
|
||||||
|
"elements": {
|
||||||
|
"pkgmgr-1": {
|
||||||
|
"name": "pkgmgr-1",
|
||||||
|
"attrPath": "packages.x86_64-linux.pkgmgr",
|
||||||
|
"storePaths": ["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
|
||||||
|
},
|
||||||
|
"something": {
|
||||||
|
"name": "other",
|
||||||
|
"attrPath": "packages.x86_64-linux.other",
|
||||||
|
"storePaths": ["/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-other"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
raw = json.dumps(payload)
|
||||||
|
runner = FakeRunner(default=FakeRunResult(0, stdout=raw))
|
||||||
|
insp = NixProfileInspector()
|
||||||
|
tokens = insp.find_remove_tokens_for_store_prefixes(
|
||||||
|
ctx=None, runner=runner, prefixes=["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"]
|
||||||
|
)
|
||||||
|
self.assertIn("pkgmgr-1", tokens)
|
||||||
@@ -0,0 +1,88 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
from unittest.mock import MagicMock
|
||||||
|
|
||||||
|
from pkgmgr.actions.install.installers.nix.installer import NixFlakeInstaller
|
||||||
|
from ._fakes import FakeRunResult
|
||||||
|
|
||||||
|
|
||||||
|
class DummyCtx:
|
||||||
|
def __init__(self, identifier: str = "x", repo_dir: str = "/repo", quiet: bool = True, force_update: bool = False):
|
||||||
|
self.identifier = identifier
|
||||||
|
self.repo_dir = repo_dir
|
||||||
|
self.quiet = quiet
|
||||||
|
self.force_update = force_update
|
||||||
|
|
||||||
|
|
||||||
|
class TestNixFlakeInstallerCore(unittest.TestCase):
|
||||||
|
def test_install_only_success_returns(self) -> None:
|
||||||
|
ins = NixFlakeInstaller()
|
||||||
|
ins.supports = MagicMock(return_value=True)
|
||||||
|
|
||||||
|
ins._retry = MagicMock()
|
||||||
|
ins._retry.run_with_retry.return_value = FakeRunResult(0, "", "")
|
||||||
|
ins._conflicts = MagicMock()
|
||||||
|
ins._profile = MagicMock()
|
||||||
|
ins._runner = MagicMock()
|
||||||
|
|
||||||
|
ctx = DummyCtx(identifier="lib", repo_dir="/repo", quiet=True)
|
||||||
|
ins.run(ctx)
|
||||||
|
ins._retry.run_with_retry.assert_called()
|
||||||
|
|
||||||
|
def test_conflict_resolver_success_short_circuits(self) -> None:
|
||||||
|
ins = NixFlakeInstaller()
|
||||||
|
ins.supports = MagicMock(return_value=True)
|
||||||
|
|
||||||
|
ins._retry = MagicMock()
|
||||||
|
ins._retry.run_with_retry.return_value = FakeRunResult(1, "out", "err")
|
||||||
|
ins._conflicts = MagicMock()
|
||||||
|
ins._conflicts.resolve.return_value = True
|
||||||
|
ins._profile = MagicMock()
|
||||||
|
ins._runner = MagicMock()
|
||||||
|
|
||||||
|
ctx = DummyCtx(identifier="lib", repo_dir="/repo", quiet=True)
|
||||||
|
ins.run(ctx)
|
||||||
|
ins._conflicts.resolve.assert_called()
|
||||||
|
|
||||||
|
def test_mandatory_failure_raises_systemexit(self) -> None:
|
||||||
|
ins = NixFlakeInstaller()
|
||||||
|
ins.supports = MagicMock(return_value=True)
|
||||||
|
|
||||||
|
ins._retry = MagicMock()
|
||||||
|
ins._retry.run_with_retry.return_value = FakeRunResult(2, "", "no")
|
||||||
|
ins._conflicts = MagicMock()
|
||||||
|
ins._conflicts.resolve.return_value = False
|
||||||
|
ins._profile = MagicMock()
|
||||||
|
ins._profile.find_installed_indices_for_output.return_value = []
|
||||||
|
ins._runner = MagicMock()
|
||||||
|
ins._runner.run.return_value = FakeRunResult(2, "", "")
|
||||||
|
|
||||||
|
ctx = DummyCtx(identifier="lib", repo_dir="/repo", quiet=True)
|
||||||
|
with self.assertRaises(SystemExit) as cm:
|
||||||
|
ins.run(ctx)
|
||||||
|
self.assertEqual(cm.exception.code, 2)
|
||||||
|
|
||||||
|
def test_optional_failure_does_not_raise(self) -> None:
|
||||||
|
ins = NixFlakeInstaller()
|
||||||
|
ins.supports = MagicMock(return_value=True)
|
||||||
|
|
||||||
|
results = [
|
||||||
|
FakeRunResult(0, "", ""),
|
||||||
|
FakeRunResult(2, "", ""),
|
||||||
|
]
|
||||||
|
|
||||||
|
def run_with_retry(ctx, runner, cmd):
|
||||||
|
return results.pop(0)
|
||||||
|
|
||||||
|
ins._retry = MagicMock()
|
||||||
|
ins._retry.run_with_retry.side_effect = run_with_retry
|
||||||
|
ins._conflicts = MagicMock()
|
||||||
|
ins._conflicts.resolve.return_value = False
|
||||||
|
ins._profile = MagicMock()
|
||||||
|
ins._profile.find_installed_indices_for_output.return_value = []
|
||||||
|
ins._runner = MagicMock()
|
||||||
|
ins._runner.run.return_value = FakeRunResult(2, "", "")
|
||||||
|
|
||||||
|
ctx = DummyCtx(identifier="pkgmgr", repo_dir="/repo", quiet=True)
|
||||||
|
ins.run(ctx) # must not raise
|
||||||
136
tests/unit/pkgmgr/actions/install/installers/nix/test_legacy.py
Normal file
136
tests/unit/pkgmgr/actions/install/installers/nix/test_legacy.py
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
"""
|
||||||
|
Unit tests for NixFlakeInstaller using unittest (no pytest).
|
||||||
|
|
||||||
|
Covers:
|
||||||
|
- Successful installation (returncode == 0)
|
||||||
|
- Mandatory failure → SystemExit with correct code
|
||||||
|
- Optional failure (pkgmgr default) → no raise, but warning
|
||||||
|
- supports() behavior incl. PKGMGR_DISABLE_NIX_FLAKE_INSTALLER
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import io
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import tempfile
|
||||||
|
import unittest
|
||||||
|
from contextlib import redirect_stdout
|
||||||
|
from typing import List
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
from pkgmgr.actions.install.installers.nix import NixFlakeInstaller
|
||||||
|
|
||||||
|
|
||||||
|
class DummyCtx:
|
||||||
|
"""Minimal context object to satisfy NixFlakeInstaller.run() / supports()."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
identifier: str,
|
||||||
|
repo_dir: str,
|
||||||
|
preview: bool = False,
|
||||||
|
quiet: bool = False,
|
||||||
|
force_update: bool = False,
|
||||||
|
):
|
||||||
|
self.identifier = identifier
|
||||||
|
self.repo_dir = repo_dir
|
||||||
|
self.preview = preview
|
||||||
|
self.quiet = quiet
|
||||||
|
self.force_update = force_update
|
||||||
|
|
||||||
|
|
||||||
|
class TestNixFlakeInstaller(unittest.TestCase):
|
||||||
|
def setUp(self) -> None:
|
||||||
|
# Create a temporary repository directory with a flake.nix file
|
||||||
|
self._tmpdir = tempfile.mkdtemp(prefix="nix_flake_test_")
|
||||||
|
self.repo_dir = self._tmpdir
|
||||||
|
flake_path = os.path.join(self.repo_dir, "flake.nix")
|
||||||
|
with open(flake_path, "w", encoding="utf-8") as f:
|
||||||
|
f.write("{}\n")
|
||||||
|
|
||||||
|
# Ensure the disable env var is not set by default
|
||||||
|
os.environ.pop("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER", None)
|
||||||
|
|
||||||
|
def tearDown(self) -> None:
|
||||||
|
if os.path.isdir(self._tmpdir):
|
||||||
|
shutil.rmtree(self._tmpdir, ignore_errors=True)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _cp(code: int, stdout: str = "", stderr: str = "") -> subprocess.CompletedProcess:
|
||||||
|
return subprocess.CompletedProcess(args=["nix"], returncode=code, stdout=stdout, stderr=stderr)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _enable_nix_in_module(which_patch) -> None:
|
||||||
|
"""Ensure shutil.which('nix') in nix installer module returns a path."""
|
||||||
|
which_patch.return_value = "/usr/bin/nix"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _install_cmds_from_calls(call_args_list) -> List[str]:
|
||||||
|
cmds: List[str] = []
|
||||||
|
for c in call_args_list:
|
||||||
|
if not c.args:
|
||||||
|
continue
|
||||||
|
cmd = c.args[0]
|
||||||
|
if isinstance(cmd, str) and cmd.startswith("nix profile install "):
|
||||||
|
cmds.append(cmd)
|
||||||
|
return cmds
|
||||||
|
|
||||||
|
def test_nix_flake_run_success(self) -> None:
|
||||||
|
"""
|
||||||
|
When install returns success (returncode 0), installer
|
||||||
|
should report success and not raise.
|
||||||
|
"""
|
||||||
|
ctx = DummyCtx(identifier="some-lib", repo_dir=self.repo_dir)
|
||||||
|
installer = NixFlakeInstaller()
|
||||||
|
|
||||||
|
install_results = [self._cp(0)] # first install succeeds
|
||||||
|
|
||||||
|
def fake_subprocess_run(cmd, *args, **kwargs):
|
||||||
|
# cmd is a string because CommandRunner uses shell=True
|
||||||
|
if isinstance(cmd, str) and cmd.startswith("nix profile list --json"):
|
||||||
|
return self._cp(0, stdout='{"elements": []}', stderr="")
|
||||||
|
if isinstance(cmd, str) and cmd.startswith("nix profile install "):
|
||||||
|
return install_results.pop(0)
|
||||||
|
return self._cp(0)
|
||||||
|
|
||||||
|
buf = io.StringIO()
|
||||||
|
with patch("pkgmgr.actions.install.installers.nix.installer.shutil.which") as which_mock, patch(
|
||||||
|
"pkgmgr.actions.install.installers.nix.installer.os.path.exists", return_value=True
|
||||||
|
), patch(
|
||||||
|
"pkgmgr.actions.install.installers.nix.runner.subprocess.run", side_effect=fake_subprocess_run
|
||||||
|
) as subproc_mock, redirect_stdout(buf):
|
||||||
|
self._enable_nix_in_module(which_mock)
|
||||||
|
|
||||||
|
self.assertTrue(installer.supports(ctx))
|
||||||
|
installer.run(ctx)
|
||||||
|
|
||||||
|
out = buf.getvalue()
|
||||||
|
self.assertIn("[nix] install: nix profile install", out)
|
||||||
|
self.assertIn("[nix] output 'default' successfully installed.", out)
|
||||||
|
|
||||||
|
install_cmds = self._install_cmds_from_calls(subproc_mock.call_args_list)
|
||||||
|
self.assertEqual(install_cmds, [f"nix profile install {self.repo_dir}#default"])
|
||||||
|
|
||||||
|
def test_nix_flake_supports_respects_disable_env(self) -> None:
|
||||||
|
"""
|
||||||
|
PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 must disable the installer,
|
||||||
|
even if flake.nix exists and nix is available.
|
||||||
|
"""
|
||||||
|
ctx = DummyCtx(identifier="pkgmgr", repo_dir=self.repo_dir, quiet=False)
|
||||||
|
installer = NixFlakeInstaller()
|
||||||
|
|
||||||
|
with patch("pkgmgr.actions.install.installers.nix.installer.shutil.which") as which_mock, patch(
|
||||||
|
"pkgmgr.actions.install.installers.nix.installer.os.path.exists", return_value=True
|
||||||
|
):
|
||||||
|
self._enable_nix_in_module(which_mock)
|
||||||
|
os.environ["PKGMGR_DISABLE_NIX_FLAKE_INSTALLER"] = "1"
|
||||||
|
self.assertFalse(installer.supports(ctx))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
@@ -0,0 +1,37 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from pkgmgr.actions.install.installers.nix.profile.models import NixProfileEntry
|
||||||
|
from pkgmgr.actions.install.installers.nix.profile.matcher import entry_matches_output, entry_matches_store_path
|
||||||
|
|
||||||
|
|
||||||
|
class TestMatcher(unittest.TestCase):
|
||||||
|
def _e(self, name: str, attr: str) -> NixProfileEntry:
|
||||||
|
return NixProfileEntry(
|
||||||
|
key="pkgmgr-1",
|
||||||
|
index=None,
|
||||||
|
name=name,
|
||||||
|
attr_path=attr,
|
||||||
|
store_paths=["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_matches_direct_name(self) -> None:
|
||||||
|
self.assertTrue(entry_matches_output(self._e("pkgmgr", ""), "pkgmgr"))
|
||||||
|
|
||||||
|
def test_matches_attrpath_hash(self) -> None:
|
||||||
|
self.assertTrue(entry_matches_output(self._e("", "github:me/repo#pkgmgr"), "pkgmgr"))
|
||||||
|
|
||||||
|
def test_matches_attrpath_dot_suffix(self) -> None:
|
||||||
|
self.assertTrue(entry_matches_output(self._e("", "packages.x86_64-linux.pkgmgr"), "pkgmgr"))
|
||||||
|
|
||||||
|
def test_matches_name_with_suffix_number(self) -> None:
|
||||||
|
self.assertTrue(entry_matches_output(self._e("pkgmgr-1", ""), "pkgmgr"))
|
||||||
|
|
||||||
|
def test_package_manager_special_case(self) -> None:
|
||||||
|
self.assertTrue(entry_matches_output(self._e("package-manager-2", ""), "pkgmgr"))
|
||||||
|
|
||||||
|
def test_store_path_match(self) -> None:
|
||||||
|
entry = self._e("pkgmgr-1", "")
|
||||||
|
self.assertTrue(entry_matches_store_path(entry, "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"))
|
||||||
|
self.assertFalse(entry_matches_store_path(entry, "/nix/store/cccccccccccccccccccccccccccccccc-zzz"))
|
||||||
@@ -0,0 +1,106 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
from pkgmgr.actions.install.installers.nix.retry import GitHubRateLimitRetry, RetryPolicy
|
||||||
|
from pkgmgr.actions.install.installers.nix.types import RunResult
|
||||||
|
|
||||||
|
|
||||||
|
class DummyCtx:
|
||||||
|
def __init__(self, quiet: bool = True) -> None:
|
||||||
|
self.quiet = quiet
|
||||||
|
|
||||||
|
|
||||||
|
class FakeRunner:
|
||||||
|
"""
|
||||||
|
Simulates a runner that returns:
|
||||||
|
- HTTP 403 for the first N calls
|
||||||
|
- success afterwards
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, fail_count: int) -> None:
|
||||||
|
self.fail_count = fail_count
|
||||||
|
self.calls = 0
|
||||||
|
|
||||||
|
def run(self, ctx: DummyCtx, cmd: str, allow_failure: bool) -> RunResult:
|
||||||
|
self.calls += 1
|
||||||
|
|
||||||
|
if self.calls <= self.fail_count:
|
||||||
|
return RunResult(
|
||||||
|
returncode=1,
|
||||||
|
stdout="",
|
||||||
|
stderr="error: HTTP error 403: rate limit exceeded (simulated)",
|
||||||
|
)
|
||||||
|
|
||||||
|
return RunResult(returncode=0, stdout="ok", stderr="")
|
||||||
|
|
||||||
|
|
||||||
|
class TestGitHub403Retry(unittest.TestCase):
|
||||||
|
def test_retries_on_403_without_realtime_waiting(self) -> None:
|
||||||
|
"""
|
||||||
|
Ensure:
|
||||||
|
- It retries only on GitHub 403-like errors
|
||||||
|
- It does not actually sleep in realtime (time.sleep patched)
|
||||||
|
- It stops once a success occurs
|
||||||
|
- Wait times follow Fibonacci(base=30) + jitter
|
||||||
|
"""
|
||||||
|
policy = RetryPolicy(
|
||||||
|
max_attempts=3, # attempts: 1,2,3
|
||||||
|
base_delay_seconds=30, # fibonacci delays: 30, 30, 60
|
||||||
|
jitter_seconds_min=0,
|
||||||
|
jitter_seconds_max=60,
|
||||||
|
)
|
||||||
|
|
||||||
|
retry = GitHubRateLimitRetry(policy=policy)
|
||||||
|
ctx = DummyCtx(quiet=True)
|
||||||
|
runner = FakeRunner(fail_count=2) # fail twice (403), then succeed
|
||||||
|
|
||||||
|
# Make jitter deterministic and prevent real sleeping.
|
||||||
|
with patch("pkgmgr.actions.install.installers.nix.retry.random.randint", return_value=5) as jitter_mock, patch(
|
||||||
|
"pkgmgr.actions.install.installers.nix.retry.time.sleep"
|
||||||
|
) as sleep_mock:
|
||||||
|
res = retry.run_with_retry(ctx, runner, "nix profile install /tmp#default")
|
||||||
|
|
||||||
|
# Result should be success on 3rd attempt.
|
||||||
|
self.assertEqual(res.returncode, 0)
|
||||||
|
self.assertEqual(runner.calls, 3)
|
||||||
|
|
||||||
|
# jitter should be used for each retry sleep (attempt 1->2, attempt 2->3) => 2 sleeps
|
||||||
|
self.assertEqual(jitter_mock.call_count, 2)
|
||||||
|
self.assertEqual(sleep_mock.call_count, 2)
|
||||||
|
|
||||||
|
# Fibonacci delays for attempts=3: [30, 30, 60]
|
||||||
|
# sleep occurs after failed attempt 1 and 2, so base delays are 30 and 30
|
||||||
|
# wait_time = base_delay + jitter(5) => 35, 35
|
||||||
|
sleep_args = [c.args[0] for c in sleep_mock.call_args_list]
|
||||||
|
self.assertEqual(sleep_args, [35, 35])
|
||||||
|
|
||||||
|
def test_does_not_retry_on_non_403_errors(self) -> None:
|
||||||
|
"""
|
||||||
|
Ensure it does not retry when the error is not recognized as GitHub 403/rate limit.
|
||||||
|
"""
|
||||||
|
policy = RetryPolicy(max_attempts=7, base_delay_seconds=30)
|
||||||
|
retry = GitHubRateLimitRetry(policy=policy)
|
||||||
|
ctx = DummyCtx(quiet=True)
|
||||||
|
|
||||||
|
class Non403Runner:
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self.calls = 0
|
||||||
|
|
||||||
|
def run(self, ctx: DummyCtx, cmd: str, allow_failure: bool) -> RunResult:
|
||||||
|
self.calls += 1
|
||||||
|
return RunResult(returncode=1, stdout="", stderr="some other error (simulated)")
|
||||||
|
|
||||||
|
runner = Non403Runner()
|
||||||
|
|
||||||
|
with patch("pkgmgr.actions.install.installers.nix.retry.time.sleep") as sleep_mock:
|
||||||
|
res = retry.run_with_retry(ctx, runner, "nix profile install /tmp#default")
|
||||||
|
|
||||||
|
self.assertEqual(res.returncode, 1)
|
||||||
|
self.assertEqual(runner.calls, 1) # no retries
|
||||||
|
self.assertEqual(sleep_mock.call_count, 0)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
@@ -0,0 +1,39 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from pkgmgr.actions.install.installers.nix.profile.normalizer import coerce_index, normalize_elements
|
||||||
|
|
||||||
|
|
||||||
|
class TestNormalizer(unittest.TestCase):
|
||||||
|
def test_coerce_index_numeric_key(self) -> None:
|
||||||
|
self.assertEqual(coerce_index("3", {"name": "x"}), 3)
|
||||||
|
|
||||||
|
def test_coerce_index_explicit_field(self) -> None:
|
||||||
|
self.assertEqual(coerce_index("pkgmgr-1", {"index": 7}), 7)
|
||||||
|
self.assertEqual(coerce_index("pkgmgr-1", {"id": "8"}), 8)
|
||||||
|
|
||||||
|
def test_coerce_index_trailing_number(self) -> None:
|
||||||
|
self.assertEqual(coerce_index("pkgmgr-42", {"name": "x"}), 42)
|
||||||
|
|
||||||
|
def test_normalize_elements_handles_missing_elements(self) -> None:
|
||||||
|
self.assertEqual(normalize_elements({}), [])
|
||||||
|
|
||||||
|
def test_normalize_elements_collects_store_paths(self) -> None:
|
||||||
|
data = {
|
||||||
|
"elements": {
|
||||||
|
"pkgmgr-1": {
|
||||||
|
"name": "pkgmgr-1",
|
||||||
|
"attrPath": "packages.x86_64-linux.pkgmgr",
|
||||||
|
"storePaths": ["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
|
||||||
|
},
|
||||||
|
"2": {
|
||||||
|
"name": "foo",
|
||||||
|
"attrPath": "packages.x86_64-linux.default",
|
||||||
|
"storePath": "/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-foo",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
entries = normalize_elements(data)
|
||||||
|
self.assertEqual(len(entries), 2)
|
||||||
|
self.assertTrue(entries[0].store_paths)
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from pkgmgr.actions.install.installers.nix.profile.parser import parse_profile_list_json
|
||||||
|
|
||||||
|
|
||||||
|
class TestParseProfileListJson(unittest.TestCase):
|
||||||
|
def test_parses_valid_json(self) -> None:
|
||||||
|
payload = {"elements": {"0": {"name": "pkgmgr"}}}
|
||||||
|
raw = json.dumps(payload)
|
||||||
|
self.assertEqual(parse_profile_list_json(raw)["elements"]["0"]["name"], "pkgmgr")
|
||||||
|
|
||||||
|
def test_raises_systemexit_on_invalid_json(self) -> None:
|
||||||
|
with self.assertRaises(SystemExit) as cm:
|
||||||
|
parse_profile_list_json("{not json")
|
||||||
|
self.assertIn("Failed to parse", str(cm.exception))
|
||||||
@@ -0,0 +1,29 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from pkgmgr.actions.install.installers.nix.profile_list import NixProfileListReader
|
||||||
|
from ._fakes import FakeRunResult, FakeRunner
|
||||||
|
|
||||||
|
|
||||||
|
class TestNixProfileListReader(unittest.TestCase):
|
||||||
|
def test_entries_parses_indices_and_store_prefixes(self) -> None:
|
||||||
|
out = '''
|
||||||
|
0 something /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr
|
||||||
|
1 something /nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-foo
|
||||||
|
'''
|
||||||
|
runner = FakeRunner(default=FakeRunResult(0, stdout=out))
|
||||||
|
reader = NixProfileListReader(runner=runner)
|
||||||
|
entries = reader.entries(ctx=None)
|
||||||
|
self.assertEqual(entries[0][0], 0)
|
||||||
|
self.assertTrue(entries[0][1].startswith("/nix/store/"))
|
||||||
|
|
||||||
|
def test_indices_matching_store_prefixes(self) -> None:
|
||||||
|
out = " 7 x /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr\n"
|
||||||
|
runner = FakeRunner(default=FakeRunResult(0, stdout=out))
|
||||||
|
reader = NixProfileListReader(runner=runner)
|
||||||
|
hits = reader.indices_matching_store_prefixes(
|
||||||
|
ctx=None,
|
||||||
|
prefixes=["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
|
||||||
|
)
|
||||||
|
self.assertEqual(hits, [7])
|
||||||
@@ -0,0 +1,29 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from pkgmgr.actions.install.installers.nix.profile.result import extract_stdout_text
|
||||||
|
|
||||||
|
|
||||||
|
class TestExtractStdoutText(unittest.TestCase):
|
||||||
|
def test_accepts_string(self) -> None:
|
||||||
|
self.assertEqual(extract_stdout_text("hello"), "hello")
|
||||||
|
|
||||||
|
def test_accepts_bytes(self) -> None:
|
||||||
|
self.assertEqual(extract_stdout_text(b"hi"), "hi")
|
||||||
|
|
||||||
|
def test_accepts_object_with_stdout_str(self) -> None:
|
||||||
|
class R:
|
||||||
|
stdout = "ok"
|
||||||
|
self.assertEqual(extract_stdout_text(R()), "ok")
|
||||||
|
|
||||||
|
def test_accepts_object_with_stdout_bytes(self) -> None:
|
||||||
|
class R:
|
||||||
|
stdout = b"ok"
|
||||||
|
self.assertEqual(extract_stdout_text(R()), "ok")
|
||||||
|
|
||||||
|
def test_fallback_str(self) -> None:
|
||||||
|
class R:
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return "repr"
|
||||||
|
self.assertEqual(extract_stdout_text(R()), "repr")
|
||||||
@@ -0,0 +1,30 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from pkgmgr.actions.install.installers.nix.textparse import NixConflictTextParser
|
||||||
|
|
||||||
|
|
||||||
|
class TestNixConflictTextParser(unittest.TestCase):
|
||||||
|
def test_remove_tokens_parses_unquoted_and_quoted(self) -> None:
|
||||||
|
t = NixConflictTextParser()
|
||||||
|
text = '''
|
||||||
|
nix profile remove pkgmgr
|
||||||
|
nix profile remove 'pkgmgr-1'
|
||||||
|
nix profile remove "default-2"
|
||||||
|
'''
|
||||||
|
tokens = t.remove_tokens(text)
|
||||||
|
self.assertEqual(tokens, ["pkgmgr", "pkgmgr-1", "default-2"])
|
||||||
|
|
||||||
|
def test_existing_store_prefixes_extracts_existing_section_only(self) -> None:
|
||||||
|
t = NixConflictTextParser()
|
||||||
|
text = '''
|
||||||
|
error: An existing package already provides the following file:
|
||||||
|
/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr/bin/pkgmgr
|
||||||
|
/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-pkgmgr/share/doc
|
||||||
|
This is the conflicting file from the new package:
|
||||||
|
/nix/store/cccccccccccccccccccccccccccccccc-pkgmgr/bin/pkgmgr
|
||||||
|
'''
|
||||||
|
prefixes = t.existing_store_prefixes(text)
|
||||||
|
self.assertEqual(len(prefixes), 2)
|
||||||
|
self.assertTrue(prefixes[0].startswith("/nix/store/"))
|
||||||
@@ -1,219 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
"""
|
|
||||||
Unit tests for NixFlakeInstaller using unittest (no pytest).
|
|
||||||
|
|
||||||
Covers:
|
|
||||||
- Successful installation (returncode == 0)
|
|
||||||
- Mandatory failure → SystemExit with correct code
|
|
||||||
- Optional failure (pkgmgr default) → no raise, but warning
|
|
||||||
- supports() behavior incl. PKGMGR_DISABLE_NIX_FLAKE_INSTALLER
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import io
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import tempfile
|
|
||||||
import unittest
|
|
||||||
from contextlib import redirect_stdout
|
|
||||||
from unittest.mock import patch
|
|
||||||
|
|
||||||
from pkgmgr.actions.install.installers.nix_flake import NixFlakeInstaller
|
|
||||||
|
|
||||||
|
|
||||||
class DummyCtx:
|
|
||||||
"""Minimal context object to satisfy NixFlakeInstaller.run() / supports()."""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
identifier: str,
|
|
||||||
repo_dir: str,
|
|
||||||
preview: bool = False,
|
|
||||||
quiet: bool = False,
|
|
||||||
force_update: bool = False,
|
|
||||||
):
|
|
||||||
self.identifier = identifier
|
|
||||||
self.repo_dir = repo_dir
|
|
||||||
self.preview = preview
|
|
||||||
self.quiet = quiet
|
|
||||||
self.force_update = force_update
|
|
||||||
|
|
||||||
|
|
||||||
class TestNixFlakeInstaller(unittest.TestCase):
|
|
||||||
def setUp(self) -> None:
|
|
||||||
# Create a temporary repository directory with a flake.nix file
|
|
||||||
self._tmpdir = tempfile.mkdtemp(prefix="nix_flake_test_")
|
|
||||||
self.repo_dir = self._tmpdir
|
|
||||||
flake_path = os.path.join(self.repo_dir, "flake.nix")
|
|
||||||
with open(flake_path, "w", encoding="utf-8") as f:
|
|
||||||
f.write("{}\n")
|
|
||||||
|
|
||||||
# Ensure the disable env var is not set by default
|
|
||||||
os.environ.pop("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER", None)
|
|
||||||
|
|
||||||
def tearDown(self) -> None:
|
|
||||||
if os.path.isdir(self._tmpdir):
|
|
||||||
shutil.rmtree(self._tmpdir, ignore_errors=True)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _cp(code: int) -> subprocess.CompletedProcess:
|
|
||||||
# stdout/stderr are irrelevant here, but keep shape realistic
|
|
||||||
return subprocess.CompletedProcess(args=["nix"], returncode=code, stdout="", stderr="")
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _enable_nix_in_module(which_patch) -> None:
|
|
||||||
"""Ensure shutil.which('nix') in nix_flake module returns a path."""
|
|
||||||
which_patch.return_value = "/usr/bin/nix"
|
|
||||||
|
|
||||||
def test_nix_flake_run_success(self) -> None:
|
|
||||||
"""
|
|
||||||
When run_command returns success (returncode 0), installer
|
|
||||||
should report success and not raise.
|
|
||||||
"""
|
|
||||||
ctx = DummyCtx(identifier="some-lib", repo_dir=self.repo_dir)
|
|
||||||
installer = NixFlakeInstaller()
|
|
||||||
|
|
||||||
buf = io.StringIO()
|
|
||||||
with patch("pkgmgr.actions.install.installers.nix_flake.shutil.which") as which_mock, patch(
|
|
||||||
"pkgmgr.actions.install.installers.nix_flake.subprocess.run"
|
|
||||||
) as subproc_mock, patch(
|
|
||||||
"pkgmgr.actions.install.installers.nix_flake.run_command"
|
|
||||||
) as run_cmd_mock, redirect_stdout(buf):
|
|
||||||
self._enable_nix_in_module(which_mock)
|
|
||||||
|
|
||||||
# For profile list JSON (used only on failure paths, but keep deterministic)
|
|
||||||
subproc_mock.return_value = subprocess.CompletedProcess(
|
|
||||||
args=["nix", "profile", "list", "--json"],
|
|
||||||
returncode=0,
|
|
||||||
stdout='{"elements": []}',
|
|
||||||
stderr="",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Install succeeds
|
|
||||||
run_cmd_mock.return_value = self._cp(0)
|
|
||||||
|
|
||||||
self.assertTrue(installer.supports(ctx))
|
|
||||||
installer.run(ctx)
|
|
||||||
|
|
||||||
out = buf.getvalue()
|
|
||||||
self.assertIn("[nix] install: nix profile install", out)
|
|
||||||
self.assertIn("[nix] output 'default' successfully installed.", out)
|
|
||||||
|
|
||||||
run_cmd_mock.assert_called_with(
|
|
||||||
f"nix profile install {self.repo_dir}#default",
|
|
||||||
cwd=self.repo_dir,
|
|
||||||
preview=False,
|
|
||||||
allow_failure=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_nix_flake_run_mandatory_failure_raises(self) -> None:
|
|
||||||
"""
|
|
||||||
For a generic repository, 'default' is mandatory.
|
|
||||||
A non-zero return code must raise SystemExit with that code.
|
|
||||||
"""
|
|
||||||
ctx = DummyCtx(identifier="some-lib", repo_dir=self.repo_dir)
|
|
||||||
installer = NixFlakeInstaller()
|
|
||||||
|
|
||||||
buf = io.StringIO()
|
|
||||||
with patch("pkgmgr.actions.install.installers.nix_flake.shutil.which") as which_mock, patch(
|
|
||||||
"pkgmgr.actions.install.installers.nix_flake.subprocess.run"
|
|
||||||
) as subproc_mock, patch(
|
|
||||||
"pkgmgr.actions.install.installers.nix_flake.run_command"
|
|
||||||
) as run_cmd_mock, redirect_stdout(buf):
|
|
||||||
self._enable_nix_in_module(which_mock)
|
|
||||||
|
|
||||||
# No indices available (empty list)
|
|
||||||
subproc_mock.return_value = subprocess.CompletedProcess(
|
|
||||||
args=["nix", "profile", "list", "--json"],
|
|
||||||
returncode=0,
|
|
||||||
stdout='{"elements": []}',
|
|
||||||
stderr="",
|
|
||||||
)
|
|
||||||
|
|
||||||
# First install fails, retry fails -> should raise SystemExit(1)
|
|
||||||
run_cmd_mock.side_effect = [self._cp(1), self._cp(1)]
|
|
||||||
|
|
||||||
self.assertTrue(installer.supports(ctx))
|
|
||||||
with self.assertRaises(SystemExit) as cm:
|
|
||||||
installer.run(ctx)
|
|
||||||
|
|
||||||
self.assertEqual(cm.exception.code, 1)
|
|
||||||
out = buf.getvalue()
|
|
||||||
self.assertIn("[nix] install: nix profile install", out)
|
|
||||||
self.assertIn("[ERROR] Failed to install Nix flake output 'default' (exit 1)", out)
|
|
||||||
|
|
||||||
def test_nix_flake_run_optional_failure_does_not_raise(self) -> None:
|
|
||||||
"""
|
|
||||||
For pkgmgr/package-manager repositories:
|
|
||||||
- 'pkgmgr' output is mandatory
|
|
||||||
- 'default' output is optional
|
|
||||||
Failure of optional output must not raise.
|
|
||||||
"""
|
|
||||||
ctx = DummyCtx(identifier="pkgmgr", repo_dir=self.repo_dir)
|
|
||||||
installer = NixFlakeInstaller()
|
|
||||||
|
|
||||||
buf = io.StringIO()
|
|
||||||
with patch("pkgmgr.actions.install.installers.nix_flake.shutil.which") as which_mock, patch(
|
|
||||||
"pkgmgr.actions.install.installers.nix_flake.subprocess.run"
|
|
||||||
) as subproc_mock, patch(
|
|
||||||
"pkgmgr.actions.install.installers.nix_flake.run_command"
|
|
||||||
) as run_cmd_mock, redirect_stdout(buf):
|
|
||||||
self._enable_nix_in_module(which_mock)
|
|
||||||
|
|
||||||
# No indices available (empty list)
|
|
||||||
subproc_mock.return_value = subprocess.CompletedProcess(
|
|
||||||
args=["nix", "profile", "list", "--json"],
|
|
||||||
returncode=0,
|
|
||||||
stdout='{"elements": []}',
|
|
||||||
stderr="",
|
|
||||||
)
|
|
||||||
|
|
||||||
# pkgmgr install ok; default fails twice (initial + retry)
|
|
||||||
run_cmd_mock.side_effect = [self._cp(0), self._cp(1), self._cp(1)]
|
|
||||||
|
|
||||||
self.assertTrue(installer.supports(ctx))
|
|
||||||
|
|
||||||
# Must NOT raise despite optional failure
|
|
||||||
installer.run(ctx)
|
|
||||||
|
|
||||||
out = buf.getvalue()
|
|
||||||
|
|
||||||
# Should announce both outputs
|
|
||||||
self.assertIn("ensuring outputs: pkgmgr, default", out)
|
|
||||||
|
|
||||||
# First output ok
|
|
||||||
self.assertIn("[nix] output 'pkgmgr' successfully installed.", out)
|
|
||||||
|
|
||||||
# Second output failed but no raise
|
|
||||||
self.assertIn("[ERROR] Failed to install Nix flake output 'default' (exit 1)", out)
|
|
||||||
self.assertIn("[WARNING] Continuing despite failure of optional output 'default'.", out)
|
|
||||||
|
|
||||||
# Verify run_command was called for both outputs (default twice due to retry)
|
|
||||||
expected_calls = [
|
|
||||||
(f"nix profile install {self.repo_dir}#pkgmgr",),
|
|
||||||
(f"nix profile install {self.repo_dir}#default",),
|
|
||||||
(f"nix profile install {self.repo_dir}#default",),
|
|
||||||
]
|
|
||||||
actual_cmds = [c.args[0] for c in run_cmd_mock.call_args_list]
|
|
||||||
self.assertEqual(actual_cmds, [e[0] for e in expected_calls])
|
|
||||||
|
|
||||||
def test_nix_flake_supports_respects_disable_env(self) -> None:
|
|
||||||
"""
|
|
||||||
PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 must disable the installer,
|
|
||||||
even if flake.nix exists and nix is available.
|
|
||||||
"""
|
|
||||||
ctx = DummyCtx(identifier="pkgmgr", repo_dir=self.repo_dir, quiet=False)
|
|
||||||
installer = NixFlakeInstaller()
|
|
||||||
|
|
||||||
with patch("pkgmgr.actions.install.installers.nix_flake.shutil.which") as which_mock:
|
|
||||||
self._enable_nix_in_module(which_mock)
|
|
||||||
os.environ["PKGMGR_DISABLE_NIX_FLAKE_INSTALLER"] = "1"
|
|
||||||
self.assertFalse(installer.supports(ctx))
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
unittest.main()
|
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
# Unit test package for pkgmgr.actions.mirror
|
||||||
|
|||||||
51
tests/unit/pkgmgr/actions/mirror/test_context.py
Normal file
51
tests/unit/pkgmgr/actions/mirror/test_context.py
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
from pkgmgr.actions.mirror.context import build_context
|
||||||
|
|
||||||
|
|
||||||
|
class TestMirrorContext(unittest.TestCase):
|
||||||
|
"""
|
||||||
|
Unit tests for building RepoMirrorContext from repo + filesystem.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.context.read_mirrors_file")
|
||||||
|
@patch("pkgmgr.actions.mirror.context.load_config_mirrors")
|
||||||
|
@patch("pkgmgr.actions.mirror.context.get_repo_dir")
|
||||||
|
@patch("pkgmgr.actions.mirror.context.get_repo_identifier")
|
||||||
|
def test_build_context_bundles_config_and_file_mirrors(
|
||||||
|
self,
|
||||||
|
mock_identifier,
|
||||||
|
mock_repo_dir,
|
||||||
|
mock_load_config,
|
||||||
|
mock_read_file,
|
||||||
|
) -> None:
|
||||||
|
mock_identifier.return_value = "id"
|
||||||
|
mock_repo_dir.return_value = "/tmp/repo"
|
||||||
|
mock_load_config.return_value = {"origin": "git@github.com:alice/repo.git"}
|
||||||
|
mock_read_file.return_value = {"backup": "ssh://git@backup/alice/repo.git"}
|
||||||
|
|
||||||
|
repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
|
||||||
|
|
||||||
|
ctx = build_context(repo, repositories_base_dir="/base", all_repos=[repo])
|
||||||
|
|
||||||
|
self.assertEqual(ctx.identifier, "id")
|
||||||
|
self.assertEqual(ctx.repo_dir, "/tmp/repo")
|
||||||
|
self.assertEqual(ctx.config_mirrors, {"origin": "git@github.com:alice/repo.git"})
|
||||||
|
self.assertEqual(ctx.file_mirrors, {"backup": "ssh://git@backup/alice/repo.git"})
|
||||||
|
self.assertEqual(
|
||||||
|
ctx.resolved_mirrors,
|
||||||
|
{
|
||||||
|
"origin": "git@github.com:alice/repo.git",
|
||||||
|
"backup": "ssh://git@backup/alice/repo.git",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
77
tests/unit/pkgmgr/actions/mirror/test_diff_cmd.py
Normal file
77
tests/unit/pkgmgr/actions/mirror/test_diff_cmd.py
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import io
|
||||||
|
import unittest
|
||||||
|
from contextlib import redirect_stdout
|
||||||
|
from unittest.mock import MagicMock, PropertyMock, patch
|
||||||
|
|
||||||
|
from pkgmgr.actions.mirror.diff_cmd import diff_mirrors
|
||||||
|
|
||||||
|
|
||||||
|
class TestDiffCmd(unittest.TestCase):
|
||||||
|
"""
|
||||||
|
Unit tests for mirror diff output.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.diff_cmd.build_context")
|
||||||
|
def test_diff_mirrors_reports_only_in_config_and_only_in_file(self, mock_build_context) -> None:
|
||||||
|
ctx = MagicMock()
|
||||||
|
ctx.identifier = "id"
|
||||||
|
ctx.repo_dir = "/tmp/repo"
|
||||||
|
ctx.config_mirrors = {"origin": "a", "cfgonly": "b"}
|
||||||
|
ctx.file_mirrors = {"origin": "a", "fileonly": "c"}
|
||||||
|
type(ctx).resolved_mirrors = PropertyMock(
|
||||||
|
return_value={"origin": "a", "cfgonly": "b", "fileonly": "c"}
|
||||||
|
)
|
||||||
|
mock_build_context.return_value = ctx
|
||||||
|
|
||||||
|
buf = io.StringIO()
|
||||||
|
with redirect_stdout(buf):
|
||||||
|
diff_mirrors(selected_repos=[{}], repositories_base_dir="/base", all_repos=[])
|
||||||
|
|
||||||
|
out = buf.getvalue()
|
||||||
|
self.assertIn("[ONLY IN CONFIG] cfgonly: b", out)
|
||||||
|
self.assertIn("[ONLY IN FILE] fileonly: c", out)
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.diff_cmd.build_context")
|
||||||
|
def test_diff_mirrors_reports_url_mismatch(self, mock_build_context) -> None:
|
||||||
|
ctx = MagicMock()
|
||||||
|
ctx.identifier = "id"
|
||||||
|
ctx.repo_dir = "/tmp/repo"
|
||||||
|
ctx.config_mirrors = {"origin": "a"}
|
||||||
|
ctx.file_mirrors = {"origin": "different"}
|
||||||
|
type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "different"})
|
||||||
|
mock_build_context.return_value = ctx
|
||||||
|
|
||||||
|
buf = io.StringIO()
|
||||||
|
with redirect_stdout(buf):
|
||||||
|
diff_mirrors(selected_repos=[{}], repositories_base_dir="/base", all_repos=[])
|
||||||
|
|
||||||
|
out = buf.getvalue()
|
||||||
|
self.assertIn("[URL MISMATCH]", out)
|
||||||
|
self.assertIn("config: a", out)
|
||||||
|
self.assertIn("file: different", out)
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.diff_cmd.build_context")
|
||||||
|
def test_diff_mirrors_reports_in_sync(self, mock_build_context) -> None:
|
||||||
|
ctx = MagicMock()
|
||||||
|
ctx.identifier = "id"
|
||||||
|
ctx.repo_dir = "/tmp/repo"
|
||||||
|
ctx.config_mirrors = {"origin": "a"}
|
||||||
|
ctx.file_mirrors = {"origin": "a"}
|
||||||
|
type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "a"})
|
||||||
|
mock_build_context.return_value = ctx
|
||||||
|
|
||||||
|
buf = io.StringIO()
|
||||||
|
with redirect_stdout(buf):
|
||||||
|
diff_mirrors(selected_repos=[{}], repositories_base_dir="/base", all_repos=[])
|
||||||
|
|
||||||
|
out = buf.getvalue()
|
||||||
|
self.assertIn("[OK] Mirrors in config and MIRRORS file are in sync.", out)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
@@ -4,10 +4,13 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import unittest
|
import unittest
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
from pkgmgr.actions.mirror.git_remote import (
|
from pkgmgr.actions.mirror.git_remote import (
|
||||||
build_default_ssh_url,
|
build_default_ssh_url,
|
||||||
determine_primary_remote_url,
|
determine_primary_remote_url,
|
||||||
|
current_origin_url,
|
||||||
|
has_origin_remote,
|
||||||
)
|
)
|
||||||
from pkgmgr.actions.mirror.types import MirrorMap, Repository
|
from pkgmgr.actions.mirror.types import MirrorMap, Repository
|
||||||
|
|
||||||
@@ -25,10 +28,7 @@ class TestMirrorGitRemote(unittest.TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
url = build_default_ssh_url(repo)
|
url = build_default_ssh_url(repo)
|
||||||
self.assertEqual(
|
self.assertEqual(url, "git@github.com:kevinveenbirkenbach/package-manager.git")
|
||||||
url,
|
|
||||||
"git@github.com:kevinveenbirkenbach/package-manager.git",
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_build_default_ssh_url_with_port(self) -> None:
|
def test_build_default_ssh_url_with_port(self) -> None:
|
||||||
repo: Repository = {
|
repo: Repository = {
|
||||||
@@ -39,24 +39,18 @@ class TestMirrorGitRemote(unittest.TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
url = build_default_ssh_url(repo)
|
url = build_default_ssh_url(repo)
|
||||||
self.assertEqual(
|
self.assertEqual(url, "ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git")
|
||||||
url,
|
|
||||||
"ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git",
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_build_default_ssh_url_missing_fields_returns_none(self) -> None:
|
def test_build_default_ssh_url_missing_fields_returns_none(self) -> None:
|
||||||
repo: Repository = {
|
repo: Repository = {
|
||||||
"provider": "github.com",
|
"provider": "github.com",
|
||||||
"account": "kevinveenbirkenbach",
|
"account": "kevinveenbirkenbach",
|
||||||
# "repository" fehlt absichtlich
|
|
||||||
}
|
}
|
||||||
|
|
||||||
url = build_default_ssh_url(repo)
|
url = build_default_ssh_url(repo)
|
||||||
self.assertIsNone(url)
|
self.assertIsNone(url)
|
||||||
|
|
||||||
def test_determine_primary_remote_url_prefers_origin_in_resolved_mirrors(
|
def test_determine_primary_remote_url_prefers_origin_in_resolved_mirrors(self) -> None:
|
||||||
self,
|
|
||||||
) -> None:
|
|
||||||
repo: Repository = {
|
repo: Repository = {
|
||||||
"provider": "github.com",
|
"provider": "github.com",
|
||||||
"account": "kevinveenbirkenbach",
|
"account": "kevinveenbirkenbach",
|
||||||
@@ -68,10 +62,7 @@ class TestMirrorGitRemote(unittest.TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
url = determine_primary_remote_url(repo, mirrors)
|
url = determine_primary_remote_url(repo, mirrors)
|
||||||
self.assertEqual(
|
self.assertEqual(url, "git@github.com:kevinveenbirkenbach/package-manager.git")
|
||||||
url,
|
|
||||||
"git@github.com:kevinveenbirkenbach/package-manager.git",
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_determine_primary_remote_url_uses_any_mirror_if_no_origin(self) -> None:
|
def test_determine_primary_remote_url_uses_any_mirror_if_no_origin(self) -> None:
|
||||||
repo: Repository = {
|
repo: Repository = {
|
||||||
@@ -85,11 +76,7 @@ class TestMirrorGitRemote(unittest.TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
url = determine_primary_remote_url(repo, mirrors)
|
url = determine_primary_remote_url(repo, mirrors)
|
||||||
# Alphabetisch sortiert: backup, mirror2 → backup gewinnt
|
self.assertEqual(url, "ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git")
|
||||||
self.assertEqual(
|
|
||||||
url,
|
|
||||||
"ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git",
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_determine_primary_remote_url_falls_back_to_default_ssh(self) -> None:
|
def test_determine_primary_remote_url_falls_back_to_default_ssh(self) -> None:
|
||||||
repo: Repository = {
|
repo: Repository = {
|
||||||
@@ -100,10 +87,38 @@ class TestMirrorGitRemote(unittest.TestCase):
|
|||||||
mirrors: MirrorMap = {}
|
mirrors: MirrorMap = {}
|
||||||
|
|
||||||
url = determine_primary_remote_url(repo, mirrors)
|
url = determine_primary_remote_url(repo, mirrors)
|
||||||
self.assertEqual(
|
self.assertEqual(url, "git@github.com:kevinveenbirkenbach/package-manager.git")
|
||||||
url,
|
|
||||||
"git@github.com:kevinveenbirkenbach/package-manager.git",
|
@patch("pkgmgr.actions.mirror.git_remote.run_git")
|
||||||
)
|
def test_current_origin_url_returns_value(self, mock_run_git) -> None:
|
||||||
|
mock_run_git.return_value = "git@github.com:alice/repo.git\n"
|
||||||
|
self.assertEqual(current_origin_url("/tmp/repo"), "git@github.com:alice/repo.git")
|
||||||
|
mock_run_git.assert_called_once_with(["remote", "get-url", "origin"], cwd="/tmp/repo")
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.git_remote.run_git")
|
||||||
|
def test_current_origin_url_returns_none_on_git_error(self, mock_run_git) -> None:
|
||||||
|
from pkgmgr.core.git import GitError
|
||||||
|
|
||||||
|
mock_run_git.side_effect = GitError("fail")
|
||||||
|
self.assertIsNone(current_origin_url("/tmp/repo"))
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.git_remote.run_git")
|
||||||
|
def test_has_origin_remote_true(self, mock_run_git) -> None:
|
||||||
|
mock_run_git.return_value = "origin\nupstream\n"
|
||||||
|
self.assertTrue(has_origin_remote("/tmp/repo"))
|
||||||
|
mock_run_git.assert_called_once_with(["remote"], cwd="/tmp/repo")
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.git_remote.run_git")
|
||||||
|
def test_has_origin_remote_false_on_missing_remote(self, mock_run_git) -> None:
|
||||||
|
mock_run_git.return_value = "upstream\n"
|
||||||
|
self.assertFalse(has_origin_remote("/tmp/repo"))
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.git_remote.run_git")
|
||||||
|
def test_has_origin_remote_false_on_git_error(self, mock_run_git) -> None:
|
||||||
|
from pkgmgr.core.git import GitError
|
||||||
|
|
||||||
|
mock_run_git.side_effect = GitError("fail")
|
||||||
|
self.assertFalse(has_origin_remote("/tmp/repo"))
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
@@ -7,10 +7,7 @@ import os
|
|||||||
import tempfile
|
import tempfile
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
from pkgmgr.actions.mirror.io import (
|
from pkgmgr.actions.mirror.io import load_config_mirrors, read_mirrors_file, write_mirrors_file
|
||||||
load_config_mirrors,
|
|
||||||
read_mirrors_file,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestMirrorIO(unittest.TestCase):
|
class TestMirrorIO(unittest.TestCase):
|
||||||
@@ -18,117 +15,96 @@ class TestMirrorIO(unittest.TestCase):
|
|||||||
Unit tests for pkgmgr.actions.mirror.io helpers.
|
Unit tests for pkgmgr.actions.mirror.io helpers.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
def test_load_config_mirrors_from_dict_filters_empty(self) -> None:
|
||||||
# load_config_mirrors
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
def test_load_config_mirrors_from_dict(self) -> None:
|
|
||||||
repo = {
|
repo = {
|
||||||
"mirrors": {
|
"mirrors": {
|
||||||
"origin": "ssh://git@example.com/account/repo.git",
|
"origin": "ssh://git@example.com/account/repo.git",
|
||||||
"backup": "ssh://git@backup/account/repo.git",
|
"backup": "",
|
||||||
"empty": "",
|
"invalid": None,
|
||||||
"none": None,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
mirrors = load_config_mirrors(repo)
|
mirrors = load_config_mirrors(repo)
|
||||||
|
self.assertEqual(mirrors, {"origin": "ssh://git@example.com/account/repo.git"})
|
||||||
|
|
||||||
self.assertEqual(
|
def test_load_config_mirrors_from_list_filters_invalid_entries(self) -> None:
|
||||||
mirrors,
|
|
||||||
{
|
|
||||||
"origin": "ssh://git@example.com/account/repo.git",
|
|
||||||
"backup": "ssh://git@backup/account/repo.git",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_load_config_mirrors_from_list(self) -> None:
|
|
||||||
repo = {
|
repo = {
|
||||||
"mirrors": [
|
"mirrors": [
|
||||||
{"name": "origin", "url": "ssh://git@example.com/account/repo.git"},
|
{"name": "origin", "url": "ssh://git@example.com/account/repo.git"},
|
||||||
{"name": "backup", "url": "ssh://git@backup/account/repo.git"},
|
{"name": "backup", "url": ""},
|
||||||
{"name": "", "url": "ssh://git@invalid/ignored.git"},
|
{"name": "", "url": "ssh://git@example.com/empty-name.git"},
|
||||||
{"name": "missing-url"},
|
{"url": "ssh://git@example.com/missing-name.git"},
|
||||||
"not-a-dict",
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
mirrors = load_config_mirrors(repo)
|
mirrors = load_config_mirrors(repo)
|
||||||
|
self.assertEqual(mirrors, {"origin": "ssh://git@example.com/account/repo.git"})
|
||||||
self.assertEqual(
|
|
||||||
mirrors,
|
|
||||||
{
|
|
||||||
"origin": "ssh://git@example.com/account/repo.git",
|
|
||||||
"backup": "ssh://git@backup/account/repo.git",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_load_config_mirrors_empty_when_missing(self) -> None:
|
def test_load_config_mirrors_empty_when_missing(self) -> None:
|
||||||
repo = {}
|
self.assertEqual(load_config_mirrors({}), {})
|
||||||
mirrors = load_config_mirrors(repo)
|
|
||||||
self.assertEqual(mirrors, {})
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
def test_read_mirrors_file_parses_named_entries(self) -> None:
|
||||||
# read_mirrors_file
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
def test_read_mirrors_file_with_named_and_url_only_entries(self) -> None:
|
|
||||||
"""
|
|
||||||
Ensure that the MIRRORS file format is parsed correctly:
|
|
||||||
|
|
||||||
- 'name url' → exact name
|
|
||||||
- 'url' → auto name derived from netloc (host[:port]),
|
|
||||||
with numeric suffix if duplicated.
|
|
||||||
"""
|
|
||||||
with tempfile.TemporaryDirectory() as tmpdir:
|
with tempfile.TemporaryDirectory() as tmpdir:
|
||||||
mirrors_path = os.path.join(tmpdir, "MIRRORS")
|
p = os.path.join(tmpdir, "MIRRORS")
|
||||||
content = "\n".join(
|
with open(p, "w", encoding="utf-8") as fh:
|
||||||
[
|
fh.write("origin ssh://git@example.com/account/repo.git\n")
|
||||||
"# comment",
|
|
||||||
"",
|
|
||||||
"origin ssh://git@example.com/account/repo.git",
|
|
||||||
"https://github.com/kevinveenbirkenbach/package-manager",
|
|
||||||
"https://github.com/kevinveenbirkenbach/another-repo",
|
|
||||||
"ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
with open(mirrors_path, "w", encoding="utf-8") as fh:
|
|
||||||
fh.write(content + "\n")
|
|
||||||
|
|
||||||
mirrors = read_mirrors_file(tmpdir)
|
mirrors = read_mirrors_file(tmpdir)
|
||||||
|
|
||||||
# 'origin' is preserved as given
|
self.assertEqual(mirrors, {"origin": "ssh://git@example.com/account/repo.git"})
|
||||||
self.assertIn("origin", mirrors)
|
|
||||||
self.assertEqual(
|
def test_read_mirrors_file_url_only_uses_netloc_basename_and_suffix(self) -> None:
|
||||||
mirrors["origin"],
|
with tempfile.TemporaryDirectory() as tmpdir:
|
||||||
"ssh://git@example.com/account/repo.git",
|
p = os.path.join(tmpdir, "MIRRORS")
|
||||||
|
with open(p, "w", encoding="utf-8") as fh:
|
||||||
|
fh.write(
|
||||||
|
"\n".join(
|
||||||
|
[
|
||||||
|
"https://github.com/alice/repo1",
|
||||||
|
"https://github.com/alice/repo2",
|
||||||
|
"ssh://git@git.veen.world:2201/alice/repo3.git",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
+ "\n"
|
||||||
)
|
)
|
||||||
|
|
||||||
# Two GitHub URLs → auto names: github.com, github.com2
|
mirrors = read_mirrors_file(tmpdir)
|
||||||
github_urls = {
|
|
||||||
mirrors.get("github.com"),
|
self.assertIn("github.com", mirrors)
|
||||||
mirrors.get("github.com2"),
|
self.assertIn("github.com2", mirrors)
|
||||||
}
|
self.assertEqual(mirrors["github.com"], "https://github.com/alice/repo1")
|
||||||
self.assertIn(
|
self.assertEqual(mirrors["github.com2"], "https://github.com/alice/repo2")
|
||||||
"https://github.com/kevinveenbirkenbach/package-manager",
|
|
||||||
github_urls,
|
|
||||||
)
|
|
||||||
self.assertIn(
|
|
||||||
"https://github.com/kevinveenbirkenbach/another-repo",
|
|
||||||
github_urls,
|
|
||||||
)
|
|
||||||
|
|
||||||
# SSH-URL mit User-Teil → netloc ist "git@git.veen.world:2201"
|
|
||||||
# → host = "git@git.veen.world"
|
|
||||||
self.assertIn("git@git.veen.world", mirrors)
|
self.assertIn("git@git.veen.world", mirrors)
|
||||||
self.assertEqual(
|
self.assertEqual(mirrors["git@git.veen.world"], "ssh://git@git.veen.world:2201/alice/repo3.git")
|
||||||
mirrors["git@git.veen.world"],
|
|
||||||
"ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git",
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_read_mirrors_file_missing_returns_empty(self) -> None:
|
def test_read_mirrors_file_missing_returns_empty(self) -> None:
|
||||||
with tempfile.TemporaryDirectory() as tmpdir:
|
with tempfile.TemporaryDirectory() as tmpdir:
|
||||||
mirrors = read_mirrors_file(tmpdir) # no MIRRORS file
|
self.assertEqual(read_mirrors_file(tmpdir), {})
|
||||||
self.assertEqual(mirrors, {})
|
|
||||||
|
def test_write_mirrors_file_writes_sorted_lines(self) -> None:
|
||||||
|
with tempfile.TemporaryDirectory() as tmpdir:
|
||||||
|
mirrors = {
|
||||||
|
"b": "ssh://b.example/repo.git",
|
||||||
|
"a": "ssh://a.example/repo.git",
|
||||||
|
}
|
||||||
|
write_mirrors_file(tmpdir, mirrors, preview=False)
|
||||||
|
|
||||||
|
p = os.path.join(tmpdir, "MIRRORS")
|
||||||
|
self.assertTrue(os.path.exists(p))
|
||||||
|
|
||||||
|
with open(p, "r", encoding="utf-8") as fh:
|
||||||
|
content = fh.read()
|
||||||
|
|
||||||
|
self.assertEqual(content, "a ssh://a.example/repo.git\nb ssh://b.example/repo.git\n")
|
||||||
|
|
||||||
|
def test_write_mirrors_file_preview_does_not_create_file(self) -> None:
|
||||||
|
with tempfile.TemporaryDirectory() as tmpdir:
|
||||||
|
mirrors = {"a": "ssh://a.example/repo.git"}
|
||||||
|
write_mirrors_file(tmpdir, mirrors, preview=True)
|
||||||
|
|
||||||
|
p = os.path.join(tmpdir, "MIRRORS")
|
||||||
|
self.assertFalse(os.path.exists(p))
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
72
tests/unit/pkgmgr/actions/mirror/test_list_cmd.py
Normal file
72
tests/unit/pkgmgr/actions/mirror/test_list_cmd.py
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import io
|
||||||
|
import unittest
|
||||||
|
from contextlib import redirect_stdout
|
||||||
|
from unittest.mock import MagicMock, PropertyMock, patch
|
||||||
|
|
||||||
|
from pkgmgr.actions.mirror.list_cmd import list_mirrors
|
||||||
|
|
||||||
|
|
||||||
|
class TestListCmd(unittest.TestCase):
|
||||||
|
"""
|
||||||
|
Unit tests for mirror list output.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.list_cmd.build_context")
|
||||||
|
def test_list_mirrors_all_sources_prints_sections(self, mock_build_context) -> None:
|
||||||
|
ctx = MagicMock()
|
||||||
|
ctx.identifier = "id"
|
||||||
|
ctx.repo_dir = "/tmp/repo"
|
||||||
|
ctx.config_mirrors = {"origin": "a"}
|
||||||
|
ctx.file_mirrors = {"backup": "b"}
|
||||||
|
type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "a", "backup": "b"})
|
||||||
|
mock_build_context.return_value = ctx
|
||||||
|
|
||||||
|
buf = io.StringIO()
|
||||||
|
with redirect_stdout(buf):
|
||||||
|
list_mirrors(
|
||||||
|
selected_repos=[{}],
|
||||||
|
repositories_base_dir="/base",
|
||||||
|
all_repos=[],
|
||||||
|
source="all",
|
||||||
|
)
|
||||||
|
|
||||||
|
out = buf.getvalue()
|
||||||
|
self.assertIn("[config mirrors]", out)
|
||||||
|
self.assertIn("[MIRRORS file]", out)
|
||||||
|
self.assertIn("[resolved mirrors]", out)
|
||||||
|
self.assertIn("origin: a", out)
|
||||||
|
self.assertIn("backup: b", out)
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.list_cmd.build_context")
|
||||||
|
def test_list_mirrors_config_only(self, mock_build_context) -> None:
|
||||||
|
ctx = MagicMock()
|
||||||
|
ctx.identifier = "id"
|
||||||
|
ctx.repo_dir = "/tmp/repo"
|
||||||
|
ctx.config_mirrors = {"origin": "a"}
|
||||||
|
ctx.file_mirrors = {"backup": "b"}
|
||||||
|
type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "a", "backup": "b"})
|
||||||
|
mock_build_context.return_value = ctx
|
||||||
|
|
||||||
|
buf = io.StringIO()
|
||||||
|
with redirect_stdout(buf):
|
||||||
|
list_mirrors(
|
||||||
|
selected_repos=[{}],
|
||||||
|
repositories_base_dir="/base",
|
||||||
|
all_repos=[],
|
||||||
|
source="config",
|
||||||
|
)
|
||||||
|
|
||||||
|
out = buf.getvalue()
|
||||||
|
self.assertIn("[config mirrors]", out)
|
||||||
|
self.assertIn("origin: a", out)
|
||||||
|
self.assertNotIn("[MIRRORS file]", out)
|
||||||
|
self.assertNotIn("[resolved mirrors]", out)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
52
tests/unit/pkgmgr/actions/mirror/test_remote_check.py
Normal file
52
tests/unit/pkgmgr/actions/mirror/test_remote_check.py
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
from pkgmgr.actions.mirror.remote_check import probe_mirror
|
||||||
|
from pkgmgr.core.git import GitError
|
||||||
|
|
||||||
|
|
||||||
|
class TestRemoteCheck(unittest.TestCase):
|
||||||
|
"""
|
||||||
|
Unit tests for non-destructive remote probing (git ls-remote).
|
||||||
|
"""
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.remote_check.run_git")
|
||||||
|
def test_probe_mirror_success_returns_true_and_empty_message(self, mock_run_git) -> None:
|
||||||
|
mock_run_git.return_value = "dummy-output"
|
||||||
|
|
||||||
|
ok, message = probe_mirror(
|
||||||
|
"ssh://git@code.example.org:2201/alice/repo.git",
|
||||||
|
"/tmp/some-repo",
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertTrue(ok)
|
||||||
|
self.assertEqual(message, "")
|
||||||
|
mock_run_git.assert_called_once_with(
|
||||||
|
["ls-remote", "ssh://git@code.example.org:2201/alice/repo.git"],
|
||||||
|
cwd="/tmp/some-repo",
|
||||||
|
)
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.remote_check.run_git")
|
||||||
|
def test_probe_mirror_failure_returns_false_and_error_message(self, mock_run_git) -> None:
|
||||||
|
mock_run_git.side_effect = GitError("Git command failed (simulated)")
|
||||||
|
|
||||||
|
ok, message = probe_mirror(
|
||||||
|
"ssh://git@code.example.org:2201/alice/repo.git",
|
||||||
|
"/tmp/some-repo",
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertFalse(ok)
|
||||||
|
self.assertIn("Git command failed", message)
|
||||||
|
mock_run_git.assert_called_once_with(
|
||||||
|
["ls-remote", "ssh://git@code.example.org:2201/alice/repo.git"],
|
||||||
|
cwd="/tmp/some-repo",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
114
tests/unit/pkgmgr/actions/mirror/test_remote_provision.py
Normal file
114
tests/unit/pkgmgr/actions/mirror/test_remote_provision.py
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
from unittest.mock import MagicMock, PropertyMock, patch
|
||||||
|
|
||||||
|
from pkgmgr.actions.mirror.remote_provision import ensure_remote_repository
|
||||||
|
|
||||||
|
|
||||||
|
class TestRemoteProvision(unittest.TestCase):
|
||||||
|
"""
|
||||||
|
Unit tests for remote provisioning wrapper logic (action layer).
|
||||||
|
"""
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.remote_provision.ensure_remote_repo")
|
||||||
|
@patch("pkgmgr.actions.mirror.remote_provision.determine_primary_remote_url")
|
||||||
|
@patch("pkgmgr.actions.mirror.remote_provision.build_context")
|
||||||
|
def test_ensure_remote_repository_builds_spec_from_url_and_calls_core(
|
||||||
|
self,
|
||||||
|
mock_build_context,
|
||||||
|
mock_determine_primary,
|
||||||
|
mock_ensure_remote_repo,
|
||||||
|
) -> None:
|
||||||
|
ctx = MagicMock()
|
||||||
|
type(ctx).resolved_mirrors = PropertyMock(
|
||||||
|
return_value={"origin": "ssh://git@git.veen.world:2201/alice/repo.git"}
|
||||||
|
)
|
||||||
|
ctx.identifier = "repo-id"
|
||||||
|
mock_build_context.return_value = ctx
|
||||||
|
|
||||||
|
mock_determine_primary.return_value = "ssh://git@git.veen.world:2201/alice/repo.git"
|
||||||
|
|
||||||
|
result = MagicMock()
|
||||||
|
result.status = "created"
|
||||||
|
result.message = "Repository created (user)."
|
||||||
|
result.url = "https://git.veen.world/alice/repo"
|
||||||
|
mock_ensure_remote_repo.return_value = result
|
||||||
|
|
||||||
|
repo = {
|
||||||
|
"provider": "gitea",
|
||||||
|
"account": "SHOULD_NOT_BE_USED_ANYMORE",
|
||||||
|
"repository": "SHOULD_NOT_BE_USED_ANYMORE",
|
||||||
|
"private": True,
|
||||||
|
"description": "desc",
|
||||||
|
}
|
||||||
|
|
||||||
|
ensure_remote_repository(
|
||||||
|
repo=repo,
|
||||||
|
repositories_base_dir="/base",
|
||||||
|
all_repos=[],
|
||||||
|
preview=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertTrue(mock_ensure_remote_repo.called)
|
||||||
|
called_spec = mock_ensure_remote_repo.call_args[0][0]
|
||||||
|
self.assertEqual(called_spec.host, "git.veen.world")
|
||||||
|
self.assertEqual(called_spec.owner, "alice")
|
||||||
|
self.assertEqual(called_spec.name, "repo")
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.remote_provision.ensure_remote_repo")
|
||||||
|
@patch("pkgmgr.actions.mirror.remote_provision.determine_primary_remote_url")
|
||||||
|
@patch("pkgmgr.actions.mirror.remote_provision.build_context")
|
||||||
|
def test_ensure_remote_repository_skips_when_no_primary_url(
|
||||||
|
self,
|
||||||
|
mock_build_context,
|
||||||
|
mock_determine_primary,
|
||||||
|
mock_ensure_remote_repo,
|
||||||
|
) -> None:
|
||||||
|
ctx = MagicMock()
|
||||||
|
type(ctx).resolved_mirrors = PropertyMock(return_value={})
|
||||||
|
ctx.identifier = "repo-id"
|
||||||
|
mock_build_context.return_value = ctx
|
||||||
|
mock_determine_primary.return_value = None
|
||||||
|
|
||||||
|
ensure_remote_repository(
|
||||||
|
repo={"provider": "gitea"},
|
||||||
|
repositories_base_dir="/base",
|
||||||
|
all_repos=[],
|
||||||
|
preview=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
mock_ensure_remote_repo.assert_not_called()
|
||||||
|
|
||||||
|
@patch("pkgmgr.actions.mirror.remote_provision.ensure_remote_repo")
|
||||||
|
@patch("pkgmgr.actions.mirror.remote_provision.determine_primary_remote_url")
|
||||||
|
@patch("pkgmgr.actions.mirror.remote_provision.build_context")
|
||||||
|
def test_ensure_remote_repository_skips_when_url_not_parseable(
|
||||||
|
self,
|
||||||
|
mock_build_context,
|
||||||
|
mock_determine_primary,
|
||||||
|
mock_ensure_remote_repo,
|
||||||
|
) -> None:
|
||||||
|
ctx = MagicMock()
|
||||||
|
type(ctx).resolved_mirrors = PropertyMock(
|
||||||
|
return_value={"origin": "ssh://git@host:2201/not-enough-parts"}
|
||||||
|
)
|
||||||
|
ctx.identifier = "repo-id"
|
||||||
|
mock_build_context.return_value = ctx
|
||||||
|
mock_determine_primary.return_value = "ssh://git@host:2201/not-enough-parts"
|
||||||
|
|
||||||
|
ensure_remote_repository(
|
||||||
|
repo={"provider": "gitea"},
|
||||||
|
repositories_base_dir="/base",
|
||||||
|
all_repos=[],
|
||||||
|
preview=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
mock_ensure_remote_repo.assert_not_called()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
@@ -4,55 +4,120 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import unittest
|
import unittest
|
||||||
from unittest.mock import patch
|
from unittest.mock import MagicMock, PropertyMock, patch
|
||||||
|
|
||||||
from pkgmgr.actions.mirror.setup_cmd import _probe_mirror
|
from pkgmgr.actions.mirror.setup_cmd import setup_mirrors
|
||||||
from pkgmgr.core.git import GitError
|
|
||||||
|
|
||||||
|
|
||||||
class TestMirrorSetupCmd(unittest.TestCase):
|
class TestMirrorSetupCmd(unittest.TestCase):
|
||||||
"""
|
"""
|
||||||
Unit tests for the non-destructive remote probing logic in setup_cmd.
|
Unit tests for mirror setup orchestration (local + remote).
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@patch("pkgmgr.actions.mirror.setup_cmd.run_git")
|
@patch("pkgmgr.actions.mirror.setup_cmd.ensure_origin_remote")
|
||||||
def test_probe_mirror_success_returns_true_and_empty_message(
|
@patch("pkgmgr.actions.mirror.setup_cmd.build_context")
|
||||||
|
def test_setup_mirrors_local_calls_ensure_origin_remote(
|
||||||
self,
|
self,
|
||||||
mock_run_git,
|
mock_build_context,
|
||||||
|
mock_ensure_origin,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
ctx = MagicMock()
|
||||||
If run_git returns successfully, _probe_mirror must report (True, "").
|
ctx.identifier = "repo-id"
|
||||||
"""
|
ctx.repo_dir = "/tmp/repo"
|
||||||
mock_run_git.return_value = "dummy-output"
|
ctx.config_mirrors = {}
|
||||||
|
ctx.file_mirrors = {}
|
||||||
|
type(ctx).resolved_mirrors = PropertyMock(return_value={})
|
||||||
|
mock_build_context.return_value = ctx
|
||||||
|
|
||||||
ok, message = _probe_mirror(
|
repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
|
||||||
"ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git",
|
|
||||||
"/tmp/some-repo",
|
setup_mirrors(
|
||||||
|
selected_repos=[repo],
|
||||||
|
repositories_base_dir="/base",
|
||||||
|
all_repos=[repo],
|
||||||
|
preview=True,
|
||||||
|
local=True,
|
||||||
|
remote=False,
|
||||||
|
ensure_remote=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertTrue(ok)
|
mock_ensure_origin.assert_called_once()
|
||||||
self.assertEqual(message, "")
|
args, kwargs = mock_ensure_origin.call_args
|
||||||
mock_run_git.assert_called_once()
|
self.assertEqual(args[0], repo)
|
||||||
|
self.assertEqual(kwargs.get("preview"), True)
|
||||||
|
|
||||||
@patch("pkgmgr.actions.mirror.setup_cmd.run_git")
|
@patch("pkgmgr.actions.mirror.setup_cmd.ensure_remote_repository")
|
||||||
def test_probe_mirror_failure_returns_false_and_error_message(
|
@patch("pkgmgr.actions.mirror.setup_cmd.probe_mirror")
|
||||||
|
@patch("pkgmgr.actions.mirror.setup_cmd.build_context")
|
||||||
|
def test_setup_mirrors_remote_provisions_when_enabled(
|
||||||
self,
|
self,
|
||||||
mock_run_git,
|
mock_build_context,
|
||||||
|
mock_probe,
|
||||||
|
mock_ensure_remote_repository,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
ctx = MagicMock()
|
||||||
If run_git raises GitError, _probe_mirror must report (False, <message>),
|
ctx.identifier = "repo-id"
|
||||||
and not re-raise the exception.
|
ctx.repo_dir = "/tmp/repo"
|
||||||
"""
|
ctx.config_mirrors = {"origin": "git@github.com:alice/repo.git"}
|
||||||
mock_run_git.side_effect = GitError("Git command failed (simulated)")
|
ctx.file_mirrors = {}
|
||||||
|
type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "git@github.com:alice/repo.git"})
|
||||||
|
mock_build_context.return_value = ctx
|
||||||
|
|
||||||
ok, message = _probe_mirror(
|
mock_probe.return_value = (True, "")
|
||||||
"ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git",
|
|
||||||
"/tmp/some-repo",
|
repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
|
||||||
|
|
||||||
|
setup_mirrors(
|
||||||
|
selected_repos=[repo],
|
||||||
|
repositories_base_dir="/base",
|
||||||
|
all_repos=[repo],
|
||||||
|
preview=False,
|
||||||
|
local=False,
|
||||||
|
remote=True,
|
||||||
|
ensure_remote=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertFalse(ok)
|
mock_ensure_remote_repository.assert_called_once()
|
||||||
self.assertIn("Git command failed", message)
|
mock_probe.assert_called_once()
|
||||||
mock_run_git.assert_called_once()
|
|
||||||
|
@patch("pkgmgr.actions.mirror.setup_cmd.ensure_remote_repository")
|
||||||
|
@patch("pkgmgr.actions.mirror.setup_cmd.probe_mirror")
|
||||||
|
@patch("pkgmgr.actions.mirror.setup_cmd.build_context")
|
||||||
|
def test_setup_mirrors_remote_probes_all_resolved_mirrors(
|
||||||
|
self,
|
||||||
|
mock_build_context,
|
||||||
|
mock_probe,
|
||||||
|
mock_ensure_remote_repository,
|
||||||
|
) -> None:
|
||||||
|
ctx = MagicMock()
|
||||||
|
ctx.identifier = "repo-id"
|
||||||
|
ctx.repo_dir = "/tmp/repo"
|
||||||
|
ctx.config_mirrors = {}
|
||||||
|
ctx.file_mirrors = {}
|
||||||
|
type(ctx).resolved_mirrors = PropertyMock(
|
||||||
|
return_value={
|
||||||
|
"mirror": "git@github.com:alice/repo.git",
|
||||||
|
"backup": "ssh://git@git.veen.world:2201/alice/repo.git",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
mock_build_context.return_value = ctx
|
||||||
|
|
||||||
|
mock_probe.return_value = (True, "")
|
||||||
|
|
||||||
|
repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
|
||||||
|
|
||||||
|
setup_mirrors(
|
||||||
|
selected_repos=[repo],
|
||||||
|
repositories_base_dir="/base",
|
||||||
|
all_repos=[repo],
|
||||||
|
preview=False,
|
||||||
|
local=False,
|
||||||
|
remote=True,
|
||||||
|
ensure_remote=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
mock_ensure_remote_repository.assert_not_called()
|
||||||
|
self.assertEqual(mock_probe.call_count, 2)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
77
tests/unit/pkgmgr/actions/mirror/test_url_utils.py
Normal file
77
tests/unit/pkgmgr/actions/mirror/test_url_utils.py
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from pkgmgr.actions.mirror.url_utils import hostport_from_git_url, normalize_provider_host, parse_repo_from_git_url
|
||||||
|
|
||||||
|
|
||||||
|
class TestUrlUtils(unittest.TestCase):
|
||||||
|
"""
|
||||||
|
Unit tests for URL parsing helpers used in mirror setup/provisioning.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def test_hostport_from_git_url_ssh_url_with_port(self) -> None:
|
||||||
|
host, port = hostport_from_git_url("ssh://git@code.example.org:2201/alice/repo.git")
|
||||||
|
self.assertEqual(host, "code.example.org")
|
||||||
|
self.assertEqual(port, "2201")
|
||||||
|
|
||||||
|
def test_hostport_from_git_url_https_url_no_port(self) -> None:
|
||||||
|
host, port = hostport_from_git_url("https://github.com/alice/repo.git")
|
||||||
|
self.assertEqual(host, "github.com")
|
||||||
|
self.assertIsNone(port)
|
||||||
|
|
||||||
|
def test_hostport_from_git_url_scp_like(self) -> None:
|
||||||
|
host, port = hostport_from_git_url("git@github.com:alice/repo.git")
|
||||||
|
self.assertEqual(host, "github.com")
|
||||||
|
self.assertIsNone(port)
|
||||||
|
|
||||||
|
def test_hostport_from_git_url_empty(self) -> None:
|
||||||
|
host, port = hostport_from_git_url("")
|
||||||
|
self.assertEqual(host, "")
|
||||||
|
self.assertIsNone(port)
|
||||||
|
|
||||||
|
def test_normalize_provider_host_strips_port_and_lowercases(self) -> None:
|
||||||
|
self.assertEqual(normalize_provider_host("GIT.VEEN.WORLD:2201"), "git.veen.world")
|
||||||
|
|
||||||
|
def test_normalize_provider_host_ipv6_brackets(self) -> None:
|
||||||
|
self.assertEqual(normalize_provider_host("[::1]"), "::1")
|
||||||
|
|
||||||
|
def test_normalize_provider_host_empty(self) -> None:
|
||||||
|
self.assertEqual(normalize_provider_host(""), "")
|
||||||
|
|
||||||
|
def test_parse_repo_from_git_url_ssh_url(self) -> None:
|
||||||
|
host, owner, name = parse_repo_from_git_url("ssh://git@code.example.org:2201/alice/repo.git")
|
||||||
|
self.assertEqual(host, "code.example.org")
|
||||||
|
self.assertEqual(owner, "alice")
|
||||||
|
self.assertEqual(name, "repo")
|
||||||
|
|
||||||
|
def test_parse_repo_from_git_url_https_url(self) -> None:
|
||||||
|
host, owner, name = parse_repo_from_git_url("https://github.com/alice/repo.git")
|
||||||
|
self.assertEqual(host, "github.com")
|
||||||
|
self.assertEqual(owner, "alice")
|
||||||
|
self.assertEqual(name, "repo")
|
||||||
|
|
||||||
|
def test_parse_repo_from_git_url_scp_like(self) -> None:
|
||||||
|
host, owner, name = parse_repo_from_git_url("git@github.com:alice/repo.git")
|
||||||
|
self.assertEqual(host, "github.com")
|
||||||
|
self.assertEqual(owner, "alice")
|
||||||
|
self.assertEqual(name, "repo")
|
||||||
|
|
||||||
|
def test_parse_repo_from_git_url_best_effort_host_owner_repo(self) -> None:
|
||||||
|
host, owner, name = parse_repo_from_git_url("git.veen.world/alice/repo.git")
|
||||||
|
self.assertEqual(host, "git.veen.world")
|
||||||
|
self.assertEqual(owner, "alice")
|
||||||
|
self.assertEqual(name, "repo")
|
||||||
|
|
||||||
|
def test_parse_repo_from_git_url_missing_owner_repo_returns_none(self) -> None:
|
||||||
|
host, owner, name = parse_repo_from_git_url("https://github.com/")
|
||||||
|
self.assertEqual(host, "github.com")
|
||||||
|
self.assertIsNone(owner)
|
||||||
|
self.assertIsNone(name)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
Reference in New Issue
Block a user