Compare commits
36 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9485bc9e3f | ||
|
|
dcda23435d | ||
|
|
a69e81c44b | ||
|
|
2ca004d056 | ||
|
|
f7bd5bfd0b | ||
|
|
2c15a4016b | ||
|
|
9e3ce34626 | ||
|
|
1a13fcaa4e | ||
|
|
48a0d1d458 | ||
|
|
783d2b921a | ||
|
|
6effacefef | ||
|
|
65903e740b | ||
|
|
aa80a2ddb4 | ||
|
|
9456ad4475 | ||
|
|
3d7d7e9c09 | ||
|
|
328203ccd7 | ||
|
|
ac16378807 | ||
|
|
f7a86bc353 | ||
|
|
06a6a77a48 | ||
|
|
4883e40812 | ||
|
|
031ae5ac69 | ||
|
|
1c4fc531fa | ||
|
|
33dfbf3a4d | ||
|
|
a3aa7b6394 | ||
|
|
724c262a4a | ||
|
|
dcbe16c5f0 | ||
|
|
f63b0a9f08 | ||
|
|
822c418503 | ||
|
|
562a6da291 | ||
|
|
e61b30d9af | ||
|
|
27c0c7c01f | ||
|
|
0d652d995e | ||
|
|
0e03fbbee2 | ||
|
|
7cfd7e8d5c | ||
|
|
84b6c71748 | ||
|
|
db9aaf920e |
8
.github/workflows/ci.yml
vendored
8
.github/workflows/ci.yml
vendored
@@ -28,8 +28,8 @@ jobs:
|
||||
test-virgin-root:
|
||||
uses: ./.github/workflows/test-virgin-root.yml
|
||||
|
||||
codesniffer-shellcheck:
|
||||
uses: ./.github/workflows/codesniffer-shellcheck.yml
|
||||
lint-shell:
|
||||
uses: ./.github/workflows/lint-shell.yml
|
||||
|
||||
codesniffer-ruff:
|
||||
uses: ./.github/workflows/codesniffer-ruff.yml
|
||||
lint-python:
|
||||
uses: ./.github/workflows/lint-python.yml
|
||||
|
||||
@@ -4,7 +4,7 @@ on:
|
||||
workflow_call:
|
||||
|
||||
jobs:
|
||||
codesniffer-ruff:
|
||||
lint-python:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
@@ -4,7 +4,7 @@ on:
|
||||
workflow_call:
|
||||
|
||||
jobs:
|
||||
codesniffer-shellcheck:
|
||||
lint-shell:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
12
.github/workflows/mark-stable.yml
vendored
12
.github/workflows/mark-stable.yml
vendored
@@ -29,16 +29,16 @@ jobs:
|
||||
test-virgin-root:
|
||||
uses: ./.github/workflows/test-virgin-root.yml
|
||||
|
||||
codesniffer-shellcheck:
|
||||
uses: ./.github/workflows/codesniffer-shellcheck.yml
|
||||
lint-shell:
|
||||
uses: ./.github/workflows/lint-shell.yml
|
||||
|
||||
codesniffer-ruff:
|
||||
uses: ./.github/workflows/codesniffer-ruff.yml
|
||||
lint-python:
|
||||
uses: ./.github/workflows/lint-python.yml
|
||||
|
||||
mark-stable:
|
||||
needs:
|
||||
- codesniffer-shellcheck
|
||||
- codesniffer-ruff
|
||||
- lint-shell
|
||||
- lint-python
|
||||
- test-unit
|
||||
- test-integration
|
||||
- test-env-nix
|
||||
|
||||
12
.github/workflows/publish-containers.yml
vendored
12
.github/workflows/publish-containers.yml
vendored
@@ -19,7 +19,6 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
fetch-tags: true
|
||||
|
||||
- name: Checkout workflow_run commit and refresh tags
|
||||
run: |
|
||||
@@ -35,22 +34,30 @@ jobs:
|
||||
SHA="$(git rev-parse HEAD)"
|
||||
|
||||
V_TAG="$(git tag --points-at "${SHA}" --list 'v*' | sort -V | tail -n1)"
|
||||
[[ -n "$V_TAG" ]] || { echo "No version tag found"; exit 1; }
|
||||
if [[ -z "${V_TAG}" ]]; then
|
||||
echo "No version tag found for ${SHA}. Skipping publish."
|
||||
echo "should_publish=false" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
VERSION="${V_TAG#v}"
|
||||
|
||||
STABLE_SHA="$(git rev-parse -q --verify refs/tags/stable^{commit} 2>/dev/null || true)"
|
||||
IS_STABLE=false
|
||||
[[ -n "${STABLE_SHA}" && "${STABLE_SHA}" == "${SHA}" ]] && IS_STABLE=true
|
||||
|
||||
echo "should_publish=true" >> "$GITHUB_OUTPUT"
|
||||
echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
|
||||
echo "is_stable=${IS_STABLE}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
if: ${{ steps.info.outputs.should_publish == 'true' }}
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
use: true
|
||||
|
||||
- name: Login to GHCR
|
||||
if: ${{ steps.info.outputs.should_publish == 'true' }}
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
@@ -58,6 +65,7 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Publish all images
|
||||
if: ${{ steps.info.outputs.should_publish == 'true' }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
OWNER="${{ github.repository_owner }}" \
|
||||
|
||||
46
CHANGELOG.md
46
CHANGELOG.md
@@ -1,3 +1,49 @@
|
||||
## [1.8.0] - 2025-12-15
|
||||
|
||||
* *** New Features: ***
|
||||
- **Silent Updates**: You can now use the `--silent` flag during installs and updates to suppress error messages for individual repositories and get a single summary at the end. This ensures the process continues even if some repositories fail, while still preserving interactive checks when not in silent mode.
|
||||
- **Repository Scaffolding**: The process for creating new repositories has been improved. You can now use templates to scaffold repositories with a preview and automatic mirror setup.
|
||||
|
||||
*** Bug Fixes: ***
|
||||
- **Pip Installation**: Pip is now installed automatically on all supported systems. This includes `python-pip` for Arch and `python3-pip` for CentOS, Debian, Fedora, and Ubuntu, ensuring that pip is available for Python package installations.
|
||||
- **Pacman Keyring**: Fixed an issue on Arch Linux where package installation would fail due to missing keys. The pacman keyring is now properly initialized before installing packages.
|
||||
|
||||
|
||||
## [1.7.2] - 2025-12-15
|
||||
|
||||
* * Git mirrors are now resolved consistently (origin → MIRRORS file → config → default).
|
||||
* The `origin` remote is always enforced to use the primary URL for both fetch and push.
|
||||
* Additional mirrors are added as extra push targets without duplication.
|
||||
* Local and remote mirror setup behaves more predictably and consistently.
|
||||
* Improved test coverage ensures stable origin and push URL handling.
|
||||
|
||||
|
||||
## [1.7.1] - 2025-12-14
|
||||
|
||||
* Patched package-manager to kpmx to publish on pypi
|
||||
|
||||
|
||||
## [1.7.0] - 2025-12-14
|
||||
|
||||
* * New *pkgmgr publish* command to publish repository artifacts to PyPI based on the *MIRRORS* file.
|
||||
* Automatically selects the current repository when no explicit selection is given.
|
||||
* Publishes only when a semantic version tag is present on *HEAD*; otherwise skips with a clear info message.
|
||||
* Supports non-interactive mode for CI environments via *--non-interactive*.
|
||||
|
||||
|
||||
## [1.6.4] - 2025-12-14
|
||||
|
||||
* * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
|
||||
* More stable launcher behavior in packaged and virtual-env setups.
|
||||
* Enhanced mirror and remote handling: repository owner/name are derived from URLs, with smoother provisioning and clearer credential handling.
|
||||
* More reliable releases and artifacts due to safer CI behavior when no version tag is present.
|
||||
|
||||
|
||||
## [1.6.3] - 2025-12-14
|
||||
|
||||
* ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations.
|
||||
|
||||
|
||||
## [1.6.2] - 2025-12-14
|
||||
|
||||
* **pkgmgr version** now also shows the installed pkgmgr version when run outside a repository.
|
||||
|
||||
3
MIRRORS
3
MIRRORS
@@ -1,3 +1,4 @@
|
||||
git@github.com:kevinveenbirkenbach/package-manager.git
|
||||
ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git
|
||||
ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git
|
||||
ssh://git@code.infinito.nexus:2201/kevinveenbirkenbach/pkgmgr.git
|
||||
https://pypi.org/project/kpmx/
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
rec {
|
||||
pkgmgr = pyPkgs.buildPythonApplication {
|
||||
pname = "package-manager";
|
||||
version = "1.6.2";
|
||||
version = "1.8.0";
|
||||
|
||||
# Use the git repo as source
|
||||
src = ./.;
|
||||
@@ -49,6 +49,7 @@
|
||||
# Runtime dependencies (matches [project.dependencies] in pyproject.toml)
|
||||
propagatedBuildInputs = [
|
||||
pyPkgs.pyyaml
|
||||
pyPkgs.jinja2
|
||||
pyPkgs.pip
|
||||
];
|
||||
|
||||
@@ -78,6 +79,7 @@
|
||||
pythonWithDeps = python.withPackages (ps: [
|
||||
ps.pip
|
||||
ps.pyyaml
|
||||
ps.jinja2
|
||||
]);
|
||||
in
|
||||
{
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>
|
||||
|
||||
pkgname=package-manager
|
||||
pkgver=0.9.1
|
||||
pkgver=1.8.0
|
||||
pkgrel=1
|
||||
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
|
||||
arch=('any')
|
||||
|
||||
@@ -1,3 +1,55 @@
|
||||
package-manager (1.8.0-1) unstable; urgency=medium
|
||||
|
||||
* *** New Features: ***
|
||||
- **Silent Updates**: You can now use the `--silent` flag during installs and updates to suppress error messages for individual repositories and get a single summary at the end. This ensures the process continues even if some repositories fail, while still preserving interactive checks when not in silent mode.
|
||||
- **Repository Scaffolding**: The process for creating new repositories has been improved. You can now use templates to scaffold repositories with a preview and automatic mirror setup.
|
||||
|
||||
*** Bug Fixes: ***
|
||||
- **Pip Installation**: Pip is now installed automatically on all supported systems. This includes `python-pip` for Arch and `python3-pip` for CentOS, Debian, Fedora, and Ubuntu, ensuring that pip is available for Python package installations.
|
||||
- **Pacman Keyring**: Fixed an issue on Arch Linux where package installation would fail due to missing keys. The pacman keyring is now properly initialized before installing packages.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Mon, 15 Dec 2025 13:37:42 +0100
|
||||
|
||||
package-manager (1.7.2-1) unstable; urgency=medium
|
||||
|
||||
* * Git mirrors are now resolved consistently (origin → MIRRORS file → config → default).
|
||||
* The `origin` remote is always enforced to use the primary URL for both fetch and push.
|
||||
* Additional mirrors are added as extra push targets without duplication.
|
||||
* Local and remote mirror setup behaves more predictably and consistently.
|
||||
* Improved test coverage ensures stable origin and push URL handling.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Mon, 15 Dec 2025 00:53:26 +0100
|
||||
|
||||
package-manager (1.7.1-1) unstable; urgency=medium
|
||||
|
||||
* Patched package-manager to kpmx to publish on pypi
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 21:19:11 +0100
|
||||
|
||||
package-manager (1.7.0-1) unstable; urgency=medium
|
||||
|
||||
* * New *pkgmgr publish* command to publish repository artifacts to PyPI based on the *MIRRORS* file.
|
||||
* Automatically selects the current repository when no explicit selection is given.
|
||||
* Publishes only when a semantic version tag is present on *HEAD*; otherwise skips with a clear info message.
|
||||
* Supports non-interactive mode for CI environments via *--non-interactive*.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 21:10:06 +0100
|
||||
|
||||
package-manager (1.6.4-1) unstable; urgency=medium
|
||||
|
||||
* * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
|
||||
* More stable launcher behavior in packaged and virtual-env setups.
|
||||
* Enhanced mirror and remote handling: repository owner/name are derived from URLs, with smoother provisioning and clearer credential handling.
|
||||
* More reliable releases and artifacts due to safer CI behavior when no version tag is present.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 19:33:07 +0100
|
||||
|
||||
package-manager (1.6.3-1) unstable; urgency=medium
|
||||
|
||||
* ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 13:39:52 +0100
|
||||
|
||||
package-manager (0.9.1-1) unstable; urgency=medium
|
||||
|
||||
* * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
Name: package-manager
|
||||
Version: 0.9.1
|
||||
Version: 1.8.0
|
||||
Release: 1%{?dist}
|
||||
Summary: Wrapper that runs Kevin's package-manager via Nix flake
|
||||
|
||||
@@ -74,6 +74,40 @@ echo ">>> package-manager removed. Nix itself was not removed."
|
||||
/usr/lib/package-manager/
|
||||
|
||||
%changelog
|
||||
* Mon Dec 15 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.0-1
|
||||
- *** New Features: ***
|
||||
- **Silent Updates**: You can now use the `--silent` flag during installs and updates to suppress error messages for individual repositories and get a single summary at the end. This ensures the process continues even if some repositories fail, while still preserving interactive checks when not in silent mode.
|
||||
- **Repository Scaffolding**: The process for creating new repositories has been improved. You can now use templates to scaffold repositories with a preview and automatic mirror setup.
|
||||
|
||||
*** Bug Fixes: ***
|
||||
- **Pip Installation**: Pip is now installed automatically on all supported systems. This includes `python-pip` for Arch and `python3-pip` for CentOS, Debian, Fedora, and Ubuntu, ensuring that pip is available for Python package installations.
|
||||
- **Pacman Keyring**: Fixed an issue on Arch Linux where package installation would fail due to missing keys. The pacman keyring is now properly initialized before installing packages.
|
||||
|
||||
* Mon Dec 15 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.7.2-1
|
||||
- * Git mirrors are now resolved consistently (origin → MIRRORS file → config → default).
|
||||
* The `origin` remote is always enforced to use the primary URL for both fetch and push.
|
||||
* Additional mirrors are added as extra push targets without duplication.
|
||||
* Local and remote mirror setup behaves more predictably and consistently.
|
||||
* Improved test coverage ensures stable origin and push URL handling.
|
||||
|
||||
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.7.1-1
|
||||
- Patched package-manager to kpmx to publish on pypi
|
||||
|
||||
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.7.0-1
|
||||
- * New *pkgmgr publish* command to publish repository artifacts to PyPI based on the *MIRRORS* file.
|
||||
* Automatically selects the current repository when no explicit selection is given.
|
||||
* Publishes only when a semantic version tag is present on *HEAD*; otherwise skips with a clear info message.
|
||||
* Supports non-interactive mode for CI environments via *--non-interactive*.
|
||||
|
||||
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.6.4-1
|
||||
- * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
|
||||
* More stable launcher behavior in packaged and virtual-env setups.
|
||||
* Enhanced mirror and remote handling: repository owner/name are derived from URLs, with smoother provisioning and clearer credential handling.
|
||||
* More reliable releases and artifacts due to safer CI behavior when no version tag is present.
|
||||
|
||||
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.6.3-1
|
||||
- ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations.
|
||||
|
||||
* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.9.1-1
|
||||
- * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
|
||||
* Split virgin tests into root/user workflows; stabilized Nix installer across distros; improved test scripts with dynamic distro selection and isolated Nix stores.
|
||||
|
||||
@@ -6,8 +6,8 @@ requires = [
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "package-manager"
|
||||
version = "1.6.2"
|
||||
name = "kpmx"
|
||||
version = "1.8.0"
|
||||
description = "Kevin's package-manager tool (pkgmgr)"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.9"
|
||||
@@ -21,6 +21,7 @@ authors = [
|
||||
dependencies = [
|
||||
"PyYAML>=6.0",
|
||||
"tomli; python_version < \"3.11\"",
|
||||
"jinja2>=3.1"
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
|
||||
@@ -6,6 +6,13 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
echo "[arch/dependencies] Installing Arch build dependencies..."
|
||||
|
||||
pacman -Syu --noconfirm
|
||||
|
||||
if ! pacman-key --list-sigs &>/dev/null; then
|
||||
echo "[arch/dependencies] Initializing pacman keyring..."
|
||||
pacman-key --init
|
||||
pacman-key --populate archlinux
|
||||
fi
|
||||
|
||||
pacman -S --noconfirm --needed \
|
||||
base-devel \
|
||||
git \
|
||||
@@ -13,6 +20,7 @@ pacman -S --noconfirm --needed \
|
||||
curl \
|
||||
ca-certificates \
|
||||
python \
|
||||
python-pip \
|
||||
xz
|
||||
|
||||
pacman -Scc --noconfirm
|
||||
|
||||
@@ -14,6 +14,7 @@ dnf -y install \
|
||||
curl-minimal \
|
||||
ca-certificates \
|
||||
python3 \
|
||||
python3-pip \
|
||||
sudo \
|
||||
xz
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
|
||||
ca-certificates \
|
||||
python3 \
|
||||
python3-venv \
|
||||
python3-pip \
|
||||
xz-utils
|
||||
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
@@ -14,6 +14,7 @@ dnf -y install \
|
||||
curl \
|
||||
ca-certificates \
|
||||
python3 \
|
||||
python3-pip \
|
||||
xz
|
||||
|
||||
dnf clean all
|
||||
|
||||
@@ -17,6 +17,7 @@ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
|
||||
make \
|
||||
python3 \
|
||||
python3-venv \
|
||||
python3-pip \
|
||||
ca-certificates \
|
||||
xz-utils
|
||||
|
||||
|
||||
@@ -2,6 +2,16 @@
|
||||
set -euo pipefail
|
||||
|
||||
FLAKE_DIR="/usr/lib/package-manager"
|
||||
NIX_LIB_DIR="${FLAKE_DIR}/nix/lib"
|
||||
RETRY_LIB="${NIX_LIB_DIR}/retry_403.sh"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Hard requirement: retry helper must exist (fail if missing)
|
||||
# ---------------------------------------------------------------------------
|
||||
if [[ ! -f "${RETRY_LIB}" ]]; then
|
||||
echo "[launcher] ERROR: Required retry helper not found: ${RETRY_LIB}" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Try to ensure that "nix" is on PATH (common locations + container user)
|
||||
@@ -32,9 +42,13 @@ if ! command -v nix >/dev/null 2>&1; then
|
||||
fi
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Primary path: use Nix flake if available
|
||||
# Primary path: use Nix flake if available (with GitHub 403 retry)
|
||||
# ---------------------------------------------------------------------------
|
||||
if command -v nix >/dev/null 2>&1; then
|
||||
if declare -F run_with_github_403_retry >/dev/null; then
|
||||
# shellcheck source=./scripts/nix/lib/retry_403.sh
|
||||
source "${RETRY_LIB}"
|
||||
exec run_with_github_403_retry nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
|
||||
else
|
||||
exec nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
|
||||
fi
|
||||
|
||||
|
||||
@@ -1,32 +1,49 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
IMAGE="pkgmgr-$PKGMGR_DISTRO"
|
||||
IMAGE="pkgmgr-${PKGMGR_DISTRO}"
|
||||
|
||||
echo
|
||||
echo "------------------------------------------------------------"
|
||||
echo ">>> Testing VENV: $IMAGE"
|
||||
echo ">>> Testing VENV: ${IMAGE}"
|
||||
echo "------------------------------------------------------------"
|
||||
|
||||
echo "[test-env-virtual] Inspect image metadata:"
|
||||
docker image inspect "$IMAGE" | sed -n '1,40p'
|
||||
|
||||
echo "[test-env-virtual] Running: docker run --rm --entrypoint pkgmgr $IMAGE --help"
|
||||
docker image inspect "${IMAGE}" | sed -n '1,40p'
|
||||
echo
|
||||
|
||||
# Run the command and capture the output
|
||||
# ------------------------------------------------------------
|
||||
# Run VENV-based pkgmgr test inside container
|
||||
# ------------------------------------------------------------
|
||||
if OUTPUT=$(docker run --rm \
|
||||
-e REINSTALL_PKGMGR=1 \
|
||||
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
||||
-v "$(pwd):/src" \
|
||||
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
||||
"$IMAGE" 2>&1); then
|
||||
-e REINSTALL_PKGMGR=1 \
|
||||
-v "$(pwd):/src" \
|
||||
-w /src \
|
||||
"${IMAGE}" \
|
||||
bash -lc '
|
||||
set -euo pipefail
|
||||
|
||||
echo "[test-env-virtual] Installing pkgmgr (distro package)..."
|
||||
make install
|
||||
|
||||
echo "[test-env-virtual] Setting up Python venv..."
|
||||
make setup-venv
|
||||
|
||||
echo "[test-env-virtual] Activating venv..."
|
||||
. "$HOME/.venvs/pkgmgr/bin/activate"
|
||||
|
||||
echo "[test-env-virtual] Using pkgmgr from:"
|
||||
command -v pkgmgr
|
||||
pkgmgr --help
|
||||
' 2>&1); then
|
||||
|
||||
echo "$OUTPUT"
|
||||
echo
|
||||
echo "[test-env-virtual] SUCCESS: $IMAGE responded to 'pkgmgr --help'"
|
||||
echo "[test-env-virtual] SUCCESS: venv-based pkgmgr works in ${IMAGE}"
|
||||
|
||||
else
|
||||
echo "$OUTPUT"
|
||||
echo
|
||||
echo "[test-env-virtual] ERROR: $IMAGE failed to run 'pkgmgr --help'"
|
||||
echo "[test-env-virtual] ERROR: venv-based pkgmgr failed in ${IMAGE}"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -16,7 +16,7 @@ Responsibilities:
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Any, Dict, List, Optional
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
@@ -93,6 +93,7 @@ def _verify_repo(
|
||||
repo_dir: str,
|
||||
no_verification: bool,
|
||||
identifier: str,
|
||||
silent: bool,
|
||||
) -> bool:
|
||||
"""
|
||||
Verify a repository using the configured verification data.
|
||||
@@ -111,10 +112,15 @@ def _verify_repo(
|
||||
print(f"Warning: Verification failed for {identifier}:")
|
||||
for err in errors:
|
||||
print(f" - {err}")
|
||||
choice = input("Continue anyway? [y/N]: ").strip().lower()
|
||||
if choice != "y":
|
||||
print(f"Skipping installation for {identifier}.")
|
||||
return False
|
||||
|
||||
if silent:
|
||||
# Non-interactive mode: continue with a warning.
|
||||
print(f"[Warning] Continuing despite verification failure for {identifier} (--silent).")
|
||||
else:
|
||||
choice = input("Continue anyway? [y/N]: ").strip().lower()
|
||||
if choice != "y":
|
||||
print(f"Skipping installation for {identifier}.")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@@ -163,6 +169,8 @@ def install_repos(
|
||||
clone_mode: str,
|
||||
update_dependencies: bool,
|
||||
force_update: bool = False,
|
||||
silent: bool = False,
|
||||
emit_summary: bool = True,
|
||||
) -> None:
|
||||
"""
|
||||
Install one or more repositories according to the configured installers
|
||||
@@ -170,45 +178,72 @@ def install_repos(
|
||||
|
||||
If force_update=True, installers of the currently active layer are allowed
|
||||
to run again (upgrade/refresh), even if that layer is already loaded.
|
||||
|
||||
If silent=True, repository failures are downgraded to warnings and the
|
||||
overall command never exits non-zero because of per-repository failures.
|
||||
"""
|
||||
pipeline = InstallationPipeline(INSTALLERS)
|
||||
failures: List[Tuple[str, str]] = []
|
||||
|
||||
for repo in selected_repos:
|
||||
identifier = get_repo_identifier(repo, all_repos)
|
||||
|
||||
repo_dir = _ensure_repo_dir(
|
||||
repo=repo,
|
||||
repositories_base_dir=repositories_base_dir,
|
||||
all_repos=all_repos,
|
||||
preview=preview,
|
||||
no_verification=no_verification,
|
||||
clone_mode=clone_mode,
|
||||
identifier=identifier,
|
||||
)
|
||||
if not repo_dir:
|
||||
try:
|
||||
repo_dir = _ensure_repo_dir(
|
||||
repo=repo,
|
||||
repositories_base_dir=repositories_base_dir,
|
||||
all_repos=all_repos,
|
||||
preview=preview,
|
||||
no_verification=no_verification,
|
||||
clone_mode=clone_mode,
|
||||
identifier=identifier,
|
||||
)
|
||||
if not repo_dir:
|
||||
failures.append((identifier, "clone/ensure repo directory failed"))
|
||||
continue
|
||||
|
||||
if not _verify_repo(
|
||||
repo=repo,
|
||||
repo_dir=repo_dir,
|
||||
no_verification=no_verification,
|
||||
identifier=identifier,
|
||||
silent=silent,
|
||||
):
|
||||
continue
|
||||
|
||||
ctx = _create_context(
|
||||
repo=repo,
|
||||
identifier=identifier,
|
||||
repo_dir=repo_dir,
|
||||
repositories_base_dir=repositories_base_dir,
|
||||
bin_dir=bin_dir,
|
||||
all_repos=all_repos,
|
||||
no_verification=no_verification,
|
||||
preview=preview,
|
||||
quiet=quiet,
|
||||
clone_mode=clone_mode,
|
||||
update_dependencies=update_dependencies,
|
||||
force_update=force_update,
|
||||
)
|
||||
|
||||
pipeline.run(ctx)
|
||||
|
||||
except SystemExit as exc:
|
||||
code = exc.code if isinstance(exc.code, int) else str(exc.code)
|
||||
failures.append((identifier, f"installer failed (exit={code})"))
|
||||
if not quiet:
|
||||
print(f"[Warning] install: repository {identifier} failed (exit={code}). Continuing...")
|
||||
continue
|
||||
except Exception as exc:
|
||||
failures.append((identifier, f"unexpected error: {exc}"))
|
||||
if not quiet:
|
||||
print(f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing...")
|
||||
continue
|
||||
|
||||
if not _verify_repo(
|
||||
repo=repo,
|
||||
repo_dir=repo_dir,
|
||||
no_verification=no_verification,
|
||||
identifier=identifier,
|
||||
):
|
||||
continue
|
||||
if failures and emit_summary and not quiet:
|
||||
print("\n[pkgmgr] Installation finished with warnings:")
|
||||
for ident, msg in failures:
|
||||
print(f" - {ident}: {msg}")
|
||||
|
||||
ctx = _create_context(
|
||||
repo=repo,
|
||||
identifier=identifier,
|
||||
repo_dir=repo_dir,
|
||||
repositories_base_dir=repositories_base_dir,
|
||||
bin_dir=bin_dir,
|
||||
all_repos=all_repos,
|
||||
no_verification=no_verification,
|
||||
preview=preview,
|
||||
quiet=quiet,
|
||||
clone_mode=clone_mode,
|
||||
update_dependencies=update_dependencies,
|
||||
force_update=force_update,
|
||||
)
|
||||
|
||||
pipeline.run(ctx)
|
||||
if failures and not silent:
|
||||
raise SystemExit(1)
|
||||
|
||||
100
src/pkgmgr/actions/install/installers/nix/conflicts.py
Normal file
100
src/pkgmgr/actions/install/installers/nix/conflicts.py
Normal file
@@ -0,0 +1,100 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, List
|
||||
|
||||
from .profile import NixProfileInspector
|
||||
from .retry import GitHubRateLimitRetry
|
||||
from .runner import CommandRunner
|
||||
from .textparse import NixConflictTextParser
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
|
||||
|
||||
class NixConflictResolver:
|
||||
"""
|
||||
Resolves nix profile file conflicts by:
|
||||
1. Parsing conflicting store paths from stderr
|
||||
2. Mapping them to profile remove tokens via `nix profile list --json`
|
||||
3. Removing those tokens deterministically
|
||||
4. Retrying install
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
runner: CommandRunner,
|
||||
retry: GitHubRateLimitRetry,
|
||||
profile: NixProfileInspector,
|
||||
) -> None:
|
||||
self._runner = runner
|
||||
self._retry = retry
|
||||
self._profile = profile
|
||||
self._parser = NixConflictTextParser()
|
||||
|
||||
def resolve(
|
||||
self,
|
||||
ctx: "RepoContext",
|
||||
install_cmd: str,
|
||||
stdout: str,
|
||||
stderr: str,
|
||||
*,
|
||||
output: str,
|
||||
max_rounds: int = 10,
|
||||
) -> bool:
|
||||
quiet = bool(getattr(ctx, "quiet", False))
|
||||
combined = f"{stdout}\n{stderr}"
|
||||
|
||||
for _ in range(max_rounds):
|
||||
# 1) Extract conflicting store prefixes from nix error output
|
||||
store_prefixes = self._parser.existing_store_prefixes(combined)
|
||||
|
||||
# 2) Resolve them to concrete remove tokens
|
||||
tokens: List[str] = self._profile.find_remove_tokens_for_store_prefixes(
|
||||
ctx,
|
||||
self._runner,
|
||||
store_prefixes,
|
||||
)
|
||||
|
||||
# 3) Fallback: output-name based lookup (also covers nix suggesting: `nix profile remove pkgmgr`)
|
||||
if not tokens:
|
||||
tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
|
||||
|
||||
if tokens:
|
||||
if not quiet:
|
||||
print(
|
||||
"[nix] conflict detected; removing existing profile entries: "
|
||||
+ ", ".join(tokens)
|
||||
)
|
||||
|
||||
for t in tokens:
|
||||
# tokens may contain things like "pkgmgr" or "pkgmgr-1" or quoted tokens (we keep raw)
|
||||
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
|
||||
|
||||
res = self._retry.run_with_retry(ctx, self._runner, install_cmd)
|
||||
if res.returncode == 0:
|
||||
return True
|
||||
|
||||
combined = f"{res.stdout}\n{res.stderr}"
|
||||
continue
|
||||
|
||||
# 4) Last-resort fallback: use textual remove tokens from stderr (“nix profile remove X”)
|
||||
tokens = self._parser.remove_tokens(combined)
|
||||
if tokens:
|
||||
if not quiet:
|
||||
print("[nix] fallback remove tokens: " + ", ".join(tokens))
|
||||
|
||||
for t in tokens:
|
||||
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
|
||||
|
||||
res = self._retry.run_with_retry(ctx, self._runner, install_cmd)
|
||||
if res.returncode == 0:
|
||||
return True
|
||||
|
||||
combined = f"{res.stdout}\n{res.stderr}"
|
||||
continue
|
||||
|
||||
if not quiet:
|
||||
print("[nix] conflict detected but could not resolve profile entries to remove.")
|
||||
return False
|
||||
|
||||
return False
|
||||
@@ -1,12 +1,12 @@
|
||||
# src/pkgmgr/actions/install/installers/nix/installer.py
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from typing import List, Tuple, TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, List, Tuple
|
||||
|
||||
from pkgmgr.actions.install.installers.base import BaseInstaller
|
||||
|
||||
from .conflicts import NixConflictResolver
|
||||
from .profile import NixProfileInspector
|
||||
from .retry import GitHubRateLimitRetry, RetryPolicy
|
||||
from .runner import CommandRunner
|
||||
@@ -14,6 +14,7 @@ from .runner import CommandRunner
|
||||
if TYPE_CHECKING:
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
|
||||
|
||||
class NixFlakeInstaller(BaseInstaller):
|
||||
layer = "nix"
|
||||
FLAKE_FILE = "flake.nix"
|
||||
@@ -22,15 +23,18 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
self._runner = CommandRunner()
|
||||
self._retry = GitHubRateLimitRetry(policy=policy)
|
||||
self._profile = NixProfileInspector()
|
||||
self._conflicts = NixConflictResolver(self._runner, self._retry, self._profile)
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# Compatibility: supports()
|
||||
# ------------------------------------------------------------------ #
|
||||
# Newer nix rejects numeric indices; we learn this at runtime and cache the decision.
|
||||
self._indices_supported: bool | None = None
|
||||
|
||||
def supports(self, ctx: "RepoContext") -> bool:
|
||||
if os.environ.get("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER") == "1":
|
||||
if not ctx.quiet:
|
||||
print("[INFO] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 – skipping NixFlakeInstaller.")
|
||||
print(
|
||||
"[INFO] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 – "
|
||||
"skipping NixFlakeInstaller."
|
||||
)
|
||||
return False
|
||||
|
||||
if shutil.which("nix") is None:
|
||||
@@ -38,20 +42,12 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
|
||||
return os.path.exists(os.path.join(ctx.repo_dir, self.FLAKE_FILE))
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# Compatibility: output selection
|
||||
# ------------------------------------------------------------------ #
|
||||
|
||||
def _profile_outputs(self, ctx: "RepoContext") -> List[Tuple[str, bool]]:
|
||||
# (output_name, allow_failure)
|
||||
if ctx.identifier in {"pkgmgr", "package-manager"}:
|
||||
return [("pkgmgr", False), ("default", True)]
|
||||
return [("default", False)]
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# Compatibility: run()
|
||||
# ------------------------------------------------------------------ #
|
||||
|
||||
def run(self, ctx: "RepoContext") -> None:
|
||||
if not self.supports(ctx):
|
||||
return
|
||||
@@ -59,11 +55,12 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
outputs = self._profile_outputs(ctx)
|
||||
|
||||
if not ctx.quiet:
|
||||
print(
|
||||
msg = (
|
||||
"[nix] flake detected in "
|
||||
f"{ctx.identifier}, ensuring outputs: "
|
||||
+ ", ".join(name for name, _ in outputs)
|
||||
)
|
||||
print(msg)
|
||||
|
||||
for output, allow_failure in outputs:
|
||||
if ctx.force_update:
|
||||
@@ -71,13 +68,13 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
else:
|
||||
self._install_only(ctx, output, allow_failure)
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# Core logic (unchanged semantics)
|
||||
# ------------------------------------------------------------------ #
|
||||
|
||||
def _installable(self, ctx: "RepoContext", output: str) -> str:
|
||||
return f"{ctx.repo_dir}#{output}"
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
# Core install path
|
||||
# ---------------------------------------------------------------------
|
||||
|
||||
def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
|
||||
install_cmd = f"nix profile install {self._installable(ctx, output)}"
|
||||
|
||||
@@ -85,35 +82,56 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
print(f"[nix] install: {install_cmd}")
|
||||
|
||||
res = self._retry.run_with_retry(ctx, self._runner, install_cmd)
|
||||
|
||||
if res.returncode == 0:
|
||||
if not ctx.quiet:
|
||||
print(f"[nix] output '{output}' successfully installed.")
|
||||
return
|
||||
|
||||
# Conflict resolver first (handles the common “existing package already provides file” case)
|
||||
if self._conflicts.resolve(
|
||||
ctx,
|
||||
install_cmd,
|
||||
res.stdout,
|
||||
res.stderr,
|
||||
output=output,
|
||||
):
|
||||
if not ctx.quiet:
|
||||
print(f"[nix] output '{output}' successfully installed after conflict cleanup.")
|
||||
return
|
||||
|
||||
if not ctx.quiet:
|
||||
print(
|
||||
f"[nix] install failed for '{output}' (exit {res.returncode}), "
|
||||
"trying index-based upgrade/remove+install..."
|
||||
"trying upgrade/remove+install..."
|
||||
)
|
||||
|
||||
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
|
||||
# If indices are supported, try legacy index-upgrade path.
|
||||
if self._indices_supported is not False:
|
||||
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
|
||||
|
||||
upgraded = False
|
||||
for idx in indices:
|
||||
if self._upgrade_index(ctx, idx):
|
||||
upgraded = True
|
||||
if not ctx.quiet:
|
||||
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
|
||||
upgraded = False
|
||||
for idx in indices:
|
||||
if self._upgrade_index(ctx, idx):
|
||||
upgraded = True
|
||||
if not ctx.quiet:
|
||||
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
|
||||
|
||||
if upgraded:
|
||||
return
|
||||
if upgraded:
|
||||
return
|
||||
|
||||
if indices and not ctx.quiet:
|
||||
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
|
||||
if indices and not ctx.quiet:
|
||||
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
|
||||
|
||||
for idx in indices:
|
||||
self._remove_index(ctx, idx)
|
||||
for idx in indices:
|
||||
self._remove_index(ctx, idx)
|
||||
|
||||
# If we learned indices are unsupported, immediately fall back below
|
||||
if self._indices_supported is False:
|
||||
self._remove_tokens_for_output(ctx, output)
|
||||
|
||||
else:
|
||||
# indices explicitly unsupported
|
||||
self._remove_tokens_for_output(ctx, output)
|
||||
|
||||
final = self._runner.run(ctx, install_cmd, allow_failure=True)
|
||||
if final.returncode == 0:
|
||||
@@ -122,17 +140,24 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
return
|
||||
|
||||
print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})")
|
||||
|
||||
if not allow_failure:
|
||||
raise SystemExit(final.returncode)
|
||||
|
||||
print(f"[WARNING] Continuing despite failure of optional output '{output}'.")
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# force_update path (unchanged semantics)
|
||||
# ------------------------------------------------------------------ #
|
||||
# ---------------------------------------------------------------------
|
||||
# force_update path
|
||||
# ---------------------------------------------------------------------
|
||||
|
||||
def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
|
||||
# Prefer token path if indices unsupported (new nix)
|
||||
if self._indices_supported is False:
|
||||
self._remove_tokens_for_output(ctx, output)
|
||||
self._install_only(ctx, output, allow_failure)
|
||||
if not ctx.quiet:
|
||||
print(f"[nix] output '{output}' successfully upgraded.")
|
||||
return
|
||||
|
||||
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
|
||||
|
||||
upgraded_any = False
|
||||
@@ -143,7 +168,8 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
|
||||
|
||||
if upgraded_any:
|
||||
print(f"[nix] output '{output}' successfully upgraded.")
|
||||
if not ctx.quiet:
|
||||
print(f"[nix] output '{output}' successfully upgraded.")
|
||||
return
|
||||
|
||||
if indices and not ctx.quiet:
|
||||
@@ -152,17 +178,52 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
for idx in indices:
|
||||
self._remove_index(ctx, idx)
|
||||
|
||||
# If we learned indices are unsupported, also remove by token to actually clear conflicts
|
||||
if self._indices_supported is False:
|
||||
self._remove_tokens_for_output(ctx, output)
|
||||
|
||||
self._install_only(ctx, output, allow_failure)
|
||||
|
||||
print(f"[nix] output '{output}' successfully upgraded.")
|
||||
if not ctx.quiet:
|
||||
print(f"[nix] output '{output}' successfully upgraded.")
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# ---------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ------------------------------------------------------------------ #
|
||||
# ---------------------------------------------------------------------
|
||||
|
||||
def _stderr_says_indices_unsupported(self, stderr: str) -> bool:
|
||||
s = (stderr or "").lower()
|
||||
return "no longer supports indices" in s or "does not support indices" in s
|
||||
|
||||
def _upgrade_index(self, ctx: "RepoContext", idx: int) -> bool:
|
||||
res = self._runner.run(ctx, f"nix profile upgrade --refresh {idx}", allow_failure=True)
|
||||
cmd = f"nix profile upgrade --refresh {idx}"
|
||||
res = self._runner.run(ctx, cmd, allow_failure=True)
|
||||
|
||||
if self._stderr_says_indices_unsupported(getattr(res, "stderr", "")):
|
||||
self._indices_supported = False
|
||||
return False
|
||||
|
||||
if self._indices_supported is None:
|
||||
self._indices_supported = True
|
||||
|
||||
return res.returncode == 0
|
||||
|
||||
def _remove_index(self, ctx: "RepoContext", idx: int) -> None:
|
||||
self._runner.run(ctx, f"nix profile remove {idx}", allow_failure=True)
|
||||
res = self._runner.run(ctx, f"nix profile remove {idx}", allow_failure=True)
|
||||
|
||||
if self._stderr_says_indices_unsupported(getattr(res, "stderr", "")):
|
||||
self._indices_supported = False
|
||||
|
||||
if self._indices_supported is None:
|
||||
self._indices_supported = True
|
||||
|
||||
def _remove_tokens_for_output(self, ctx: "RepoContext", output: str) -> None:
|
||||
tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
|
||||
if not tokens:
|
||||
return
|
||||
|
||||
if not ctx.quiet:
|
||||
print(f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}")
|
||||
|
||||
for t in tokens:
|
||||
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
|
||||
|
||||
@@ -1,71 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from typing import Any, List, TYPE_CHECKING
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
from .runner import CommandRunner
|
||||
|
||||
class NixProfileInspector:
|
||||
"""
|
||||
Reads and interprets `nix profile list --json` and provides helpers for
|
||||
finding indices matching a given output name.
|
||||
"""
|
||||
|
||||
def find_installed_indices_for_output(self, ctx: "RepoContext", runner: "CommandRunner", output: str) -> List[int]:
|
||||
res = runner.run(ctx, "nix profile list --json", allow_failure=True)
|
||||
if res.returncode != 0:
|
||||
return []
|
||||
|
||||
try:
|
||||
data = json.loads(res.stdout or "{}")
|
||||
except json.JSONDecodeError:
|
||||
return []
|
||||
|
||||
indices: List[int] = []
|
||||
|
||||
elements = data.get("elements")
|
||||
if isinstance(elements, dict):
|
||||
for idx_str, elem in elements.items():
|
||||
try:
|
||||
idx = int(idx_str)
|
||||
except (TypeError, ValueError):
|
||||
continue
|
||||
if self._element_matches_output(elem, output):
|
||||
indices.append(idx)
|
||||
return sorted(indices)
|
||||
|
||||
if isinstance(elements, list):
|
||||
for elem in elements:
|
||||
idx = elem.get("index") if isinstance(elem, dict) else None
|
||||
if isinstance(idx, int) and self._element_matches_output(elem, output):
|
||||
indices.append(idx)
|
||||
return sorted(indices)
|
||||
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def element_matches_output(elem: Any, output: str) -> bool:
|
||||
return NixProfileInspector._element_matches_output(elem, output)
|
||||
|
||||
@staticmethod
|
||||
def _element_matches_output(elem: Any, output: str) -> bool:
|
||||
out = (output or "").strip()
|
||||
if not out or not isinstance(elem, dict):
|
||||
return False
|
||||
|
||||
candidates: List[str] = []
|
||||
for k in ("attrPath", "originalUrl", "url", "storePath", "name"):
|
||||
v = elem.get(k)
|
||||
if isinstance(v, str) and v:
|
||||
candidates.append(v)
|
||||
|
||||
for c in candidates:
|
||||
if c == out:
|
||||
return True
|
||||
if f"#{out}" in c:
|
||||
return True
|
||||
|
||||
return False
|
||||
@@ -0,0 +1,4 @@
|
||||
from .inspector import NixProfileInspector
|
||||
from .models import NixProfileEntry
|
||||
|
||||
__all__ = ["NixProfileInspector", "NixProfileEntry"]
|
||||
162
src/pkgmgr/actions/install/installers/nix/profile/inspector.py
Normal file
162
src/pkgmgr/actions/install/installers/nix/profile/inspector.py
Normal file
@@ -0,0 +1,162 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, List, TYPE_CHECKING
|
||||
|
||||
from .matcher import (
|
||||
entry_matches_output,
|
||||
entry_matches_store_path,
|
||||
stable_unique_ints,
|
||||
)
|
||||
from .normalizer import normalize_elements
|
||||
from .parser import parse_profile_list_json
|
||||
from .result import extract_stdout_text
|
||||
|
||||
if TYPE_CHECKING:
|
||||
# Keep these as TYPE_CHECKING-only to avoid runtime import cycles.
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
from pkgmgr.core.command.runner import CommandRunner
|
||||
|
||||
|
||||
class NixProfileInspector:
|
||||
"""
|
||||
Reads and inspects the user's Nix profile list (JSON).
|
||||
|
||||
Public API:
|
||||
- list_json()
|
||||
- find_installed_indices_for_output() (legacy; may not work on newer nix)
|
||||
- find_indices_by_store_path() (legacy; may not work on newer nix)
|
||||
- find_remove_tokens_for_output()
|
||||
- find_remove_tokens_for_store_prefixes()
|
||||
"""
|
||||
|
||||
def list_json(self, ctx: "RepoContext", runner: "CommandRunner") -> dict[str, Any]:
|
||||
res = runner.run(ctx, "nix profile list --json", allow_failure=False)
|
||||
raw = extract_stdout_text(res)
|
||||
return parse_profile_list_json(raw)
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
# Legacy index helpers (still useful on older nix; newer nix may reject indices)
|
||||
# ---------------------------------------------------------------------
|
||||
|
||||
def find_installed_indices_for_output(
|
||||
self,
|
||||
ctx: "RepoContext",
|
||||
runner: "CommandRunner",
|
||||
output: str,
|
||||
) -> List[int]:
|
||||
data = self.list_json(ctx, runner)
|
||||
entries = normalize_elements(data)
|
||||
|
||||
hits: List[int] = []
|
||||
for e in entries:
|
||||
if e.index is None:
|
||||
continue
|
||||
if entry_matches_output(e, output):
|
||||
hits.append(e.index)
|
||||
|
||||
return stable_unique_ints(hits)
|
||||
|
||||
def find_indices_by_store_path(
|
||||
self,
|
||||
ctx: "RepoContext",
|
||||
runner: "CommandRunner",
|
||||
store_path: str,
|
||||
) -> List[int]:
|
||||
needle = (store_path or "").strip()
|
||||
if not needle:
|
||||
return []
|
||||
|
||||
data = self.list_json(ctx, runner)
|
||||
entries = normalize_elements(data)
|
||||
|
||||
hits: List[int] = []
|
||||
for e in entries:
|
||||
if e.index is None:
|
||||
continue
|
||||
if entry_matches_store_path(e, needle):
|
||||
hits.append(e.index)
|
||||
|
||||
return stable_unique_ints(hits)
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
# New token-based helpers (works with newer nix where indices are rejected)
|
||||
# ---------------------------------------------------------------------
|
||||
|
||||
def find_remove_tokens_for_output(
|
||||
self,
|
||||
ctx: "RepoContext",
|
||||
runner: "CommandRunner",
|
||||
output: str,
|
||||
) -> List[str]:
|
||||
"""
|
||||
Returns profile remove tokens to remove entries matching a given output.
|
||||
|
||||
We always include the raw output token first because nix itself suggests:
|
||||
nix profile remove pkgmgr
|
||||
"""
|
||||
out = (output or "").strip()
|
||||
if not out:
|
||||
return []
|
||||
|
||||
data = self.list_json(ctx, runner)
|
||||
entries = normalize_elements(data)
|
||||
|
||||
tokens: List[str] = [out] # critical: matches nix's own suggestion for conflicts
|
||||
|
||||
for e in entries:
|
||||
if entry_matches_output(e, out):
|
||||
# Prefer removing by key/name (non-index) when possible.
|
||||
# New nix rejects numeric indices; these tokens are safer.
|
||||
k = (e.key or "").strip()
|
||||
n = (e.name or "").strip()
|
||||
|
||||
if k and not k.isdigit():
|
||||
tokens.append(k)
|
||||
elif n and not n.isdigit():
|
||||
tokens.append(n)
|
||||
|
||||
# stable unique preserving order
|
||||
seen: set[str] = set()
|
||||
uniq: List[str] = []
|
||||
for t in tokens:
|
||||
if t and t not in seen:
|
||||
uniq.append(t)
|
||||
seen.add(t)
|
||||
return uniq
|
||||
|
||||
def find_remove_tokens_for_store_prefixes(
|
||||
self,
|
||||
ctx: "RepoContext",
|
||||
runner: "CommandRunner",
|
||||
prefixes: List[str],
|
||||
) -> List[str]:
|
||||
"""
|
||||
Returns remove tokens for entries whose store path matches any prefix.
|
||||
"""
|
||||
prefixes = [(p or "").strip() for p in (prefixes or []) if p]
|
||||
prefixes = [p for p in prefixes if p]
|
||||
if not prefixes:
|
||||
return []
|
||||
|
||||
data = self.list_json(ctx, runner)
|
||||
entries = normalize_elements(data)
|
||||
|
||||
tokens: List[str] = []
|
||||
for e in entries:
|
||||
if not e.store_paths:
|
||||
continue
|
||||
if any(sp == p for sp in e.store_paths for p in prefixes):
|
||||
k = (e.key or "").strip()
|
||||
n = (e.name or "").strip()
|
||||
if k and not k.isdigit():
|
||||
tokens.append(k)
|
||||
elif n and not n.isdigit():
|
||||
tokens.append(n)
|
||||
|
||||
seen: set[str] = set()
|
||||
uniq: List[str] = []
|
||||
for t in tokens:
|
||||
if t and t not in seen:
|
||||
uniq.append(t)
|
||||
seen.add(t)
|
||||
return uniq
|
||||
62
src/pkgmgr/actions/install/installers/nix/profile/matcher.py
Normal file
62
src/pkgmgr/actions/install/installers/nix/profile/matcher.py
Normal file
@@ -0,0 +1,62 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import List
|
||||
|
||||
from .models import NixProfileEntry
|
||||
|
||||
|
||||
def entry_matches_output(entry: NixProfileEntry, output: str) -> bool:
|
||||
"""
|
||||
Heuristic matcher: output is typically a flake output name (e.g. "pkgmgr"),
|
||||
and we match against name/attrPath patterns.
|
||||
"""
|
||||
out = (output or "").strip()
|
||||
if not out:
|
||||
return False
|
||||
|
||||
candidates = [entry.name, entry.attr_path]
|
||||
|
||||
for c in candidates:
|
||||
c = (c or "").strip()
|
||||
if not c:
|
||||
continue
|
||||
|
||||
# Direct match
|
||||
if c == out:
|
||||
return True
|
||||
|
||||
# AttrPath contains "#<output>"
|
||||
if f"#{out}" in c:
|
||||
return True
|
||||
|
||||
# AttrPath ends with ".<output>"
|
||||
if c.endswith(f".{out}"):
|
||||
return True
|
||||
|
||||
# Name pattern "<output>-<n>" (common, e.g. pkgmgr-1)
|
||||
if c.startswith(f"{out}-"):
|
||||
return True
|
||||
|
||||
# Historical special case: repo is "package-manager" but output is "pkgmgr"
|
||||
if out == "pkgmgr" and c.startswith("package-manager-"):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def entry_matches_store_path(entry: NixProfileEntry, store_path: str) -> bool:
|
||||
needle = (store_path or "").strip()
|
||||
if not needle:
|
||||
return False
|
||||
return any((p or "") == needle for p in entry.store_paths)
|
||||
|
||||
|
||||
def stable_unique_ints(values: List[int]) -> List[int]:
|
||||
seen: set[int] = set()
|
||||
uniq: List[int] = []
|
||||
for v in values:
|
||||
if v in seen:
|
||||
continue
|
||||
uniq.append(v)
|
||||
seen.add(v)
|
||||
return uniq
|
||||
17
src/pkgmgr/actions/install/installers/nix/profile/models.py
Normal file
17
src/pkgmgr/actions/install/installers/nix/profile/models.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Optional
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class NixProfileEntry:
|
||||
"""
|
||||
Minimal normalized representation of one nix profile element entry.
|
||||
"""
|
||||
|
||||
key: str
|
||||
index: Optional[int]
|
||||
name: str
|
||||
attr_path: str
|
||||
store_paths: List[str]
|
||||
128
src/pkgmgr/actions/install/installers/nix/profile/normalizer.py
Normal file
128
src/pkgmgr/actions/install/installers/nix/profile/normalizer.py
Normal file
@@ -0,0 +1,128 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from typing import Any, Dict, Iterable, List, Optional
|
||||
|
||||
from .models import NixProfileEntry
|
||||
|
||||
|
||||
def coerce_index(key: str, entry: Dict[str, Any]) -> Optional[int]:
|
||||
"""
|
||||
Nix JSON schema varies:
|
||||
- elements keys might be "0", "1", ...
|
||||
- or might be names like "pkgmgr-1"
|
||||
Some versions include an explicit index field.
|
||||
We try safe options in order.
|
||||
"""
|
||||
k = (key or "").strip()
|
||||
|
||||
# 1) Classic: numeric keys
|
||||
if k.isdigit():
|
||||
try:
|
||||
return int(k)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
# 2) Explicit index fields (schema-dependent)
|
||||
for field in ("index", "id", "position"):
|
||||
v = entry.get(field)
|
||||
if isinstance(v, int):
|
||||
return v
|
||||
if isinstance(v, str) and v.strip().isdigit():
|
||||
try:
|
||||
return int(v.strip())
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# 3) Last resort: extract trailing number from key if it looks like "<name>-<n>"
|
||||
m = re.match(r"^.+-(\d+)$", k)
|
||||
if m:
|
||||
try:
|
||||
return int(m.group(1))
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def iter_store_paths(entry: Dict[str, Any]) -> Iterable[str]:
|
||||
"""
|
||||
Yield all possible store paths from a nix profile JSON entry.
|
||||
|
||||
Nix has had schema shifts. We support common variants:
|
||||
- "storePaths": ["/nix/store/..", ...]
|
||||
- "storePaths": "/nix/store/.." (rare)
|
||||
- "storePath": "/nix/store/.." (some variants)
|
||||
- nested "outputs" dict(s) with store paths (best-effort)
|
||||
"""
|
||||
if not isinstance(entry, dict):
|
||||
return
|
||||
|
||||
sp = entry.get("storePaths")
|
||||
if isinstance(sp, list):
|
||||
for p in sp:
|
||||
if isinstance(p, str):
|
||||
yield p
|
||||
elif isinstance(sp, str):
|
||||
yield sp
|
||||
|
||||
sp2 = entry.get("storePath")
|
||||
if isinstance(sp2, str):
|
||||
yield sp2
|
||||
|
||||
outs = entry.get("outputs")
|
||||
if isinstance(outs, dict):
|
||||
for _, ov in outs.items():
|
||||
if isinstance(ov, dict):
|
||||
p = ov.get("storePath")
|
||||
if isinstance(p, str):
|
||||
yield p
|
||||
|
||||
|
||||
def normalize_store_path(store_path: str) -> str:
|
||||
"""
|
||||
Normalize store path for matching.
|
||||
Currently just strips whitespace; hook for future normalization if needed.
|
||||
"""
|
||||
return (store_path or "").strip()
|
||||
|
||||
|
||||
def normalize_elements(data: Dict[str, Any]) -> List[NixProfileEntry]:
|
||||
"""
|
||||
Converts nix profile list JSON into a list of normalized entries.
|
||||
|
||||
JSON formats observed:
|
||||
- {"elements": {"0": {...}, "1": {...}}}
|
||||
- {"elements": {"pkgmgr-1": {...}, "pkgmgr-2": {...}}}
|
||||
"""
|
||||
elements = data.get("elements")
|
||||
if not isinstance(elements, dict):
|
||||
return []
|
||||
|
||||
normalized: List[NixProfileEntry] = []
|
||||
|
||||
for k, entry in elements.items():
|
||||
if not isinstance(entry, dict):
|
||||
continue
|
||||
|
||||
idx = coerce_index(str(k), entry)
|
||||
name = str(entry.get("name", "") or "")
|
||||
attr = str(entry.get("attrPath", "") or "")
|
||||
|
||||
store_paths: List[str] = []
|
||||
for p in iter_store_paths(entry):
|
||||
sp = normalize_store_path(p)
|
||||
if sp:
|
||||
store_paths.append(sp)
|
||||
|
||||
normalized.append(
|
||||
NixProfileEntry(
|
||||
key=str(k),
|
||||
index=idx,
|
||||
name=name,
|
||||
attr_path=attr,
|
||||
store_paths=store_paths,
|
||||
)
|
||||
)
|
||||
|
||||
return normalized
|
||||
19
src/pkgmgr/actions/install/installers/nix/profile/parser.py
Normal file
19
src/pkgmgr/actions/install/installers/nix/profile/parser.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from typing import Any, Dict
|
||||
|
||||
|
||||
def parse_profile_list_json(raw: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Parse JSON output from `nix profile list --json`.
|
||||
|
||||
Raises SystemExit with a helpful excerpt on parse failure.
|
||||
"""
|
||||
try:
|
||||
return json.loads(raw)
|
||||
except json.JSONDecodeError as e:
|
||||
excerpt = (raw or "")[:5000]
|
||||
raise SystemExit(
|
||||
f"[nix] Failed to parse `nix profile list --json`: {e}\n{excerpt}"
|
||||
) from e
|
||||
28
src/pkgmgr/actions/install/installers/nix/profile/result.py
Normal file
28
src/pkgmgr/actions/install/installers/nix/profile/result.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
|
||||
def extract_stdout_text(result: Any) -> str:
|
||||
"""
|
||||
Normalize different runner return types to a stdout string.
|
||||
|
||||
Supported patterns:
|
||||
- result is str -> returned as-is
|
||||
- result is bytes/bytearray -> decoded UTF-8 (replace errors)
|
||||
- result has `.stdout` (str or bytes) -> used
|
||||
- fallback: str(result)
|
||||
"""
|
||||
if isinstance(result, str):
|
||||
return result
|
||||
|
||||
if isinstance(result, (bytes, bytearray)):
|
||||
return bytes(result).decode("utf-8", errors="replace")
|
||||
|
||||
stdout = getattr(result, "stdout", None)
|
||||
if isinstance(stdout, str):
|
||||
return stdout
|
||||
if isinstance(stdout, (bytes, bytearray)):
|
||||
return bytes(stdout).decode("utf-8", errors="replace")
|
||||
|
||||
return str(result)
|
||||
69
src/pkgmgr/actions/install/installers/nix/profile_list.py
Normal file
69
src/pkgmgr/actions/install/installers/nix/profile_list.py
Normal file
@@ -0,0 +1,69 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from typing import TYPE_CHECKING, List, Tuple
|
||||
|
||||
from .runner import CommandRunner
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
|
||||
|
||||
class NixProfileListReader:
|
||||
def __init__(self, runner: CommandRunner) -> None:
|
||||
self._runner = runner
|
||||
|
||||
@staticmethod
|
||||
def _store_prefix(path: str) -> str:
|
||||
raw = (path or "").strip()
|
||||
m = re.match(r"^(/nix/store/[0-9a-z]{32}-[^/ \t]+)", raw)
|
||||
return m.group(1) if m else raw
|
||||
|
||||
def entries(self, ctx: "RepoContext") -> List[Tuple[int, str]]:
|
||||
res = self._runner.run(ctx, "nix profile list", allow_failure=True)
|
||||
if res.returncode != 0:
|
||||
return []
|
||||
|
||||
entries: List[Tuple[int, str]] = []
|
||||
pat = re.compile(
|
||||
r"^\s*(\d+)\s+.*?(/nix/store/[0-9a-z]{32}-[^/ \t]+)",
|
||||
re.MULTILINE,
|
||||
)
|
||||
|
||||
for m in pat.finditer(res.stdout or ""):
|
||||
idx_s = m.group(1)
|
||||
sp = m.group(2)
|
||||
try:
|
||||
idx = int(idx_s)
|
||||
except Exception:
|
||||
continue
|
||||
entries.append((idx, self._store_prefix(sp)))
|
||||
|
||||
seen: set[int] = set()
|
||||
uniq: List[Tuple[int, str]] = []
|
||||
for idx, sp in entries:
|
||||
if idx not in seen:
|
||||
seen.add(idx)
|
||||
uniq.append((idx, sp))
|
||||
|
||||
return uniq
|
||||
|
||||
def indices_matching_store_prefixes(self, ctx: "RepoContext", prefixes: List[str]) -> List[int]:
|
||||
prefixes = [self._store_prefix(p) for p in prefixes if p]
|
||||
prefixes = [p for p in prefixes if p]
|
||||
if not prefixes:
|
||||
return []
|
||||
|
||||
hits: List[int] = []
|
||||
for idx, sp in self.entries(ctx):
|
||||
if any(sp == p for p in prefixes):
|
||||
hits.append(idx)
|
||||
|
||||
seen: set[int] = set()
|
||||
uniq: List[int] = []
|
||||
for i in hits:
|
||||
if i not in seen:
|
||||
seen.add(i)
|
||||
uniq.append(i)
|
||||
|
||||
return uniq
|
||||
76
src/pkgmgr/actions/install/installers/nix/textparse.py
Normal file
76
src/pkgmgr/actions/install/installers/nix/textparse.py
Normal file
@@ -0,0 +1,76 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from typing import List
|
||||
|
||||
|
||||
class NixConflictTextParser:
|
||||
@staticmethod
|
||||
def _store_prefix(path: str) -> str:
|
||||
raw = (path or "").strip()
|
||||
m = re.match(r"^(/nix/store/[0-9a-z]{32}-[^/ \t]+)", raw)
|
||||
return m.group(1) if m else raw
|
||||
|
||||
def remove_tokens(self, text: str) -> List[str]:
|
||||
pat = re.compile(
|
||||
r"^\s*nix profile remove\s+([^\s'\"`]+|'[^']+'|\"[^\"]+\")\s*$",
|
||||
re.MULTILINE,
|
||||
)
|
||||
|
||||
tokens: List[str] = []
|
||||
for m in pat.finditer(text or ""):
|
||||
t = (m.group(1) or "").strip()
|
||||
if (t.startswith("'") and t.endswith("'")) or (t.startswith('"') and t.endswith('"')):
|
||||
t = t[1:-1]
|
||||
if t:
|
||||
tokens.append(t)
|
||||
|
||||
seen: set[str] = set()
|
||||
uniq: List[str] = []
|
||||
for t in tokens:
|
||||
if t not in seen:
|
||||
seen.add(t)
|
||||
uniq.append(t)
|
||||
|
||||
return uniq
|
||||
|
||||
def existing_store_prefixes(self, text: str) -> List[str]:
|
||||
lines = (text or "").splitlines()
|
||||
prefixes: List[str] = []
|
||||
|
||||
in_existing = False
|
||||
in_new = False
|
||||
|
||||
store_pat = re.compile(r"^\s*(/nix/store/[0-9a-z]{32}-[^ \t]+)")
|
||||
|
||||
for raw in lines:
|
||||
line = raw.strip()
|
||||
|
||||
if "An existing package already provides the following file" in line:
|
||||
in_existing = True
|
||||
in_new = False
|
||||
continue
|
||||
|
||||
if "This is the conflicting file from the new package" in line:
|
||||
in_existing = False
|
||||
in_new = True
|
||||
continue
|
||||
|
||||
if in_existing:
|
||||
m = store_pat.match(raw)
|
||||
if m:
|
||||
prefixes.append(m.group(1))
|
||||
continue
|
||||
|
||||
_ = in_new
|
||||
|
||||
norm = [self._store_prefix(p) for p in prefixes if p]
|
||||
|
||||
seen: set[str] = set()
|
||||
uniq: List[str] = []
|
||||
for p in norm:
|
||||
if p and p not in seen:
|
||||
seen.add(p)
|
||||
uniq.append(p)
|
||||
|
||||
return uniq
|
||||
@@ -1,20 +1,15 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import List, Optional, Set
|
||||
|
||||
from pkgmgr.core.command.run import run_command
|
||||
from pkgmgr.core.git import GitError, run_git
|
||||
from typing import List, Optional, Set
|
||||
|
||||
from .types import MirrorMap, RepoMirrorContext, Repository
|
||||
|
||||
|
||||
def build_default_ssh_url(repo: Repository) -> Optional[str]:
|
||||
"""
|
||||
Build a simple SSH URL from repo config if no explicit mirror is defined.
|
||||
|
||||
Example: git@github.com:account/repository.git
|
||||
"""
|
||||
provider = repo.get("provider")
|
||||
account = repo.get("account")
|
||||
name = repo.get("repository")
|
||||
@@ -23,95 +18,82 @@ def build_default_ssh_url(repo: Repository) -> Optional[str]:
|
||||
if not provider or not account or not name:
|
||||
return None
|
||||
|
||||
provider = str(provider)
|
||||
account = str(account)
|
||||
name = str(name)
|
||||
|
||||
if port:
|
||||
return f"ssh://git@{provider}:{port}/{account}/{name}.git"
|
||||
|
||||
# GitHub-style shorthand
|
||||
return f"git@{provider}:{account}/{name}.git"
|
||||
|
||||
|
||||
def determine_primary_remote_url(
|
||||
repo: Repository,
|
||||
resolved_mirrors: MirrorMap,
|
||||
ctx: RepoMirrorContext,
|
||||
) -> Optional[str]:
|
||||
"""
|
||||
Determine the primary remote URL in a consistent way:
|
||||
|
||||
1. resolved_mirrors["origin"]
|
||||
2. any resolved mirror (first by name)
|
||||
3. default SSH URL from provider/account/repository
|
||||
Priority order:
|
||||
1. origin from resolved mirrors
|
||||
2. MIRRORS file order
|
||||
3. config mirrors order
|
||||
4. default SSH URL
|
||||
"""
|
||||
if "origin" in resolved_mirrors:
|
||||
return resolved_mirrors["origin"]
|
||||
resolved = ctx.resolved_mirrors
|
||||
|
||||
if resolved_mirrors:
|
||||
first_name = sorted(resolved_mirrors.keys())[0]
|
||||
return resolved_mirrors[first_name]
|
||||
if resolved.get("origin"):
|
||||
return resolved["origin"]
|
||||
|
||||
for mirrors in (ctx.file_mirrors, ctx.config_mirrors):
|
||||
for _, url in mirrors.items():
|
||||
if url:
|
||||
return url
|
||||
|
||||
return build_default_ssh_url(repo)
|
||||
|
||||
|
||||
def _safe_git_output(args: List[str], cwd: str) -> Optional[str]:
|
||||
"""
|
||||
Run a Git command via run_git and return its stdout, or None on failure.
|
||||
"""
|
||||
try:
|
||||
return run_git(args, cwd=cwd)
|
||||
except GitError:
|
||||
return None
|
||||
|
||||
|
||||
def current_origin_url(repo_dir: str) -> Optional[str]:
|
||||
"""
|
||||
Return the current URL for remote 'origin', or None if not present.
|
||||
"""
|
||||
output = _safe_git_output(["remote", "get-url", "origin"], cwd=repo_dir)
|
||||
if not output:
|
||||
return None
|
||||
url = output.strip()
|
||||
return url or None
|
||||
|
||||
|
||||
def has_origin_remote(repo_dir: str) -> bool:
|
||||
"""
|
||||
Check whether a remote called 'origin' exists in the repository.
|
||||
"""
|
||||
output = _safe_git_output(["remote"], cwd=repo_dir)
|
||||
if not output:
|
||||
return False
|
||||
names = output.split()
|
||||
return "origin" in names
|
||||
out = _safe_git_output(["remote"], cwd=repo_dir)
|
||||
return bool(out and "origin" in out.split())
|
||||
|
||||
|
||||
def _ensure_push_urls_for_origin(
|
||||
def _set_origin_fetch_and_push(repo_dir: str, url: str, preview: bool) -> None:
|
||||
fetch = f"git remote set-url origin {url}"
|
||||
push = f"git remote set-url --push origin {url}"
|
||||
|
||||
if preview:
|
||||
print(f"[PREVIEW] Would run in {repo_dir!r}: {fetch}")
|
||||
print(f"[PREVIEW] Would run in {repo_dir!r}: {push}")
|
||||
return
|
||||
|
||||
run_command(fetch, cwd=repo_dir, preview=False)
|
||||
run_command(push, cwd=repo_dir, preview=False)
|
||||
|
||||
|
||||
def _ensure_additional_push_urls(
|
||||
repo_dir: str,
|
||||
mirrors: MirrorMap,
|
||||
primary: str,
|
||||
preview: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Ensure that all mirror URLs are present as push URLs on 'origin'.
|
||||
"""
|
||||
desired: Set[str] = {url for url in mirrors.values() if url}
|
||||
desired: Set[str] = {u for u in mirrors.values() if u and u != primary}
|
||||
if not desired:
|
||||
return
|
||||
|
||||
existing_output = _safe_git_output(
|
||||
out = _safe_git_output(
|
||||
["remote", "get-url", "--push", "--all", "origin"],
|
||||
cwd=repo_dir,
|
||||
)
|
||||
existing = set(existing_output.splitlines()) if existing_output else set()
|
||||
existing = set(out.splitlines()) if out else set()
|
||||
|
||||
missing = sorted(desired - existing)
|
||||
for url in missing:
|
||||
for url in sorted(desired - existing):
|
||||
cmd = f"git remote set-url --add --push origin {url}"
|
||||
if preview:
|
||||
print(f"[PREVIEW] Would run in {repo_dir!r}: {cmd}")
|
||||
else:
|
||||
print(f"[INFO] Adding push URL to 'origin': {url}")
|
||||
run_command(cmd, cwd=repo_dir, preview=False)
|
||||
|
||||
|
||||
@@ -120,60 +102,32 @@ def ensure_origin_remote(
|
||||
ctx: RepoMirrorContext,
|
||||
preview: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Ensure that a usable 'origin' remote exists and has all push URLs.
|
||||
"""
|
||||
repo_dir = ctx.repo_dir
|
||||
resolved_mirrors = ctx.resolved_mirrors
|
||||
|
||||
if not os.path.isdir(os.path.join(repo_dir, ".git")):
|
||||
print(f"[WARN] {repo_dir} is not a Git repository (no .git directory).")
|
||||
print(f"[WARN] {repo_dir} is not a Git repository.")
|
||||
return
|
||||
|
||||
url = determine_primary_remote_url(repo, resolved_mirrors)
|
||||
primary = determine_primary_remote_url(repo, ctx)
|
||||
if not primary:
|
||||
print("[WARN] No primary mirror URL could be determined.")
|
||||
return
|
||||
|
||||
if not has_origin_remote(repo_dir):
|
||||
if not url:
|
||||
print(
|
||||
"[WARN] Could not determine URL for 'origin' remote. "
|
||||
"Please configure mirrors or provider/account/repository."
|
||||
)
|
||||
return
|
||||
|
||||
cmd = f"git remote add origin {url}"
|
||||
cmd = f"git remote add origin {primary}"
|
||||
if preview:
|
||||
print(f"[PREVIEW] Would run in {repo_dir!r}: {cmd}")
|
||||
else:
|
||||
print(f"[INFO] Adding 'origin' remote in {repo_dir}: {url}")
|
||||
run_command(cmd, cwd=repo_dir, preview=False)
|
||||
else:
|
||||
current = current_origin_url(repo_dir)
|
||||
if current == url or not url:
|
||||
print(
|
||||
"[INFO] 'origin' already points to "
|
||||
f"{current or '<unknown>'} (no change needed)."
|
||||
)
|
||||
else:
|
||||
# We do not auto-change origin here, only log the mismatch.
|
||||
print(
|
||||
"[INFO] 'origin' exists with URL "
|
||||
f"{current or '<unknown>'}; not changing to {url}."
|
||||
)
|
||||
|
||||
# Ensure all mirrors are present as push URLs
|
||||
_ensure_push_urls_for_origin(repo_dir, resolved_mirrors, preview)
|
||||
_set_origin_fetch_and_push(repo_dir, primary, preview)
|
||||
|
||||
_ensure_additional_push_urls(repo_dir, ctx.resolved_mirrors, primary, preview)
|
||||
|
||||
|
||||
def is_remote_reachable(url: str, cwd: Optional[str] = None) -> bool:
|
||||
"""
|
||||
Check whether a remote repository is reachable via `git ls-remote`.
|
||||
|
||||
This does NOT modify anything; it only probes the remote.
|
||||
"""
|
||||
workdir = cwd or os.getcwd()
|
||||
try:
|
||||
# --exit-code → non-zero exit code if the remote does not exist
|
||||
run_git(["ls-remote", "--exit-code", url], cwd=workdir)
|
||||
run_git(["ls-remote", "--exit-code", url], cwd=cwd or os.getcwd())
|
||||
return True
|
||||
except GitError:
|
||||
return False
|
||||
|
||||
21
src/pkgmgr/actions/mirror/remote_check.py
Normal file
21
src/pkgmgr/actions/mirror/remote_check.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# src/pkgmgr/actions/mirror/remote_check.py
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Tuple
|
||||
|
||||
from pkgmgr.core.git import GitError, run_git
|
||||
|
||||
|
||||
def probe_mirror(url: str, repo_dir: str) -> Tuple[bool, str]:
|
||||
"""
|
||||
Probe a remote mirror URL using `git ls-remote`.
|
||||
|
||||
Returns:
|
||||
(True, "") on success,
|
||||
(False, error_message) on failure.
|
||||
"""
|
||||
try:
|
||||
run_git(["ls-remote", url], cwd=repo_dir)
|
||||
return True, ""
|
||||
except GitError as exc:
|
||||
return False, str(exc)
|
||||
59
src/pkgmgr/actions/mirror/remote_provision.py
Normal file
59
src/pkgmgr/actions/mirror/remote_provision.py
Normal file
@@ -0,0 +1,59 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import List
|
||||
|
||||
from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, ensure_remote_repo
|
||||
from pkgmgr.core.remote_provisioning.ensure import EnsureOptions
|
||||
|
||||
from .context import build_context
|
||||
from .git_remote import determine_primary_remote_url
|
||||
from .types import Repository
|
||||
from .url_utils import normalize_provider_host, parse_repo_from_git_url
|
||||
|
||||
|
||||
def ensure_remote_repository(
|
||||
repo: Repository,
|
||||
repositories_base_dir: str,
|
||||
all_repos: List[Repository],
|
||||
preview: bool,
|
||||
) -> None:
|
||||
ctx = build_context(repo, repositories_base_dir, all_repos)
|
||||
|
||||
primary_url = determine_primary_remote_url(repo, ctx)
|
||||
if not primary_url:
|
||||
print("[INFO] No primary URL found; skipping remote provisioning.")
|
||||
return
|
||||
|
||||
host_raw, owner, name = parse_repo_from_git_url(primary_url)
|
||||
host = normalize_provider_host(host_raw)
|
||||
|
||||
if not host or not owner or not name:
|
||||
print("[WARN] Could not parse remote URL:", primary_url)
|
||||
return
|
||||
|
||||
spec = RepoSpec(
|
||||
host=host,
|
||||
owner=owner,
|
||||
name=name,
|
||||
private=bool(repo.get("private", True)),
|
||||
description=str(repo.get("description", "")),
|
||||
)
|
||||
|
||||
provider_kind = str(repo.get("provider", "")).lower() or None
|
||||
|
||||
try:
|
||||
result = ensure_remote_repo(
|
||||
spec,
|
||||
provider_hint=ProviderHint(kind=provider_kind),
|
||||
options=EnsureOptions(
|
||||
preview=preview,
|
||||
interactive=True,
|
||||
allow_prompt=True,
|
||||
save_prompt_token_to_keyring=True,
|
||||
),
|
||||
)
|
||||
print(f"[REMOTE ENSURE] {result.status.upper()}: {result.message}")
|
||||
if result.url:
|
||||
print(f"[REMOTE ENSURE] URL: {result.url}")
|
||||
except Exception as exc: # noqa: BLE001
|
||||
print(f"[ERROR] Remote provisioning failed: {exc}")
|
||||
@@ -1,131 +1,20 @@
|
||||
# src/pkgmgr/actions/mirror/setup_cmd.py
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import List, Tuple
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from pkgmgr.core.git import GitError, run_git
|
||||
from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, ensure_remote_repo
|
||||
from pkgmgr.core.remote_provisioning.ensure import EnsureOptions
|
||||
from typing import List
|
||||
|
||||
from .context import build_context
|
||||
from .git_remote import determine_primary_remote_url, ensure_origin_remote
|
||||
from .git_remote import ensure_origin_remote, determine_primary_remote_url
|
||||
from .remote_check import probe_mirror
|
||||
from .remote_provision import ensure_remote_repository
|
||||
from .types import Repository
|
||||
|
||||
|
||||
def _probe_mirror(url: str, repo_dir: str) -> Tuple[bool, str]:
|
||||
"""
|
||||
Probe a remote mirror URL using `git ls-remote`.
|
||||
|
||||
Returns:
|
||||
(True, "") on success,
|
||||
(False, error_message) on failure.
|
||||
"""
|
||||
try:
|
||||
run_git(["ls-remote", url], cwd=repo_dir)
|
||||
return True, ""
|
||||
except GitError as exc:
|
||||
return False, str(exc)
|
||||
|
||||
|
||||
def _host_from_git_url(url: str) -> str:
|
||||
url = (url or "").strip()
|
||||
if not url:
|
||||
return ""
|
||||
|
||||
if "://" in url:
|
||||
parsed = urlparse(url)
|
||||
netloc = (parsed.netloc or "").strip()
|
||||
if "@" in netloc:
|
||||
netloc = netloc.split("@", 1)[1]
|
||||
# keep optional :port
|
||||
return netloc
|
||||
|
||||
# scp-like: git@host:owner/repo.git
|
||||
if "@" in url and ":" in url:
|
||||
after_at = url.split("@", 1)[1]
|
||||
host = after_at.split(":", 1)[0]
|
||||
return host.strip()
|
||||
|
||||
return url.split("/", 1)[0].strip()
|
||||
|
||||
def _ensure_remote_repository(
|
||||
repo: Repository,
|
||||
repositories_base_dir: str,
|
||||
all_repos: List[Repository],
|
||||
preview: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Ensure that the remote repository exists using provider APIs.
|
||||
|
||||
This is ONLY called when ensure_remote=True.
|
||||
"""
|
||||
ctx = build_context(repo, repositories_base_dir, all_repos)
|
||||
resolved_mirrors = ctx.resolved_mirrors
|
||||
|
||||
primary_url = determine_primary_remote_url(repo, resolved_mirrors)
|
||||
if not primary_url:
|
||||
print("[INFO] No remote URL could be derived; skipping remote provisioning.")
|
||||
return
|
||||
|
||||
# IMPORTANT:
|
||||
# - repo["provider"] is typically a provider *kind* (e.g. "github" / "gitea"),
|
||||
# NOT a hostname. We derive the actual host from the remote URL.
|
||||
host = _host_from_git_url(primary_url)
|
||||
owner = repo.get("account")
|
||||
name = repo.get("repository")
|
||||
|
||||
if not host or not owner or not name:
|
||||
print("[WARN] Missing host/account/repository; cannot ensure remote repo.")
|
||||
print(f" host={host!r}, account={owner!r}, repository={name!r}")
|
||||
return
|
||||
|
||||
print("------------------------------------------------------------")
|
||||
print(f"[REMOTE ENSURE] {ctx.identifier}")
|
||||
print(f"[REMOTE ENSURE] host: {host}")
|
||||
print("------------------------------------------------------------")
|
||||
|
||||
spec = RepoSpec(
|
||||
host=str(host),
|
||||
owner=str(owner),
|
||||
name=str(name),
|
||||
private=bool(repo.get("private", True)),
|
||||
description=str(repo.get("description", "")),
|
||||
)
|
||||
|
||||
provider_kind = str(repo.get("provider", "")).strip().lower() or None
|
||||
|
||||
try:
|
||||
result = ensure_remote_repo(
|
||||
spec,
|
||||
provider_hint=ProviderHint(kind=provider_kind),
|
||||
options=EnsureOptions(
|
||||
preview=preview,
|
||||
interactive=True,
|
||||
allow_prompt=True,
|
||||
save_prompt_token_to_keyring=True,
|
||||
),
|
||||
)
|
||||
print(f"[REMOTE ENSURE] {result.status.upper()}: {result.message}")
|
||||
if result.url:
|
||||
print(f"[REMOTE ENSURE] URL: {result.url}")
|
||||
except Exception as exc: # noqa: BLE001
|
||||
# Keep action layer resilient
|
||||
print(f"[ERROR] Remote provisioning failed: {exc}")
|
||||
|
||||
print()
|
||||
|
||||
|
||||
def _setup_local_mirrors_for_repo(
|
||||
repo: Repository,
|
||||
repositories_base_dir: str,
|
||||
all_repos: List[Repository],
|
||||
preview: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Local setup:
|
||||
- Ensure 'origin' remote exists and is sane
|
||||
"""
|
||||
ctx = build_context(repo, repositories_base_dir, all_repos)
|
||||
|
||||
print("------------------------------------------------------------")
|
||||
@@ -133,7 +22,7 @@ def _setup_local_mirrors_for_repo(
|
||||
print(f"[MIRROR SETUP:LOCAL] dir: {ctx.repo_dir}")
|
||||
print("------------------------------------------------------------")
|
||||
|
||||
ensure_origin_remote(repo, ctx, preview=preview)
|
||||
ensure_origin_remote(repo, ctx, preview)
|
||||
print()
|
||||
|
||||
|
||||
@@ -144,19 +33,7 @@ def _setup_remote_mirrors_for_repo(
|
||||
preview: bool,
|
||||
ensure_remote: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Remote-side setup / validation.
|
||||
|
||||
Default behavior:
|
||||
- Non-destructive checks using `git ls-remote`.
|
||||
|
||||
Optional behavior:
|
||||
- If ensure_remote=True:
|
||||
* Attempt to create missing repositories via provider API
|
||||
* Uses TokenResolver (ENV -> keyring -> prompt)
|
||||
"""
|
||||
ctx = build_context(repo, repositories_base_dir, all_repos)
|
||||
resolved_mirrors = ctx.resolved_mirrors
|
||||
|
||||
print("------------------------------------------------------------")
|
||||
print(f"[MIRROR SETUP:REMOTE] {ctx.identifier}")
|
||||
@@ -164,39 +41,30 @@ def _setup_remote_mirrors_for_repo(
|
||||
print("------------------------------------------------------------")
|
||||
|
||||
if ensure_remote:
|
||||
_ensure_remote_repository(
|
||||
ensure_remote_repository(
|
||||
repo,
|
||||
repositories_base_dir=repositories_base_dir,
|
||||
all_repos=all_repos,
|
||||
preview=preview,
|
||||
repositories_base_dir,
|
||||
all_repos,
|
||||
preview,
|
||||
)
|
||||
|
||||
if not resolved_mirrors:
|
||||
primary_url = determine_primary_remote_url(repo, resolved_mirrors)
|
||||
if not primary_url:
|
||||
print("[INFO] No mirrors configured and no primary URL available.")
|
||||
print()
|
||||
if not ctx.resolved_mirrors:
|
||||
primary = determine_primary_remote_url(repo, ctx)
|
||||
if not primary:
|
||||
return
|
||||
|
||||
ok, error_message = _probe_mirror(primary_url, ctx.repo_dir)
|
||||
if ok:
|
||||
print(f"[OK] primary: {primary_url}")
|
||||
else:
|
||||
print(f"[WARN] primary: {primary_url}")
|
||||
for line in error_message.splitlines():
|
||||
print(f" {line}")
|
||||
|
||||
ok, msg = probe_mirror(primary, ctx.repo_dir)
|
||||
print("[OK]" if ok else "[WARN]", primary)
|
||||
if msg:
|
||||
print(msg)
|
||||
print()
|
||||
return
|
||||
|
||||
for name, url in sorted(resolved_mirrors.items()):
|
||||
ok, error_message = _probe_mirror(url, ctx.repo_dir)
|
||||
if ok:
|
||||
print(f"[OK] {name}: {url}")
|
||||
else:
|
||||
print(f"[WARN] {name}: {url}")
|
||||
for line in error_message.splitlines():
|
||||
print(f" {line}")
|
||||
for name, url in ctx.resolved_mirrors.items():
|
||||
ok, msg = probe_mirror(url, ctx.repo_dir)
|
||||
print(f"[OK] {name}: {url}" if ok else f"[WARN] {name}: {url}")
|
||||
if msg:
|
||||
print(msg)
|
||||
|
||||
print()
|
||||
|
||||
@@ -210,33 +78,20 @@ def setup_mirrors(
|
||||
remote: bool = True,
|
||||
ensure_remote: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Setup mirrors for the selected repositories.
|
||||
|
||||
local:
|
||||
- Configure local Git remotes (ensure 'origin' exists).
|
||||
|
||||
remote:
|
||||
- Non-destructive remote checks using `git ls-remote`.
|
||||
|
||||
ensure_remote:
|
||||
- If True, attempt to create missing remote repositories via provider APIs.
|
||||
- This is explicit and NEVER enabled implicitly.
|
||||
"""
|
||||
for repo in selected_repos:
|
||||
if local:
|
||||
_setup_local_mirrors_for_repo(
|
||||
repo=repo,
|
||||
repositories_base_dir=repositories_base_dir,
|
||||
all_repos=all_repos,
|
||||
preview=preview,
|
||||
repo,
|
||||
repositories_base_dir,
|
||||
all_repos,
|
||||
preview,
|
||||
)
|
||||
|
||||
if remote:
|
||||
_setup_remote_mirrors_for_repo(
|
||||
repo=repo,
|
||||
repositories_base_dir=repositories_base_dir,
|
||||
all_repos=all_repos,
|
||||
preview=preview,
|
||||
ensure_remote=ensure_remote,
|
||||
repo,
|
||||
repositories_base_dir,
|
||||
all_repos,
|
||||
preview,
|
||||
ensure_remote,
|
||||
)
|
||||
|
||||
111
src/pkgmgr/actions/mirror/url_utils.py
Normal file
111
src/pkgmgr/actions/mirror/url_utils.py
Normal file
@@ -0,0 +1,111 @@
|
||||
# src/pkgmgr/actions/mirror/url_utils.py
|
||||
from __future__ import annotations
|
||||
|
||||
from urllib.parse import urlparse
|
||||
from typing import Optional, Tuple
|
||||
|
||||
|
||||
def hostport_from_git_url(url: str) -> Tuple[str, Optional[str]]:
|
||||
url = (url or "").strip()
|
||||
if not url:
|
||||
return "", None
|
||||
|
||||
if "://" in url:
|
||||
parsed = urlparse(url)
|
||||
netloc = (parsed.netloc or "").strip()
|
||||
if "@" in netloc:
|
||||
netloc = netloc.split("@", 1)[1]
|
||||
|
||||
if netloc.startswith("[") and "]" in netloc:
|
||||
host = netloc[1:netloc.index("]")]
|
||||
rest = netloc[netloc.index("]") + 1 :]
|
||||
port = rest[1:] if rest.startswith(":") else None
|
||||
return host.strip(), (port.strip() if port else None)
|
||||
|
||||
if ":" in netloc:
|
||||
host, port = netloc.rsplit(":", 1)
|
||||
return host.strip(), (port.strip() or None)
|
||||
|
||||
return netloc.strip(), None
|
||||
|
||||
if "@" in url and ":" in url:
|
||||
after_at = url.split("@", 1)[1]
|
||||
host = after_at.split(":", 1)[0].strip()
|
||||
return host, None
|
||||
|
||||
host = url.split("/", 1)[0].strip()
|
||||
return host, None
|
||||
|
||||
|
||||
def normalize_provider_host(host: str) -> str:
|
||||
host = (host or "").strip()
|
||||
if not host:
|
||||
return ""
|
||||
|
||||
if host.startswith("[") and "]" in host:
|
||||
host = host[1:host.index("]")]
|
||||
|
||||
if ":" in host and host.count(":") == 1:
|
||||
host = host.rsplit(":", 1)[0]
|
||||
|
||||
return host.strip().lower()
|
||||
|
||||
|
||||
def _strip_dot_git(name: str) -> str:
|
||||
n = (name or "").strip()
|
||||
if n.lower().endswith(".git"):
|
||||
return n[:-4]
|
||||
return n
|
||||
|
||||
|
||||
def parse_repo_from_git_url(url: str) -> Tuple[str, Optional[str], Optional[str]]:
|
||||
"""
|
||||
Parse (host, owner, repo_name) from common Git remote URLs.
|
||||
|
||||
Supports:
|
||||
- ssh://git@host:2201/owner/repo.git
|
||||
- https://host/owner/repo.git
|
||||
- git@host:owner/repo.git
|
||||
- host/owner/repo(.git) (best-effort)
|
||||
|
||||
Returns:
|
||||
(host, owner, repo_name) with owner/repo possibly None if not derivable.
|
||||
"""
|
||||
u = (url or "").strip()
|
||||
if not u:
|
||||
return "", None, None
|
||||
|
||||
# URL-style (ssh://, https://, http://)
|
||||
if "://" in u:
|
||||
parsed = urlparse(u)
|
||||
host = (parsed.hostname or "").strip()
|
||||
path = (parsed.path or "").strip("/")
|
||||
parts = [p for p in path.split("/") if p]
|
||||
if len(parts) >= 2:
|
||||
owner = parts[0]
|
||||
repo_name = _strip_dot_git(parts[1])
|
||||
return host, owner, repo_name
|
||||
return host, None, None
|
||||
|
||||
# SCP-like: git@host:owner/repo.git
|
||||
if "@" in u and ":" in u:
|
||||
after_at = u.split("@", 1)[1]
|
||||
host = after_at.split(":", 1)[0].strip()
|
||||
path = after_at.split(":", 1)[1].strip("/")
|
||||
parts = [p for p in path.split("/") if p]
|
||||
if len(parts) >= 2:
|
||||
owner = parts[0]
|
||||
repo_name = _strip_dot_git(parts[1])
|
||||
return host, owner, repo_name
|
||||
return host, None, None
|
||||
|
||||
# Fallback: host/owner/repo.git
|
||||
host = u.split("/", 1)[0].strip()
|
||||
rest = u.split("/", 1)[1] if "/" in u else ""
|
||||
parts = [p for p in rest.strip("/").split("/") if p]
|
||||
if len(parts) >= 2:
|
||||
owner = parts[0]
|
||||
repo_name = _strip_dot_git(parts[1])
|
||||
return host, owner, repo_name
|
||||
|
||||
return host, None, None
|
||||
5
src/pkgmgr/actions/publish/__init__.py
Normal file
5
src/pkgmgr/actions/publish/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from .workflow import publish
|
||||
|
||||
__all__ = ["publish"]
|
||||
17
src/pkgmgr/actions/publish/git_tags.py
Normal file
17
src/pkgmgr/actions/publish/git_tags.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pkgmgr.core.git import run_git
|
||||
from pkgmgr.core.version.semver import SemVer, is_semver_tag
|
||||
|
||||
|
||||
def head_semver_tags(cwd: str = ".") -> list[str]:
|
||||
out = run_git(["tag", "--points-at", "HEAD"], cwd=cwd)
|
||||
if not out:
|
||||
return []
|
||||
|
||||
tags = [t.strip() for t in out.splitlines() if t.strip()]
|
||||
tags = [t for t in tags if is_semver_tag(t) and t.startswith("v")]
|
||||
if not tags:
|
||||
return []
|
||||
|
||||
return sorted(tags, key=SemVer.parse)
|
||||
24
src/pkgmgr/actions/publish/pypi_url.py
Normal file
24
src/pkgmgr/actions/publish/pypi_url.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from .types import PyPITarget
|
||||
|
||||
|
||||
def parse_pypi_project_url(url: str) -> PyPITarget | None:
|
||||
u = (url or "").strip()
|
||||
if not u:
|
||||
return None
|
||||
|
||||
parsed = urlparse(u)
|
||||
host = (parsed.netloc or "").lower()
|
||||
path = (parsed.path or "").strip("/")
|
||||
|
||||
if host not in ("pypi.org", "test.pypi.org"):
|
||||
return None
|
||||
|
||||
parts = [p for p in path.split("/") if p]
|
||||
if len(parts) >= 2 and parts[0] == "project":
|
||||
return PyPITarget(host=host, project=parts[1])
|
||||
|
||||
return None
|
||||
9
src/pkgmgr/actions/publish/types.py
Normal file
9
src/pkgmgr/actions/publish/types.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class PyPITarget:
|
||||
host: str
|
||||
project: str
|
||||
112
src/pkgmgr/actions/publish/workflow.py
Normal file
112
src/pkgmgr/actions/publish/workflow.py
Normal file
@@ -0,0 +1,112 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
from pkgmgr.actions.mirror.io import read_mirrors_file
|
||||
from pkgmgr.actions.mirror.types import Repository
|
||||
from pkgmgr.core.credentials.resolver import ResolutionOptions, TokenResolver
|
||||
from pkgmgr.core.version.semver import SemVer
|
||||
|
||||
from .git_tags import head_semver_tags
|
||||
from .pypi_url import parse_pypi_project_url
|
||||
|
||||
|
||||
def _require_tool(module: str) -> None:
|
||||
try:
|
||||
subprocess.run(
|
||||
["python", "-m", module, "--help"],
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL,
|
||||
check=True,
|
||||
)
|
||||
except Exception as exc:
|
||||
raise RuntimeError(
|
||||
f"Required Python module '{module}' is not available. "
|
||||
f"Install it via: pip install {module}"
|
||||
) from exc
|
||||
|
||||
|
||||
def publish(
|
||||
repo: Repository,
|
||||
repo_dir: str,
|
||||
*,
|
||||
preview: bool = False,
|
||||
interactive: bool = True,
|
||||
allow_prompt: bool = True,
|
||||
) -> None:
|
||||
mirrors = read_mirrors_file(repo_dir)
|
||||
|
||||
targets = []
|
||||
for url in mirrors.values():
|
||||
t = parse_pypi_project_url(url)
|
||||
if t:
|
||||
targets.append(t)
|
||||
|
||||
if not targets:
|
||||
print("[INFO] No PyPI mirror found. Skipping publish.")
|
||||
return
|
||||
|
||||
if len(targets) > 1:
|
||||
raise RuntimeError("Multiple PyPI mirrors found; refusing to publish.")
|
||||
|
||||
tags = head_semver_tags(cwd=repo_dir)
|
||||
if not tags:
|
||||
print("[INFO] No version tag on HEAD. Skipping publish.")
|
||||
return
|
||||
|
||||
tag = max(tags, key=SemVer.parse)
|
||||
target = targets[0]
|
||||
|
||||
print(f"[INFO] Publishing {target.project} for tag {tag}")
|
||||
|
||||
if preview:
|
||||
print("[PREVIEW] Would build and upload to PyPI.")
|
||||
return
|
||||
|
||||
_require_tool("build")
|
||||
_require_tool("twine")
|
||||
|
||||
dist_dir = os.path.join(repo_dir, "dist")
|
||||
if os.path.isdir(dist_dir):
|
||||
shutil.rmtree(dist_dir, ignore_errors=True)
|
||||
|
||||
subprocess.run(
|
||||
["python", "-m", "build"],
|
||||
cwd=repo_dir,
|
||||
check=True,
|
||||
)
|
||||
|
||||
artifacts = sorted(glob.glob(os.path.join(dist_dir, "*")))
|
||||
if not artifacts:
|
||||
raise RuntimeError("No build artifacts found in dist/.")
|
||||
|
||||
resolver = TokenResolver()
|
||||
|
||||
# Store PyPI token per OS user (keyring is already user-scoped).
|
||||
# Do NOT scope by project name.
|
||||
token = resolver.get_token(
|
||||
provider_kind="pypi",
|
||||
host=target.host,
|
||||
owner=None,
|
||||
options=ResolutionOptions(
|
||||
interactive=interactive,
|
||||
allow_prompt=allow_prompt,
|
||||
save_prompt_token_to_keyring=True,
|
||||
),
|
||||
).token
|
||||
|
||||
env = dict(os.environ)
|
||||
env["TWINE_USERNAME"] = "__token__"
|
||||
env["TWINE_PASSWORD"] = token
|
||||
|
||||
subprocess.run(
|
||||
["python", "-m", "twine", "upload", *artifacts],
|
||||
cwd=repo_dir,
|
||||
env=env,
|
||||
check=True,
|
||||
)
|
||||
|
||||
print("[INFO] Publish completed.")
|
||||
@@ -1,10 +1,13 @@
|
||||
# src/pkgmgr/actions/release/workflow.py
|
||||
from __future__ import annotations
|
||||
from typing import Optional
|
||||
|
||||
import os
|
||||
import sys
|
||||
from typing import Optional
|
||||
|
||||
from pkgmgr.actions.branch import close_branch
|
||||
from pkgmgr.core.git import get_current_branch, GitError
|
||||
from pkgmgr.core.repository.paths import resolve_repo_paths
|
||||
|
||||
from .files import (
|
||||
update_changelog,
|
||||
@@ -55,8 +58,12 @@ def _release_impl(
|
||||
print(f"New version: {new_ver_str} ({release_type})")
|
||||
|
||||
repo_root = os.path.dirname(os.path.abspath(pyproject_path))
|
||||
paths = resolve_repo_paths(repo_root)
|
||||
|
||||
# --- Update versioned files ------------------------------------------------
|
||||
|
||||
update_pyproject_version(pyproject_path, new_ver_str, preview=preview)
|
||||
|
||||
changelog_message = update_changelog(
|
||||
changelog_path,
|
||||
new_ver_str,
|
||||
@@ -64,38 +71,46 @@ def _release_impl(
|
||||
preview=preview,
|
||||
)
|
||||
|
||||
flake_path = os.path.join(repo_root, "flake.nix")
|
||||
update_flake_version(flake_path, new_ver_str, preview=preview)
|
||||
update_flake_version(paths.flake_nix, new_ver_str, preview=preview)
|
||||
|
||||
pkgbuild_path = os.path.join(repo_root, "PKGBUILD")
|
||||
update_pkgbuild_version(pkgbuild_path, new_ver_str, preview=preview)
|
||||
if paths.arch_pkgbuild:
|
||||
update_pkgbuild_version(paths.arch_pkgbuild, new_ver_str, preview=preview)
|
||||
else:
|
||||
print("[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping.")
|
||||
|
||||
spec_path = os.path.join(repo_root, "package-manager.spec")
|
||||
update_spec_version(spec_path, new_ver_str, preview=preview)
|
||||
if paths.rpm_spec:
|
||||
update_spec_version(paths.rpm_spec, new_ver_str, preview=preview)
|
||||
else:
|
||||
print("[INFO] No RPM spec file found. Skipping spec version update.")
|
||||
|
||||
effective_message: Optional[str] = message
|
||||
if effective_message is None and isinstance(changelog_message, str):
|
||||
if changelog_message.strip():
|
||||
effective_message = changelog_message.strip()
|
||||
|
||||
debian_changelog_path = os.path.join(repo_root, "debian", "changelog")
|
||||
package_name = os.path.basename(repo_root) or "package-manager"
|
||||
|
||||
update_debian_changelog(
|
||||
debian_changelog_path,
|
||||
package_name=package_name,
|
||||
new_version=new_ver_str,
|
||||
message=effective_message,
|
||||
preview=preview,
|
||||
)
|
||||
if paths.debian_changelog:
|
||||
update_debian_changelog(
|
||||
paths.debian_changelog,
|
||||
package_name=package_name,
|
||||
new_version=new_ver_str,
|
||||
message=effective_message,
|
||||
preview=preview,
|
||||
)
|
||||
else:
|
||||
print("[INFO] No debian changelog found. Skipping debian/changelog update.")
|
||||
|
||||
update_spec_changelog(
|
||||
spec_path=spec_path,
|
||||
package_name=package_name,
|
||||
new_version=new_ver_str,
|
||||
message=effective_message,
|
||||
preview=preview,
|
||||
)
|
||||
if paths.rpm_spec:
|
||||
update_spec_changelog(
|
||||
spec_path=paths.rpm_spec,
|
||||
package_name=package_name,
|
||||
new_version=new_ver_str,
|
||||
message=effective_message,
|
||||
preview=preview,
|
||||
)
|
||||
|
||||
# --- Git commit / tag / push ----------------------------------------------
|
||||
|
||||
commit_msg = f"Release version {new_ver_str}"
|
||||
tag_msg = effective_message or commit_msg
|
||||
@@ -103,12 +118,12 @@ def _release_impl(
|
||||
files_to_add = [
|
||||
pyproject_path,
|
||||
changelog_path,
|
||||
flake_path,
|
||||
pkgbuild_path,
|
||||
spec_path,
|
||||
debian_changelog_path,
|
||||
paths.flake_nix,
|
||||
paths.arch_pkgbuild,
|
||||
paths.rpm_spec,
|
||||
paths.debian_changelog,
|
||||
]
|
||||
existing_files = [p for p in files_to_add if p and os.path.exists(p)]
|
||||
existing_files = [p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)]
|
||||
|
||||
if preview:
|
||||
for path in existing_files:
|
||||
|
||||
@@ -1,143 +1,257 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict, Optional, Tuple
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import yaml
|
||||
|
||||
from pkgmgr.actions.mirror.io import write_mirrors_file
|
||||
from pkgmgr.actions.mirror.setup_cmd import setup_mirrors
|
||||
from pkgmgr.actions.repository.scaffold import render_default_templates
|
||||
from pkgmgr.core.command.alias import generate_alias
|
||||
from pkgmgr.core.config.save import save_user_config
|
||||
|
||||
def create_repo(identifier, config_merged, user_config_path, bin_dir, remote=False, preview=False):
|
||||
"""
|
||||
Creates a new repository by performing the following steps:
|
||||
|
||||
1. Parses the identifier (provider:port/account/repository) and adds a new entry to the user config
|
||||
if it is not already present. The provider part is split into provider and port (if provided).
|
||||
2. Creates the local repository directory and initializes a Git repository.
|
||||
3. If --remote is set, checks for an existing "origin" remote (removing it if found),
|
||||
adds the remote using a URL built from provider, port, account, and repository,
|
||||
creates an initial commit (e.g. with a README.md), and pushes to the remote.
|
||||
The push is attempted on both "main" and "master" branches.
|
||||
"""
|
||||
parts = identifier.split("/")
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
_NAME_RE = re.compile(r"^[a-z0-9_-]+$")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class RepoParts:
|
||||
host: str
|
||||
port: Optional[str]
|
||||
owner: str
|
||||
name: str
|
||||
|
||||
|
||||
def _run(cmd: str, cwd: str, preview: bool) -> None:
|
||||
if preview:
|
||||
print(f"[Preview] Would run in {cwd}: {cmd}")
|
||||
return
|
||||
subprocess.run(cmd, cwd=cwd, shell=True, check=True)
|
||||
|
||||
|
||||
def _git_get(key: str) -> str:
|
||||
try:
|
||||
out = subprocess.run(
|
||||
f"git config --get {key}",
|
||||
shell=True,
|
||||
check=False,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
return (out.stdout or "").strip()
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
|
||||
def _split_host_port(host_with_port: str) -> Tuple[str, Optional[str]]:
|
||||
if ":" in host_with_port:
|
||||
host, port = host_with_port.split(":", 1)
|
||||
return host, port or None
|
||||
return host_with_port, None
|
||||
|
||||
|
||||
def _strip_git_suffix(name: str) -> str:
|
||||
return name[:-4] if name.endswith(".git") else name
|
||||
|
||||
|
||||
def _parse_git_url(url: str) -> RepoParts:
|
||||
if url.startswith("git@") and "://" not in url:
|
||||
left, right = url.split(":", 1)
|
||||
host = left.split("@", 1)[1]
|
||||
path = right.lstrip("/")
|
||||
owner, name = path.split("/", 1)
|
||||
return RepoParts(host=host, port=None, owner=owner, name=_strip_git_suffix(name))
|
||||
|
||||
parsed = urlparse(url)
|
||||
host = (parsed.hostname or "").strip()
|
||||
port = str(parsed.port) if parsed.port else None
|
||||
path = (parsed.path or "").strip("/")
|
||||
|
||||
if not host or not path or "/" not in path:
|
||||
raise ValueError(f"Could not parse git URL: {url}")
|
||||
|
||||
owner, name = path.split("/", 1)
|
||||
return RepoParts(host=host, port=port, owner=owner, name=_strip_git_suffix(name))
|
||||
|
||||
|
||||
def _parse_identifier(identifier: str) -> RepoParts:
|
||||
ident = identifier.strip()
|
||||
|
||||
if "://" in ident or ident.startswith("git@"):
|
||||
return _parse_git_url(ident)
|
||||
|
||||
parts = ident.split("/")
|
||||
if len(parts) != 3:
|
||||
print("Identifier must be in the format 'provider:port/account/repository' (port is optional).")
|
||||
raise ValueError("Identifier must be URL or 'provider(:port)/owner/repo'.")
|
||||
|
||||
host_with_port, owner, name = parts
|
||||
host, port = _split_host_port(host_with_port)
|
||||
return RepoParts(host=host, port=port, owner=owner, name=name)
|
||||
|
||||
|
||||
def _ensure_valid_repo_name(name: str) -> None:
|
||||
if not name or not _NAME_RE.fullmatch(name):
|
||||
raise ValueError("Repository name must match: lowercase a-z, 0-9, '_' and '-'.")
|
||||
|
||||
|
||||
def _repo_homepage(host: str, owner: str, name: str) -> str:
|
||||
return f"https://{host}/{owner}/{name}"
|
||||
|
||||
|
||||
def _build_default_primary_url(parts: RepoParts) -> str:
|
||||
if parts.port:
|
||||
return f"ssh://git@{parts.host}:{parts.port}/{parts.owner}/{parts.name}.git"
|
||||
return f"git@{parts.host}:{parts.owner}/{parts.name}.git"
|
||||
|
||||
|
||||
def _write_default_mirrors(repo_dir: str, primary: str, name: str, preview: bool) -> None:
|
||||
mirrors = {"origin": primary, "pypi": f"https://pypi.org/project/{name}/"}
|
||||
write_mirrors_file(repo_dir, mirrors, preview=preview)
|
||||
|
||||
|
||||
def _git_init_and_initial_commit(repo_dir: str, preview: bool) -> None:
|
||||
_run("git init", cwd=repo_dir, preview=preview)
|
||||
_run("git add -A", cwd=repo_dir, preview=preview)
|
||||
|
||||
if preview:
|
||||
print(f'[Preview] Would run in {repo_dir}: git commit -m "Initial commit"')
|
||||
return
|
||||
|
||||
provider_with_port, account, repository = parts
|
||||
# Split provider and port if a colon is present.
|
||||
if ":" in provider_with_port:
|
||||
provider_name, port = provider_with_port.split(":", 1)
|
||||
else:
|
||||
provider_name = provider_with_port
|
||||
port = None
|
||||
subprocess.run('git commit -m "Initial commit"', cwd=repo_dir, shell=True, check=False)
|
||||
|
||||
# Check if the repository is already present in the merged config (including port)
|
||||
exists = False
|
||||
for repo in config_merged.get("repositories", []):
|
||||
if (repo.get("provider") == provider_name and
|
||||
repo.get("account") == account and
|
||||
repo.get("repository") == repository):
|
||||
exists = True
|
||||
print(f"Repository {identifier} already exists in the configuration.")
|
||||
break
|
||||
|
||||
def _git_push_main_or_master(repo_dir: str, preview: bool) -> None:
|
||||
_run("git branch -M main", cwd=repo_dir, preview=preview)
|
||||
try:
|
||||
_run("git push -u origin main", cwd=repo_dir, preview=preview)
|
||||
return
|
||||
except subprocess.CalledProcessError:
|
||||
pass
|
||||
|
||||
try:
|
||||
_run("git branch -M master", cwd=repo_dir, preview=preview)
|
||||
_run("git push -u origin master", cwd=repo_dir, preview=preview)
|
||||
except subprocess.CalledProcessError as exc:
|
||||
print(f"[WARN] Push failed: {exc}")
|
||||
|
||||
|
||||
def create_repo(
|
||||
identifier: str,
|
||||
config_merged: Dict[str, Any],
|
||||
user_config_path: str,
|
||||
bin_dir: str,
|
||||
*,
|
||||
remote: bool = False,
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
parts = _parse_identifier(identifier)
|
||||
_ensure_valid_repo_name(parts.name)
|
||||
|
||||
directories = config_merged.get("directories") or {}
|
||||
base_dir = os.path.expanduser(str(directories.get("repositories", "~/Repositories")))
|
||||
repo_dir = os.path.join(base_dir, parts.host, parts.owner, parts.name)
|
||||
|
||||
author_name = _git_get("user.name") or "Unknown Author"
|
||||
author_email = _git_get("user.email") or "unknown@example.invalid"
|
||||
|
||||
homepage = _repo_homepage(parts.host, parts.owner, parts.name)
|
||||
primary_url = _build_default_primary_url(parts)
|
||||
|
||||
repositories = config_merged.get("repositories") or []
|
||||
exists = any(
|
||||
(
|
||||
r.get("provider") == parts.host
|
||||
and r.get("account") == parts.owner
|
||||
and r.get("repository") == parts.name
|
||||
)
|
||||
for r in repositories
|
||||
)
|
||||
|
||||
if not exists:
|
||||
# Create a new entry with an automatically generated alias.
|
||||
new_entry = {
|
||||
"provider": provider_name,
|
||||
"port": port,
|
||||
"account": account,
|
||||
"repository": repository,
|
||||
"alias": generate_alias({"repository": repository, "provider": provider_name, "account": account}, bin_dir, existing_aliases=set()),
|
||||
"verified": {} # No initial verification info
|
||||
new_entry: Repository = {
|
||||
"provider": parts.host,
|
||||
"port": parts.port,
|
||||
"account": parts.owner,
|
||||
"repository": parts.name,
|
||||
"homepage": homepage,
|
||||
"alias": generate_alias(
|
||||
{"repository": parts.name, "provider": parts.host, "account": parts.owner},
|
||||
bin_dir,
|
||||
existing_aliases=set(),
|
||||
),
|
||||
"verified": {},
|
||||
}
|
||||
# Load or initialize the user configuration.
|
||||
|
||||
if os.path.exists(user_config_path):
|
||||
with open(user_config_path, "r") as f:
|
||||
with open(user_config_path, "r", encoding="utf-8") as f:
|
||||
user_config = yaml.safe_load(f) or {}
|
||||
else:
|
||||
user_config = {"repositories": []}
|
||||
|
||||
user_config.setdefault("repositories", [])
|
||||
user_config["repositories"].append(new_entry)
|
||||
save_user_config(user_config, user_config_path)
|
||||
print(f"Repository {identifier} added to the configuration.")
|
||||
# Also update the merged configuration object.
|
||||
config_merged.setdefault("repositories", []).append(new_entry)
|
||||
|
||||
# Create the local repository directory based on the configured base directory.
|
||||
base_dir = os.path.expanduser(config_merged["directories"]["repositories"])
|
||||
repo_dir = os.path.join(base_dir, provider_name, account, repository)
|
||||
if not os.path.exists(repo_dir):
|
||||
os.makedirs(repo_dir, exist_ok=True)
|
||||
print(f"Local repository directory created: {repo_dir}")
|
||||
else:
|
||||
print(f"Local repository directory already exists: {repo_dir}")
|
||||
|
||||
# Initialize a Git repository if not already initialized.
|
||||
if not os.path.exists(os.path.join(repo_dir, ".git")):
|
||||
cmd_init = "git init"
|
||||
if preview:
|
||||
print(f"[Preview] Would execute: '{cmd_init}' in {repo_dir}")
|
||||
print(f"[Preview] Would save user config: {user_config_path}")
|
||||
else:
|
||||
subprocess.run(cmd_init, cwd=repo_dir, shell=True, check=True)
|
||||
print(f"Git repository initialized in {repo_dir}.")
|
||||
save_user_config(user_config, user_config_path)
|
||||
|
||||
config_merged.setdefault("repositories", []).append(new_entry)
|
||||
repo = new_entry
|
||||
print(f"[INFO] Added repository to configuration: {parts.host}/{parts.owner}/{parts.name}")
|
||||
else:
|
||||
print("Git repository is already initialized.")
|
||||
repo = next(
|
||||
r
|
||||
for r in repositories
|
||||
if (
|
||||
r.get("provider") == parts.host
|
||||
and r.get("account") == parts.owner
|
||||
and r.get("repository") == parts.name
|
||||
)
|
||||
)
|
||||
print(f"[INFO] Repository already in configuration: {parts.host}/{parts.owner}/{parts.name}")
|
||||
|
||||
if preview:
|
||||
print(f"[Preview] Would ensure directory exists: {repo_dir}")
|
||||
else:
|
||||
os.makedirs(repo_dir, exist_ok=True)
|
||||
|
||||
tpl_context = {
|
||||
"provider": parts.host,
|
||||
"port": parts.port,
|
||||
"account": parts.owner,
|
||||
"repository": parts.name,
|
||||
"homepage": homepage,
|
||||
"author_name": author_name,
|
||||
"author_email": author_email,
|
||||
"license_text": f"All rights reserved by {author_name}",
|
||||
"primary_remote": primary_url,
|
||||
}
|
||||
|
||||
render_default_templates(repo_dir, context=tpl_context, preview=preview)
|
||||
_git_init_and_initial_commit(repo_dir, preview=preview)
|
||||
|
||||
_write_default_mirrors(repo_dir, primary=primary_url, name=parts.name, preview=preview)
|
||||
|
||||
repo.setdefault("mirrors", {})
|
||||
repo["mirrors"].setdefault("origin", primary_url)
|
||||
repo["mirrors"].setdefault("pypi", f"https://pypi.org/project/{parts.name}/")
|
||||
|
||||
setup_mirrors(
|
||||
selected_repos=[repo],
|
||||
repositories_base_dir=base_dir,
|
||||
all_repos=config_merged.get("repositories", []),
|
||||
preview=preview,
|
||||
local=True,
|
||||
remote=True,
|
||||
ensure_remote=bool(remote),
|
||||
)
|
||||
|
||||
if remote:
|
||||
# Create a README.md if it does not exist to have content for an initial commit.
|
||||
readme_path = os.path.join(repo_dir, "README.md")
|
||||
if not os.path.exists(readme_path):
|
||||
if preview:
|
||||
print(f"[Preview] Would create README.md in {repo_dir}.")
|
||||
else:
|
||||
with open(readme_path, "w") as f:
|
||||
f.write(f"# {repository}\n")
|
||||
subprocess.run("git add README.md", cwd=repo_dir, shell=True, check=True)
|
||||
subprocess.run('git commit -m "Initial commit"', cwd=repo_dir, shell=True, check=True)
|
||||
print("README.md created and initial commit made.")
|
||||
|
||||
# Build the remote URL.
|
||||
if provider_name.lower() == "github.com":
|
||||
remote_url = f"git@{provider_name}:{account}/{repository}.git"
|
||||
else:
|
||||
if port:
|
||||
remote_url = f"ssh://git@{provider_name}:{port}/{account}/{repository}.git"
|
||||
else:
|
||||
remote_url = f"ssh://git@{provider_name}/{account}/{repository}.git"
|
||||
|
||||
# Check if the remote "origin" already exists.
|
||||
cmd_list = "git remote"
|
||||
if preview:
|
||||
print(f"[Preview] Would check for existing remotes in {repo_dir}")
|
||||
remote_exists = False # Assume no remote in preview mode.
|
||||
else:
|
||||
result = subprocess.run(cmd_list, cwd=repo_dir, shell=True, capture_output=True, text=True, check=True)
|
||||
remote_list = result.stdout.strip().split()
|
||||
remote_exists = "origin" in remote_list
|
||||
|
||||
if remote_exists:
|
||||
# Remove the existing remote "origin".
|
||||
cmd_remove = "git remote remove origin"
|
||||
if preview:
|
||||
print(f"[Preview] Would execute: '{cmd_remove}' in {repo_dir}")
|
||||
else:
|
||||
subprocess.run(cmd_remove, cwd=repo_dir, shell=True, check=True)
|
||||
print("Existing remote 'origin' removed.")
|
||||
|
||||
# Now add the new remote.
|
||||
cmd_remote = f"git remote add origin {remote_url}"
|
||||
if preview:
|
||||
print(f"[Preview] Would execute: '{cmd_remote}' in {repo_dir}")
|
||||
else:
|
||||
try:
|
||||
subprocess.run(cmd_remote, cwd=repo_dir, shell=True, check=True)
|
||||
print(f"Remote 'origin' added: {remote_url}")
|
||||
except subprocess.CalledProcessError:
|
||||
print(f"Failed to add remote using URL: {remote_url}.")
|
||||
|
||||
# Push the initial commit to the remote repository
|
||||
cmd_push = "git push -u origin master"
|
||||
if preview:
|
||||
print(f"[Preview] Would execute: '{cmd_push}' in {repo_dir}")
|
||||
else:
|
||||
subprocess.run(cmd_push, cwd=repo_dir, shell=True, check=True)
|
||||
print("Initial push to the remote repository completed.")
|
||||
_git_push_main_or_master(repo_dir, preview=preview)
|
||||
|
||||
105
src/pkgmgr/actions/repository/scaffold.py
Normal file
105
src/pkgmgr/actions/repository/scaffold.py
Normal file
@@ -0,0 +1,105 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
try:
|
||||
from jinja2 import Environment, FileSystemLoader, StrictUndefined
|
||||
except Exception as exc: # pragma: no cover
|
||||
Environment = None # type: ignore[assignment]
|
||||
FileSystemLoader = None # type: ignore[assignment]
|
||||
StrictUndefined = None # type: ignore[assignment]
|
||||
_JINJA_IMPORT_ERROR = exc
|
||||
else:
|
||||
_JINJA_IMPORT_ERROR = None
|
||||
|
||||
|
||||
def _repo_root_from_here(anchor: Optional[Path] = None) -> str:
|
||||
"""
|
||||
Prefer git root (robust in editable installs / different layouts).
|
||||
Fallback to a conservative relative parent lookup.
|
||||
"""
|
||||
here = (anchor or Path(__file__)).resolve().parent
|
||||
try:
|
||||
r = subprocess.run(
|
||||
["git", "rev-parse", "--show-toplevel"],
|
||||
cwd=str(here),
|
||||
check=False,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
if r.returncode == 0:
|
||||
top = (r.stdout or "").strip()
|
||||
if top:
|
||||
return top
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Fallback: src/pkgmgr/actions/repository/scaffold.py -> <repo root> = parents[5]
|
||||
p = (anchor or Path(__file__)).resolve()
|
||||
if len(p.parents) < 6:
|
||||
raise RuntimeError(f"Unexpected path depth for: {p}")
|
||||
return str(p.parents[5])
|
||||
|
||||
|
||||
def _templates_dir() -> str:
|
||||
return os.path.join(_repo_root_from_here(), "templates", "default")
|
||||
|
||||
|
||||
def render_default_templates(
|
||||
repo_dir: str,
|
||||
*,
|
||||
context: Dict[str, Any],
|
||||
preview: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Render templates/default/*.j2 into repo_dir.
|
||||
Keeps create.py clean: create.py calls this function only.
|
||||
"""
|
||||
tpl_dir = _templates_dir()
|
||||
if not os.path.isdir(tpl_dir):
|
||||
raise RuntimeError(f"Templates directory not found: {tpl_dir}")
|
||||
|
||||
# Preview mode: do not require Jinja2 at all. We only print planned outputs.
|
||||
if preview:
|
||||
for root, _, files in os.walk(tpl_dir):
|
||||
for fn in files:
|
||||
if not fn.endswith(".j2"):
|
||||
continue
|
||||
abs_src = os.path.join(root, fn)
|
||||
rel_src = os.path.relpath(abs_src, tpl_dir)
|
||||
rel_out = rel_src[:-3]
|
||||
print(f"[Preview] Would render template: {rel_src} -> {rel_out}")
|
||||
return
|
||||
|
||||
if Environment is None or FileSystemLoader is None or StrictUndefined is None:
|
||||
raise RuntimeError(
|
||||
"Jinja2 is required for repo templates but is not available. "
|
||||
f"Import error: {_JINJA_IMPORT_ERROR}"
|
||||
)
|
||||
|
||||
env = Environment(
|
||||
loader=FileSystemLoader(tpl_dir),
|
||||
undefined=StrictUndefined,
|
||||
autoescape=False,
|
||||
keep_trailing_newline=True,
|
||||
)
|
||||
|
||||
for root, _, files in os.walk(tpl_dir):
|
||||
for fn in files:
|
||||
if not fn.endswith(".j2"):
|
||||
continue
|
||||
|
||||
abs_src = os.path.join(root, fn)
|
||||
rel_src = os.path.relpath(abs_src, tpl_dir)
|
||||
rel_out = rel_src[:-3]
|
||||
abs_out = os.path.join(repo_dir, rel_out)
|
||||
|
||||
os.makedirs(os.path.dirname(abs_out), exist_ok=True)
|
||||
template = env.get_template(rel_src)
|
||||
rendered = template.render(**context)
|
||||
|
||||
with open(abs_out, "w", encoding="utf-8") as f:
|
||||
f.write(rendered)
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Iterable
|
||||
from typing import Any, Iterable, List, Tuple
|
||||
|
||||
from pkgmgr.actions.update.system_updater import SystemUpdater
|
||||
|
||||
@@ -30,32 +30,73 @@ class UpdateManager:
|
||||
quiet: bool,
|
||||
update_dependencies: bool,
|
||||
clone_mode: str,
|
||||
silent: bool = False,
|
||||
force_update: bool = True,
|
||||
) -> None:
|
||||
from pkgmgr.actions.install import install_repos
|
||||
from pkgmgr.actions.repository.pull import pull_with_verification
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
|
||||
pull_with_verification(
|
||||
selected_repos,
|
||||
repositories_base_dir,
|
||||
all_repos,
|
||||
[],
|
||||
no_verification,
|
||||
preview,
|
||||
)
|
||||
failures: List[Tuple[str, str]] = []
|
||||
|
||||
install_repos(
|
||||
selected_repos,
|
||||
repositories_base_dir,
|
||||
bin_dir,
|
||||
all_repos,
|
||||
no_verification,
|
||||
preview,
|
||||
quiet,
|
||||
clone_mode,
|
||||
update_dependencies,
|
||||
force_update=force_update,
|
||||
)
|
||||
for repo in list(selected_repos):
|
||||
identifier = get_repo_identifier(repo, all_repos)
|
||||
|
||||
try:
|
||||
pull_with_verification(
|
||||
[repo],
|
||||
repositories_base_dir,
|
||||
all_repos,
|
||||
[],
|
||||
no_verification,
|
||||
preview,
|
||||
)
|
||||
except SystemExit as exc:
|
||||
code = exc.code if isinstance(exc.code, int) else str(exc.code)
|
||||
failures.append((identifier, f"pull failed (exit={code})"))
|
||||
if not quiet:
|
||||
print(f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing...")
|
||||
continue
|
||||
except Exception as exc:
|
||||
failures.append((identifier, f"pull failed: {exc}"))
|
||||
if not quiet:
|
||||
print(f"[Warning] update: pull failed for {identifier}: {exc}. Continuing...")
|
||||
continue
|
||||
|
||||
try:
|
||||
install_repos(
|
||||
[repo],
|
||||
repositories_base_dir,
|
||||
bin_dir,
|
||||
all_repos,
|
||||
no_verification,
|
||||
preview,
|
||||
quiet,
|
||||
clone_mode,
|
||||
update_dependencies,
|
||||
force_update=force_update,
|
||||
silent=silent,
|
||||
emit_summary=False,
|
||||
)
|
||||
except SystemExit as exc:
|
||||
code = exc.code if isinstance(exc.code, int) else str(exc.code)
|
||||
failures.append((identifier, f"install failed (exit={code})"))
|
||||
if not quiet:
|
||||
print(f"[Warning] update: install failed for {identifier} (exit={code}). Continuing...")
|
||||
continue
|
||||
except Exception as exc:
|
||||
failures.append((identifier, f"install failed: {exc}"))
|
||||
if not quiet:
|
||||
print(f"[Warning] update: install failed for {identifier}: {exc}. Continuing...")
|
||||
continue
|
||||
|
||||
if failures and not quiet:
|
||||
print("\n[pkgmgr] Update finished with warnings:")
|
||||
for ident, msg in failures:
|
||||
print(f" - {ident}: {msg}")
|
||||
|
||||
if failures and not silent:
|
||||
raise SystemExit(1)
|
||||
|
||||
if system_update:
|
||||
self._system_updater.run(preview=preview)
|
||||
|
||||
@@ -2,6 +2,7 @@ from .repos import handle_repos_command
|
||||
from .config import handle_config
|
||||
from .tools import handle_tools_command
|
||||
from .release import handle_release
|
||||
from .publish import handle_publish
|
||||
from .version import handle_version
|
||||
from .make import handle_make
|
||||
from .changelog import handle_changelog
|
||||
@@ -13,6 +14,7 @@ __all__ = [
|
||||
"handle_config",
|
||||
"handle_tools_command",
|
||||
"handle_release",
|
||||
"handle_publish",
|
||||
"handle_version",
|
||||
"handle_make",
|
||||
"handle_changelog",
|
||||
|
||||
34
src/pkgmgr/cli/commands/publish.py
Normal file
34
src/pkgmgr/cli/commands/publish.py
Normal file
@@ -0,0 +1,34 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pkgmgr.actions.publish import publish
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
|
||||
def handle_publish(args, ctx: CLIContext, selected: List[Repository]) -> None:
|
||||
if not selected:
|
||||
print("[pkgmgr] No repositories selected for publish.")
|
||||
return
|
||||
|
||||
for repo in selected:
|
||||
identifier = get_repo_identifier(repo, ctx.all_repositories)
|
||||
repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
|
||||
|
||||
if not os.path.isdir(repo_dir):
|
||||
print(f"[WARN] Skipping {identifier}: directory missing.")
|
||||
continue
|
||||
|
||||
print(f"[pkgmgr] Publishing repository {identifier}...")
|
||||
publish(
|
||||
repo=repo,
|
||||
repo_dir=repo_dir,
|
||||
preview=getattr(args, "preview", False),
|
||||
interactive=not getattr(args, "non_interactive", False),
|
||||
allow_prompt=not getattr(args, "non_interactive", False),
|
||||
)
|
||||
@@ -1,31 +1,17 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Release command wiring for the pkgmgr CLI.
|
||||
|
||||
This module implements the `pkgmgr release` subcommand on top of the
|
||||
generic selection logic from cli.dispatch. It does not define its
|
||||
own subparser; the CLI surface is configured in cli.parser.
|
||||
|
||||
Responsibilities:
|
||||
- Take the parsed argparse.Namespace for the `release` command.
|
||||
- Use the list of selected repositories provided by dispatch_command().
|
||||
- Optionally list affected repositories when --list is set.
|
||||
- For each selected repository, run pkgmgr.actions.release.release(...) in
|
||||
the context of that repository directory.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pkgmgr.actions.publish import publish as run_publish
|
||||
from pkgmgr.actions.release import release as run_release
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.actions.release import release as run_release
|
||||
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
@@ -35,23 +21,10 @@ def handle_release(
|
||||
ctx: CLIContext,
|
||||
selected: List[Repository],
|
||||
) -> None:
|
||||
"""
|
||||
Handle the `pkgmgr release` subcommand.
|
||||
|
||||
Flow:
|
||||
1) Use the `selected` repositories as computed by dispatch_command().
|
||||
2) If --list is given, print the identifiers of the selected repos
|
||||
and return without running any release.
|
||||
3) For each selected repository:
|
||||
- Resolve its identifier and local directory.
|
||||
- Change into that directory.
|
||||
- Call pkgmgr.actions.release.release(...) with the parsed options.
|
||||
"""
|
||||
if not selected:
|
||||
print("[pkgmgr] No repositories selected for release.")
|
||||
return
|
||||
|
||||
# List-only mode: show which repositories would be affected.
|
||||
if getattr(args, "list", False):
|
||||
print("[pkgmgr] Repositories that would be affected by this release:")
|
||||
for repo in selected:
|
||||
@@ -62,29 +35,22 @@ def handle_release(
|
||||
for repo in selected:
|
||||
identifier = get_repo_identifier(repo, ctx.all_repositories)
|
||||
|
||||
repo_dir = repo.get("directory")
|
||||
if not repo_dir:
|
||||
try:
|
||||
repo_dir = get_repo_dir(ctx.repositories_base_dir, repo)
|
||||
except Exception:
|
||||
repo_dir = None
|
||||
|
||||
if not repo_dir or not os.path.isdir(repo_dir):
|
||||
print(
|
||||
f"[WARN] Skipping repository {identifier}: "
|
||||
"local directory does not exist."
|
||||
)
|
||||
try:
|
||||
repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}")
|
||||
continue
|
||||
|
||||
print(
|
||||
f"[pkgmgr] Running release for repository {identifier} "
|
||||
f"in '{repo_dir}'..."
|
||||
)
|
||||
if not os.path.isdir(repo_dir):
|
||||
print(f"[WARN] Skipping repository {identifier}: directory missing.")
|
||||
continue
|
||||
|
||||
print(f"[pkgmgr] Running release for repository {identifier}...")
|
||||
|
||||
# Change to repo directory and invoke the helper.
|
||||
cwd_before = os.getcwd()
|
||||
try:
|
||||
os.chdir(repo_dir)
|
||||
|
||||
run_release(
|
||||
pyproject_path="pyproject.toml",
|
||||
changelog_path="CHANGELOG.md",
|
||||
@@ -94,5 +60,17 @@ def handle_release(
|
||||
force=getattr(args, "force", False),
|
||||
close=getattr(args, "close", False),
|
||||
)
|
||||
|
||||
if not getattr(args, "no_publish", False):
|
||||
print(f"[pkgmgr] Running publish for repository {identifier}...")
|
||||
is_tty = sys.stdin.isatty()
|
||||
run_publish(
|
||||
repo=repo,
|
||||
repo_dir=repo_dir,
|
||||
preview=getattr(args, "preview", False),
|
||||
interactive=is_tty,
|
||||
allow_prompt=is_tty,
|
||||
)
|
||||
|
||||
finally:
|
||||
os.chdir(cwd_before)
|
||||
|
||||
@@ -68,6 +68,7 @@ def handle_repos_command(
|
||||
args.clone_mode,
|
||||
args.dependencies,
|
||||
force_update=getattr(args, "update", False),
|
||||
silent=getattr(args, "silent", False),
|
||||
)
|
||||
return
|
||||
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
@@ -16,6 +13,7 @@ from pkgmgr.cli.commands import (
|
||||
handle_repos_command,
|
||||
handle_tools_command,
|
||||
handle_release,
|
||||
handle_publish,
|
||||
handle_version,
|
||||
handle_config,
|
||||
handle_make,
|
||||
@@ -24,40 +22,20 @@ from pkgmgr.cli.commands import (
|
||||
handle_mirror_command,
|
||||
)
|
||||
|
||||
def _has_explicit_selection(args) -> bool:
|
||||
"""
|
||||
Return True if the user explicitly selected repositories via
|
||||
identifiers / --all / --category / --tag / --string.
|
||||
"""
|
||||
identifiers = getattr(args, "identifiers", []) or []
|
||||
use_all = getattr(args, "all", False)
|
||||
categories = getattr(args, "category", []) or []
|
||||
tags = getattr(args, "tag", []) or []
|
||||
string_filter = getattr(args, "string", "") or ""
|
||||
|
||||
def _has_explicit_selection(args) -> bool:
|
||||
return bool(
|
||||
use_all
|
||||
or identifiers
|
||||
or categories
|
||||
or tags
|
||||
or string_filter
|
||||
getattr(args, "all", False)
|
||||
or getattr(args, "identifiers", [])
|
||||
or getattr(args, "category", [])
|
||||
or getattr(args, "tag", [])
|
||||
or getattr(args, "string", "")
|
||||
)
|
||||
|
||||
|
||||
def _select_repo_for_current_directory(
|
||||
ctx: CLIContext,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Heuristic: find the repository whose local directory matches the
|
||||
current working directory or is the closest parent.
|
||||
|
||||
Example:
|
||||
- Repo directory: /home/kevin/Repositories/foo
|
||||
- CWD: /home/kevin/Repositories/foo/subdir
|
||||
→ 'foo' is selected.
|
||||
"""
|
||||
def _select_repo_for_current_directory(ctx: CLIContext) -> List[Dict[str, Any]]:
|
||||
cwd = os.path.abspath(os.getcwd())
|
||||
candidates: List[tuple[str, Dict[str, Any]]] = []
|
||||
matches = []
|
||||
|
||||
for repo in ctx.all_repositories:
|
||||
repo_dir = repo.get("directory")
|
||||
@@ -65,33 +43,24 @@ def _select_repo_for_current_directory(
|
||||
try:
|
||||
repo_dir = get_repo_dir(ctx.repositories_base_dir, repo)
|
||||
except Exception:
|
||||
repo_dir = None
|
||||
if not repo_dir:
|
||||
continue
|
||||
continue
|
||||
|
||||
repo_dir_abs = os.path.abspath(os.path.expanduser(repo_dir))
|
||||
if cwd == repo_dir_abs or cwd.startswith(repo_dir_abs + os.sep):
|
||||
candidates.append((repo_dir_abs, repo))
|
||||
repo_dir = os.path.abspath(os.path.expanduser(repo_dir))
|
||||
if cwd == repo_dir or cwd.startswith(repo_dir + os.sep):
|
||||
matches.append((repo_dir, repo))
|
||||
|
||||
if not candidates:
|
||||
if not matches:
|
||||
return []
|
||||
|
||||
# Pick the repo with the longest (most specific) path.
|
||||
candidates.sort(key=lambda item: len(item[0]), reverse=True)
|
||||
return [candidates[0][1]]
|
||||
matches.sort(key=lambda x: len(x[0]), reverse=True)
|
||||
return [matches[0][1]]
|
||||
|
||||
|
||||
def dispatch_command(args, ctx: CLIContext) -> None:
|
||||
"""
|
||||
Dispatch the parsed arguments to the appropriate command handler.
|
||||
"""
|
||||
|
||||
# First: proxy commands (git / docker / docker compose / make wrapper etc.)
|
||||
if maybe_handle_proxy(args, ctx):
|
||||
return
|
||||
|
||||
# Commands that operate on repository selections
|
||||
commands_with_selection: List[str] = [
|
||||
commands_with_selection = {
|
||||
"install",
|
||||
"update",
|
||||
"deinstall",
|
||||
@@ -103,31 +72,25 @@ def dispatch_command(args, ctx: CLIContext) -> None:
|
||||
"list",
|
||||
"make",
|
||||
"release",
|
||||
"publish",
|
||||
"version",
|
||||
"changelog",
|
||||
"explore",
|
||||
"terminal",
|
||||
"code",
|
||||
"mirror",
|
||||
]
|
||||
}
|
||||
|
||||
if getattr(args, "command", None) in commands_with_selection:
|
||||
if _has_explicit_selection(args):
|
||||
# Classic selection logic (identifiers / --all / filters)
|
||||
selected = get_selected_repos(args, ctx.all_repositories)
|
||||
else:
|
||||
# Default per help text: repository of current folder.
|
||||
selected = _select_repo_for_current_directory(ctx)
|
||||
# If none is found, leave 'selected' empty.
|
||||
# Individual handlers will then emit a clear message instead
|
||||
# of silently picking an unrelated repository.
|
||||
if args.command in commands_with_selection:
|
||||
selected = (
|
||||
get_selected_repos(args, ctx.all_repositories)
|
||||
if _has_explicit_selection(args)
|
||||
else _select_repo_for_current_directory(ctx)
|
||||
)
|
||||
else:
|
||||
selected = []
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# Repos-related commands
|
||||
# ------------------------------------------------------------------ #
|
||||
if args.command in (
|
||||
if args.command in {
|
||||
"install",
|
||||
"deinstall",
|
||||
"delete",
|
||||
@@ -136,15 +99,13 @@ def dispatch_command(args, ctx: CLIContext) -> None:
|
||||
"shell",
|
||||
"create",
|
||||
"list",
|
||||
):
|
||||
}:
|
||||
handle_repos_command(args, ctx, selected)
|
||||
return
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# update
|
||||
# ------------------------------------------------------------
|
||||
if args.command == "update":
|
||||
from pkgmgr.actions.update import UpdateManager
|
||||
|
||||
UpdateManager().run(
|
||||
selected_repos=selected,
|
||||
repositories_base_dir=ctx.repositories_base_dir,
|
||||
@@ -156,25 +117,23 @@ def dispatch_command(args, ctx: CLIContext) -> None:
|
||||
quiet=args.quiet,
|
||||
update_dependencies=args.dependencies,
|
||||
clone_mode=args.clone_mode,
|
||||
silent=getattr(args, "silent", False),
|
||||
force_update=True,
|
||||
)
|
||||
return
|
||||
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# Tools (explore / terminal / code)
|
||||
# ------------------------------------------------------------------ #
|
||||
if args.command in ("explore", "terminal", "code"):
|
||||
handle_tools_command(args, ctx, selected)
|
||||
return
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# Release / Version / Changelog / Config / Make / Branch
|
||||
# ------------------------------------------------------------------ #
|
||||
if args.command == "release":
|
||||
handle_release(args, ctx, selected)
|
||||
return
|
||||
|
||||
if args.command == "publish":
|
||||
handle_publish(args, ctx, selected)
|
||||
return
|
||||
|
||||
if args.command == "version":
|
||||
handle_version(args, ctx, selected)
|
||||
return
|
||||
|
||||
@@ -1,68 +1,73 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
|
||||
from pkgmgr.cli.proxy import register_proxy_commands
|
||||
|
||||
from .common import SortedSubParsersAction
|
||||
from .install_update import add_install_update_subparsers
|
||||
from .config_cmd import add_config_subparsers
|
||||
from .navigation_cmd import add_navigation_subparsers
|
||||
from .branch_cmd import add_branch_subparsers
|
||||
from .release_cmd import add_release_subparser
|
||||
from .version_cmd import add_version_subparser
|
||||
from .changelog_cmd import add_changelog_subparser
|
||||
from .common import SortedSubParsersAction
|
||||
from .config_cmd import add_config_subparsers
|
||||
from .install_update import add_install_update_subparsers
|
||||
from .list_cmd import add_list_subparser
|
||||
from .make_cmd import add_make_subparsers
|
||||
from .mirror_cmd import add_mirror_subparsers
|
||||
from .navigation_cmd import add_navigation_subparsers
|
||||
from .publish_cmd import add_publish_subparser
|
||||
from .release_cmd import add_release_subparser
|
||||
from .version_cmd import add_version_subparser
|
||||
|
||||
|
||||
def create_parser(description_text: str) -> argparse.ArgumentParser:
|
||||
"""
|
||||
Create the top-level argument parser for pkgmgr.
|
||||
"""
|
||||
parser = argparse.ArgumentParser(
|
||||
description=description_text,
|
||||
formatter_class=argparse.RawTextHelpFormatter,
|
||||
)
|
||||
|
||||
subparsers = parser.add_subparsers(
|
||||
dest="command",
|
||||
help="Subcommands",
|
||||
action=SortedSubParsersAction,
|
||||
)
|
||||
|
||||
# Core repo operations
|
||||
# create
|
||||
p_create = subparsers.add_parser(
|
||||
"create",
|
||||
help="Create a new repository (scaffold + config).",
|
||||
)
|
||||
p_create.add_argument(
|
||||
"identifiers",
|
||||
nargs="+",
|
||||
help="Repository identifier(s): URL or 'provider(:port)/owner/repo'.",
|
||||
)
|
||||
p_create.add_argument(
|
||||
"--remote",
|
||||
action="store_true",
|
||||
help="Also push an initial commit to the remote (main/master).",
|
||||
)
|
||||
p_create.add_argument(
|
||||
"--preview",
|
||||
action="store_true",
|
||||
help="Print actions without writing files or executing commands.",
|
||||
)
|
||||
|
||||
add_install_update_subparsers(subparsers)
|
||||
add_config_subparsers(subparsers)
|
||||
|
||||
# Navigation / tooling around repos
|
||||
add_navigation_subparsers(subparsers)
|
||||
|
||||
# Branch & release workflow
|
||||
add_branch_subparsers(subparsers)
|
||||
add_release_subparser(subparsers)
|
||||
add_publish_subparser(subparsers)
|
||||
|
||||
# Info commands
|
||||
add_version_subparser(subparsers)
|
||||
add_changelog_subparser(subparsers)
|
||||
add_list_subparser(subparsers)
|
||||
|
||||
# Make wrapper
|
||||
add_make_subparsers(subparsers)
|
||||
|
||||
# Mirror management
|
||||
add_mirror_subparsers(subparsers)
|
||||
|
||||
# Proxy commands (git, docker, docker compose, ...)
|
||||
register_proxy_commands(subparsers)
|
||||
|
||||
return parser
|
||||
|
||||
|
||||
__all__ = [
|
||||
"create_parser",
|
||||
"SortedSubParsersAction",
|
||||
]
|
||||
__all__ = ["create_parser", "SortedSubParsersAction"]
|
||||
|
||||
@@ -168,3 +168,10 @@ def add_install_update_arguments(subparser: argparse.ArgumentParser) -> None:
|
||||
default="ssh",
|
||||
help="Specify clone mode (default: ssh).",
|
||||
)
|
||||
|
||||
_add_option_if_missing(
|
||||
subparser,
|
||||
"--silent",
|
||||
action="store_true",
|
||||
help="Continue with other repositories if one fails; downgrade errors to warnings.",
|
||||
)
|
||||
|
||||
19
src/pkgmgr/cli/parser/publish_cmd.py
Normal file
19
src/pkgmgr/cli/parser/publish_cmd.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
|
||||
from .common import add_identifier_arguments
|
||||
|
||||
|
||||
def add_publish_subparser(subparsers: argparse._SubParsersAction) -> None:
|
||||
parser = subparsers.add_parser(
|
||||
"publish",
|
||||
help="Publish repository artifacts (e.g. PyPI) based on MIRRORS.",
|
||||
)
|
||||
add_identifier_arguments(parser)
|
||||
|
||||
parser.add_argument(
|
||||
"--non-interactive",
|
||||
action="store_true",
|
||||
help="Disable interactive credential prompts (CI mode).",
|
||||
)
|
||||
@@ -21,22 +21,22 @@ def add_release_subparser(
|
||||
"and updating the changelog."
|
||||
),
|
||||
)
|
||||
|
||||
release_parser.add_argument(
|
||||
"release_type",
|
||||
choices=["major", "minor", "patch"],
|
||||
help="Type of version increment for the release (major, minor, patch).",
|
||||
)
|
||||
|
||||
release_parser.add_argument(
|
||||
"-m",
|
||||
"--message",
|
||||
default=None,
|
||||
help=(
|
||||
"Optional release message to add to the changelog and tag."
|
||||
),
|
||||
help="Optional release message to add to the changelog and tag.",
|
||||
)
|
||||
# Generic selection / preview / list / extra_args
|
||||
|
||||
add_identifier_arguments(release_parser)
|
||||
# Close current branch after successful release
|
||||
|
||||
release_parser.add_argument(
|
||||
"--close",
|
||||
action="store_true",
|
||||
@@ -45,7 +45,7 @@ def add_release_subparser(
|
||||
"repository, if it is not main/master."
|
||||
),
|
||||
)
|
||||
# Force: skip preview+confirmation and run release directly
|
||||
|
||||
release_parser.add_argument(
|
||||
"-f",
|
||||
"--force",
|
||||
@@ -55,3 +55,9 @@ def add_release_subparser(
|
||||
"release directly."
|
||||
),
|
||||
)
|
||||
|
||||
release_parser.add_argument(
|
||||
"--no-publish",
|
||||
action="store_true",
|
||||
help="Do not run publish automatically after a successful release.",
|
||||
)
|
||||
|
||||
@@ -9,15 +9,33 @@ from ..types import KeyringUnavailableError, TokenRequest, TokenResult
|
||||
|
||||
|
||||
def _import_keyring():
|
||||
"""
|
||||
Import python-keyring.
|
||||
|
||||
Raises:
|
||||
KeyringUnavailableError if:
|
||||
- library is missing
|
||||
- no backend is configured / usable
|
||||
- import fails for any reason
|
||||
"""
|
||||
try:
|
||||
import keyring # type: ignore
|
||||
|
||||
return keyring
|
||||
except Exception as exc: # noqa: BLE001
|
||||
raise KeyringUnavailableError(
|
||||
"python-keyring is not available or no backend is configured."
|
||||
"python-keyring is not installed."
|
||||
) from exc
|
||||
|
||||
# Some environments have keyring installed but no usable backend.
|
||||
# We do a lightweight "backend sanity check" by attempting to read the backend.
|
||||
try:
|
||||
_ = keyring.get_keyring()
|
||||
except Exception as exc: # noqa: BLE001
|
||||
raise KeyringUnavailableError(
|
||||
"python-keyring is installed but no usable keyring backend is configured."
|
||||
) from exc
|
||||
|
||||
return keyring
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class KeyringTokenProvider:
|
||||
|
||||
@@ -9,6 +9,37 @@ from typing import Optional
|
||||
from ..types import TokenRequest, TokenResult
|
||||
|
||||
|
||||
def _token_help_url(provider_kind: str, host: str) -> Optional[str]:
|
||||
"""
|
||||
Return a provider-specific URL where a user can create/get an API token.
|
||||
|
||||
Keep this conservative and stable:
|
||||
- GitHub: official token settings URL
|
||||
- Gitea/Forgejo: common settings path on the given host
|
||||
- GitLab: common personal access token path
|
||||
"""
|
||||
kind = (provider_kind or "").strip().lower()
|
||||
h = (host or "").strip()
|
||||
|
||||
# GitHub (cloud)
|
||||
if kind == "github":
|
||||
return "https://github.com/settings/tokens"
|
||||
|
||||
# Gitea / Forgejo (self-hosted)
|
||||
if kind in ("gitea", "forgejo"):
|
||||
# Typical UI path: Settings -> Applications -> Access Tokens
|
||||
# In many installations this is available at /user/settings/applications
|
||||
base = f"https://{h}".rstrip("/")
|
||||
return f"{base}/user/settings/applications"
|
||||
|
||||
# GitLab (cloud or self-hosted)
|
||||
if kind == "gitlab":
|
||||
base = "https://gitlab.com" if not h else f"https://{h}".rstrip("/")
|
||||
return f"{base}/-/profile/personal_access_tokens"
|
||||
|
||||
return None
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class PromptTokenProvider:
|
||||
"""Interactively prompt for a token.
|
||||
@@ -25,6 +56,11 @@ class PromptTokenProvider:
|
||||
return None
|
||||
|
||||
owner_info = f" (owner: {request.owner})" if request.owner else ""
|
||||
help_url = _token_help_url(request.provider_kind, request.host)
|
||||
|
||||
if help_url:
|
||||
print(f"[INFO] Create/get your token here: {help_url}")
|
||||
|
||||
prompt = f"Enter API token for {request.provider_kind} on {request.host}{owner_info}: "
|
||||
token = (getpass(prompt) or "").strip()
|
||||
if not token:
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
# src/pkgmgr/core/credentials/resolver.py
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from .providers.env import EnvTokenProvider
|
||||
from .providers.keyring import KeyringTokenProvider
|
||||
from .providers.prompt import PromptTokenProvider
|
||||
from .types import NoCredentialsError, TokenRequest, TokenResult
|
||||
from .types import KeyringUnavailableError, NoCredentialsError, TokenRequest, TokenResult
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -26,6 +27,26 @@ class TokenResolver:
|
||||
self._env = EnvTokenProvider()
|
||||
self._keyring = KeyringTokenProvider()
|
||||
self._prompt = PromptTokenProvider()
|
||||
self._warned_keyring: bool = False
|
||||
|
||||
def _warn_keyring_unavailable(self, exc: Exception) -> None:
|
||||
if self._warned_keyring:
|
||||
return
|
||||
self._warned_keyring = True
|
||||
|
||||
msg = str(exc).strip() or "Keyring is unavailable."
|
||||
print("[WARN] Keyring support is not available.", file=sys.stderr)
|
||||
print(f" {msg}", file=sys.stderr)
|
||||
print(" Tokens will NOT be persisted securely.", file=sys.stderr)
|
||||
print("", file=sys.stderr)
|
||||
print(" To enable secure token storage, install python-keyring:", file=sys.stderr)
|
||||
print(" pip install keyring", file=sys.stderr)
|
||||
print("", file=sys.stderr)
|
||||
print(" Or install via system packages:", file=sys.stderr)
|
||||
print(" sudo apt install python3-keyring", file=sys.stderr)
|
||||
print(" sudo pacman -S python-keyring", file=sys.stderr)
|
||||
print(" sudo dnf install python3-keyring", file=sys.stderr)
|
||||
print("", file=sys.stderr)
|
||||
|
||||
def get_token(
|
||||
self,
|
||||
@@ -47,9 +68,11 @@ class TokenResolver:
|
||||
kr_res = self._keyring.get(request)
|
||||
if kr_res:
|
||||
return kr_res
|
||||
except KeyringUnavailableError as exc:
|
||||
# Show a helpful warning once, then continue (prompt fallback).
|
||||
self._warn_keyring_unavailable(exc)
|
||||
except Exception:
|
||||
# Keyring missing/unavailable: ignore to allow prompt (workstations)
|
||||
# or to fail cleanly below (headless CI without prompt).
|
||||
# Unknown keyring errors: do not block prompting; still avoid hard crash.
|
||||
pass
|
||||
|
||||
# 3) Prompt (optional)
|
||||
@@ -59,6 +82,8 @@ class TokenResolver:
|
||||
if opts.save_prompt_token_to_keyring:
|
||||
try:
|
||||
self._keyring.set(request, prompt_res.token)
|
||||
except KeyringUnavailableError as exc:
|
||||
self._warn_keyring_unavailable(exc)
|
||||
except Exception:
|
||||
# If keyring cannot store, still use token for this run.
|
||||
pass
|
||||
|
||||
@@ -64,10 +64,12 @@ def ensure_remote_repo(
|
||||
provider = reg.resolve(spec.host)
|
||||
if provider_hint and provider_hint.kind:
|
||||
forced = provider_hint.kind.strip().lower()
|
||||
provider = next(
|
||||
forced_provider = next(
|
||||
(p for p in reg.providers if getattr(p, "kind", "").lower() == forced),
|
||||
None,
|
||||
)
|
||||
if forced_provider is not None:
|
||||
provider = forced_provider
|
||||
|
||||
if provider is None:
|
||||
raise UnsupportedProviderError(f"No provider matched host: {spec.host}")
|
||||
|
||||
124
src/pkgmgr/core/repository/paths.py
Normal file
124
src/pkgmgr/core/repository/paths.py
Normal file
@@ -0,0 +1,124 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Central repository path resolver.
|
||||
|
||||
Goal:
|
||||
- Provide ONE place to define where packaging / changelog / metadata files live.
|
||||
- Prefer modern layout (packaging/*) but stay backwards-compatible with legacy
|
||||
root-level paths.
|
||||
|
||||
Both:
|
||||
- readers (pkgmgr.core.version.source)
|
||||
- writers (pkgmgr.actions.release.workflow)
|
||||
|
||||
should use this module instead of hardcoding paths.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from typing import Iterable, Optional
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class RepoPaths:
|
||||
repo_dir: str
|
||||
|
||||
pyproject_toml: str
|
||||
flake_nix: str
|
||||
|
||||
# Human changelog (typically Markdown)
|
||||
changelog_md: Optional[str]
|
||||
|
||||
# Packaging-related files
|
||||
arch_pkgbuild: Optional[str]
|
||||
debian_changelog: Optional[str]
|
||||
rpm_spec: Optional[str]
|
||||
|
||||
|
||||
def _first_existing(candidates: Iterable[str]) -> Optional[str]:
|
||||
for p in candidates:
|
||||
if p and os.path.isfile(p):
|
||||
return p
|
||||
return None
|
||||
|
||||
|
||||
def _find_first_spec_in_dir(dir_path: str) -> Optional[str]:
|
||||
if not os.path.isdir(dir_path):
|
||||
return None
|
||||
try:
|
||||
for fn in sorted(os.listdir(dir_path)):
|
||||
if fn.endswith(".spec"):
|
||||
p = os.path.join(dir_path, fn)
|
||||
if os.path.isfile(p):
|
||||
return p
|
||||
except OSError:
|
||||
return None
|
||||
return None
|
||||
|
||||
|
||||
def resolve_repo_paths(repo_dir: str) -> RepoPaths:
|
||||
"""
|
||||
Resolve canonical file locations for a repository.
|
||||
|
||||
Preferences (new layout first, legacy fallback second):
|
||||
- PKGBUILD: packaging/arch/PKGBUILD -> PKGBUILD
|
||||
- Debian changelog: packaging/debian/changelog -> debian/changelog
|
||||
- RPM spec: packaging/fedora/package-manager.spec
|
||||
-> first *.spec in packaging/fedora
|
||||
-> first *.spec in repo root
|
||||
- CHANGELOG.md: CHANGELOG.md -> packaging/CHANGELOG.md (optional fallback)
|
||||
|
||||
Notes:
|
||||
- This resolver only returns paths; it does not read/parse files.
|
||||
- Callers should treat Optional paths as "may not exist".
|
||||
"""
|
||||
repo_dir = os.path.abspath(repo_dir)
|
||||
|
||||
pyproject_toml = os.path.join(repo_dir, "pyproject.toml")
|
||||
flake_nix = os.path.join(repo_dir, "flake.nix")
|
||||
|
||||
changelog_md = _first_existing(
|
||||
[
|
||||
os.path.join(repo_dir, "CHANGELOG.md"),
|
||||
os.path.join(repo_dir, "packaging", "CHANGELOG.md"),
|
||||
]
|
||||
)
|
||||
|
||||
arch_pkgbuild = _first_existing(
|
||||
[
|
||||
os.path.join(repo_dir, "packaging", "arch", "PKGBUILD"),
|
||||
os.path.join(repo_dir, "PKGBUILD"),
|
||||
]
|
||||
)
|
||||
|
||||
debian_changelog = _first_existing(
|
||||
[
|
||||
os.path.join(repo_dir, "packaging", "debian", "changelog"),
|
||||
os.path.join(repo_dir, "debian", "changelog"),
|
||||
]
|
||||
)
|
||||
|
||||
# RPM spec: prefer the canonical file, else first spec in packaging/fedora, else first spec in repo root.
|
||||
rpm_spec = _first_existing(
|
||||
[
|
||||
os.path.join(repo_dir, "packaging", "fedora", "package-manager.spec"),
|
||||
]
|
||||
)
|
||||
if rpm_spec is None:
|
||||
rpm_spec = _find_first_spec_in_dir(os.path.join(repo_dir, "packaging", "fedora"))
|
||||
if rpm_spec is None:
|
||||
rpm_spec = _find_first_spec_in_dir(repo_dir)
|
||||
|
||||
return RepoPaths(
|
||||
repo_dir=repo_dir,
|
||||
pyproject_toml=pyproject_toml,
|
||||
flake_nix=flake_nix,
|
||||
changelog_md=changelog_md,
|
||||
arch_pkgbuild=arch_pkgbuild,
|
||||
debian_changelog=debian_changelog,
|
||||
rpm_spec=rpm_spec,
|
||||
)
|
||||
@@ -1,3 +1,4 @@
|
||||
# src/pkgmgr/core/version/source.py
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
@@ -6,6 +7,8 @@ from typing import Optional
|
||||
|
||||
import yaml
|
||||
|
||||
from pkgmgr.core.repository.paths import resolve_repo_paths
|
||||
|
||||
|
||||
def read_pyproject_version(repo_dir: str) -> Optional[str]:
|
||||
"""
|
||||
@@ -13,7 +16,8 @@ def read_pyproject_version(repo_dir: str) -> Optional[str]:
|
||||
|
||||
Expects a PEP 621-style [project] table with a 'version' field.
|
||||
"""
|
||||
path = os.path.join(repo_dir, "pyproject.toml")
|
||||
paths = resolve_repo_paths(repo_dir)
|
||||
path = paths.pyproject_toml
|
||||
if not os.path.isfile(path):
|
||||
return None
|
||||
|
||||
@@ -39,7 +43,8 @@ def read_pyproject_project_name(repo_dir: str) -> Optional[str]:
|
||||
This is required to correctly resolve installed Python package
|
||||
versions via importlib.metadata.
|
||||
"""
|
||||
path = os.path.join(repo_dir, "pyproject.toml")
|
||||
paths = resolve_repo_paths(repo_dir)
|
||||
path = paths.pyproject_toml
|
||||
if not os.path.isfile(path):
|
||||
return None
|
||||
|
||||
@@ -65,7 +70,8 @@ def read_flake_version(repo_dir: str) -> Optional[str]:
|
||||
Looks for:
|
||||
version = "X.Y.Z";
|
||||
"""
|
||||
path = os.path.join(repo_dir, "flake.nix")
|
||||
paths = resolve_repo_paths(repo_dir)
|
||||
path = paths.flake_nix
|
||||
if not os.path.isfile(path):
|
||||
return None
|
||||
|
||||
@@ -84,15 +90,16 @@ def read_flake_version(repo_dir: str) -> Optional[str]:
|
||||
|
||||
def read_pkgbuild_version(repo_dir: str) -> Optional[str]:
|
||||
"""
|
||||
Read the version from PKGBUILD in repo_dir.
|
||||
Read the version from PKGBUILD (preferring packaging/arch/PKGBUILD).
|
||||
|
||||
Combines pkgver and pkgrel if both exist:
|
||||
pkgver=1.2.3
|
||||
pkgrel=1
|
||||
-> 1.2.3-1
|
||||
"""
|
||||
path = os.path.join(repo_dir, "PKGBUILD")
|
||||
if not os.path.isfile(path):
|
||||
paths = resolve_repo_paths(repo_dir)
|
||||
path = paths.arch_pkgbuild
|
||||
if not path or not os.path.isfile(path):
|
||||
return None
|
||||
|
||||
try:
|
||||
@@ -117,13 +124,19 @@ def read_pkgbuild_version(repo_dir: str) -> Optional[str]:
|
||||
|
||||
def read_debian_changelog_version(repo_dir: str) -> Optional[str]:
|
||||
"""
|
||||
Read the latest version from debian/changelog.
|
||||
Read the latest version from debian changelog.
|
||||
|
||||
Preferred path:
|
||||
packaging/debian/changelog
|
||||
Fallback:
|
||||
debian/changelog
|
||||
|
||||
Expected format:
|
||||
package (1.2.3-1) unstable; urgency=medium
|
||||
"""
|
||||
path = os.path.join(repo_dir, "debian", "changelog")
|
||||
if not os.path.isfile(path):
|
||||
paths = resolve_repo_paths(repo_dir)
|
||||
path = paths.debian_changelog
|
||||
if not path or not os.path.isfile(path):
|
||||
return None
|
||||
|
||||
try:
|
||||
@@ -146,37 +159,40 @@ def read_spec_version(repo_dir: str) -> Optional[str]:
|
||||
"""
|
||||
Read the version from an RPM spec file.
|
||||
|
||||
Preferred paths:
|
||||
packaging/fedora/package-manager.spec
|
||||
packaging/fedora/*.spec
|
||||
repo_root/*.spec
|
||||
|
||||
Combines:
|
||||
Version: 1.2.3
|
||||
Release: 1%{?dist}
|
||||
-> 1.2.3-1
|
||||
"""
|
||||
for fn in os.listdir(repo_dir):
|
||||
if not fn.endswith(".spec"):
|
||||
continue
|
||||
paths = resolve_repo_paths(repo_dir)
|
||||
path = paths.rpm_spec
|
||||
if not path or not os.path.isfile(path):
|
||||
return None
|
||||
|
||||
path = os.path.join(repo_dir, fn)
|
||||
try:
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
text = f.read()
|
||||
except Exception:
|
||||
return None
|
||||
try:
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
text = f.read()
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
ver_match = re.search(r"^Version:\s*(.+)$", text, re.MULTILINE)
|
||||
if not ver_match:
|
||||
return None
|
||||
version = ver_match.group(1).strip()
|
||||
ver_match = re.search(r"^Version:\s*(.+)$", text, re.MULTILINE)
|
||||
if not ver_match:
|
||||
return None
|
||||
version = ver_match.group(1).strip()
|
||||
|
||||
rel_match = re.search(r"^Release:\s*(.+)$", text, re.MULTILINE)
|
||||
if rel_match:
|
||||
release_raw = rel_match.group(1).strip()
|
||||
release = release_raw.split("%", 1)[0].split(" ", 1)[0].strip()
|
||||
if release:
|
||||
return f"{version}-{release}"
|
||||
rel_match = re.search(r"^Release:\s*(.+)$", text, re.MULTILINE)
|
||||
if rel_match:
|
||||
release_raw = rel_match.group(1).strip()
|
||||
release = release_raw.split("%", 1)[0].split(" ", 1)[0].strip()
|
||||
if release:
|
||||
return f"{version}-{release}"
|
||||
|
||||
return version or None
|
||||
|
||||
return None
|
||||
return version or None
|
||||
|
||||
|
||||
def read_ansible_galaxy_version(repo_dir: str) -> Optional[str]:
|
||||
|
||||
5
templates/default/.gitignore.j2
Normal file
5
templates/default/.gitignore.j2
Normal file
@@ -0,0 +1,5 @@
|
||||
.venv/
|
||||
dist/
|
||||
build/
|
||||
__pycache__/
|
||||
*.pyc
|
||||
1
templates/default/LICENSE.j2
Normal file
1
templates/default/LICENSE.j2
Normal file
@@ -0,0 +1 @@
|
||||
{{ license_text }}
|
||||
6
templates/default/README.md.j2
Normal file
6
templates/default/README.md.j2
Normal file
@@ -0,0 +1,6 @@
|
||||
# {{ repository }}
|
||||
|
||||
Homepage: {{ homepage }}
|
||||
|
||||
## Author
|
||||
{{ author_name }} <{{ author_email }}>
|
||||
11
templates/default/flake.nix.j2
Normal file
11
templates/default/flake.nix.j2
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
description = "{{ repository }}";
|
||||
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
outputs = { self, nixpkgs }:
|
||||
let system = "x86_64-linux"; pkgs = import nixpkgs { inherit system; };
|
||||
in {
|
||||
devShells.${system}.default = pkgs.mkShell {
|
||||
packages = with pkgs; [ python312 python312Packages.pytest python312Packages.ruff ];
|
||||
};
|
||||
};
|
||||
}
|
||||
21
templates/default/pyproject.toml.j2
Normal file
21
templates/default/pyproject.toml.j2
Normal file
@@ -0,0 +1,21 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=68", "wheel"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "{{ repository }}"
|
||||
version = "0.1.0"
|
||||
description = ""
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.10"
|
||||
authors = [{ name = "{{ author_name }}", email = "{{ author_email }}" }]
|
||||
license = { text = "{{ license_text }}" }
|
||||
urls = { Homepage = "{{ homepage }}" }
|
||||
|
||||
dependencies = []
|
||||
|
||||
[tool.setuptools]
|
||||
package-dir = {"" = "src"}
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["src"]
|
||||
@@ -1,164 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
E2E integration tests for the `pkgmgr mirror` command family.
|
||||
|
||||
Covered commands:
|
||||
|
||||
- pkgmgr mirror --help
|
||||
- pkgmgr mirror list --preview --all
|
||||
- pkgmgr mirror diff --preview --all
|
||||
- pkgmgr mirror merge config file --preview --all
|
||||
- pkgmgr mirror setup --preview --all
|
||||
- pkgmgr mirror check --preview --all
|
||||
- pkgmgr mirror provision --preview --all
|
||||
|
||||
All commands are executed via the real CLI entry point (main module).
|
||||
With --preview enabled, all operations are non-destructive and safe
|
||||
to run inside CI containers.
|
||||
"""
|
||||
|
||||
import io
|
||||
import runpy
|
||||
import sys
|
||||
import unittest
|
||||
from contextlib import redirect_stdout, redirect_stderr
|
||||
|
||||
|
||||
class TestIntegrationMirrorCommands(unittest.TestCase):
|
||||
"""
|
||||
End-to-end tests for `pkgmgr mirror` commands.
|
||||
"""
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Helper
|
||||
# ------------------------------------------------------------
|
||||
def _run_pkgmgr(self, args):
|
||||
"""
|
||||
Execute pkgmgr with the given arguments and return captured output.
|
||||
|
||||
- Treat SystemExit(0) or SystemExit(None) as success.
|
||||
- Any other exit code is considered a test failure.
|
||||
"""
|
||||
original_argv = list(sys.argv)
|
||||
buffer = io.StringIO()
|
||||
cmd_repr = "pkgmgr " + " ".join(args)
|
||||
|
||||
try:
|
||||
sys.argv = ["pkgmgr"] + list(args)
|
||||
|
||||
try:
|
||||
with redirect_stdout(buffer), redirect_stderr(buffer):
|
||||
runpy.run_module("pkgmgr", run_name="__main__")
|
||||
except SystemExit as exc:
|
||||
code = exc.code if isinstance(exc.code, int) else None
|
||||
if code not in (0, None):
|
||||
raise AssertionError(
|
||||
"%r failed with exit code %r.\n\nOutput:\n%s"
|
||||
% (cmd_repr, exc.code, buffer.getvalue())
|
||||
)
|
||||
|
||||
return buffer.getvalue()
|
||||
|
||||
finally:
|
||||
sys.argv = original_argv
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Tests
|
||||
# ------------------------------------------------------------
|
||||
|
||||
def test_mirror_help(self):
|
||||
"""
|
||||
`pkgmgr mirror --help` should run without error and print usage info.
|
||||
"""
|
||||
output = self._run_pkgmgr(["mirror", "--help"])
|
||||
self.assertIn("usage:", output)
|
||||
self.assertIn("pkgmgr mirror", output)
|
||||
|
||||
def test_mirror_list_preview_all(self):
|
||||
"""
|
||||
`pkgmgr mirror list --preview --all`
|
||||
"""
|
||||
output = self._run_pkgmgr(
|
||||
["mirror", "list", "--preview", "--all"]
|
||||
)
|
||||
self.assertTrue(
|
||||
output.strip(),
|
||||
"Expected output from mirror list",
|
||||
)
|
||||
|
||||
def test_mirror_diff_preview_all(self):
|
||||
"""
|
||||
`pkgmgr mirror diff --preview --all`
|
||||
"""
|
||||
output = self._run_pkgmgr(
|
||||
["mirror", "diff", "--preview", "--all"]
|
||||
)
|
||||
self.assertTrue(
|
||||
output.strip(),
|
||||
"Expected output from mirror diff",
|
||||
)
|
||||
|
||||
def test_mirror_merge_config_to_file_preview_all(self):
|
||||
"""
|
||||
`pkgmgr mirror merge config file --preview --all`
|
||||
"""
|
||||
output = self._run_pkgmgr(
|
||||
[
|
||||
"mirror",
|
||||
"merge",
|
||||
"config",
|
||||
"file",
|
||||
"--preview",
|
||||
"--all",
|
||||
]
|
||||
)
|
||||
self.assertTrue(
|
||||
output.strip(),
|
||||
"Expected output from mirror merge (config -> file)",
|
||||
)
|
||||
|
||||
def test_mirror_setup_preview_all(self):
|
||||
"""
|
||||
`pkgmgr mirror setup --preview --all`
|
||||
"""
|
||||
output = self._run_pkgmgr(
|
||||
["mirror", "setup", "--preview", "--all"]
|
||||
)
|
||||
self.assertTrue(
|
||||
output.strip(),
|
||||
"Expected output from mirror setup",
|
||||
)
|
||||
|
||||
def test_mirror_check_preview_all(self):
|
||||
"""
|
||||
`pkgmgr mirror check --preview --all`
|
||||
|
||||
Performs non-destructive remote checks (git ls-remote).
|
||||
"""
|
||||
output = self._run_pkgmgr(
|
||||
["mirror", "check", "--preview", "--all"]
|
||||
)
|
||||
self.assertTrue(
|
||||
output.strip(),
|
||||
"Expected output from mirror check",
|
||||
)
|
||||
|
||||
def test_mirror_provision_preview_all(self):
|
||||
"""
|
||||
`pkgmgr mirror provision --preview --all`
|
||||
|
||||
In preview mode this MUST NOT create remote repositories.
|
||||
"""
|
||||
output = self._run_pkgmgr(
|
||||
["mirror", "provision", "--preview", "--all"]
|
||||
)
|
||||
self.assertTrue(
|
||||
output.strip(),
|
||||
"Expected output from mirror provision (preview)",
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
70
tests/e2e/test_publish_commands.py
Normal file
70
tests/e2e/test_publish_commands.py
Normal file
@@ -0,0 +1,70 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import unittest
|
||||
|
||||
|
||||
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
|
||||
|
||||
|
||||
def _run_help(cmd: list[str], label: str) -> str:
|
||||
print(f"\n[TEST] Running ({label}): {' '.join(cmd)}")
|
||||
proc = subprocess.run(
|
||||
cmd,
|
||||
cwd=PROJECT_ROOT,
|
||||
text=True,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
check=False,
|
||||
env=os.environ.copy(),
|
||||
)
|
||||
print(proc.stdout.rstrip())
|
||||
|
||||
# For --help we expect success (0). Anything else is an error.
|
||||
if proc.returncode != 0:
|
||||
raise AssertionError(
|
||||
f"[TEST] Help command failed ({label}).\n"
|
||||
f"Command: {' '.join(cmd)}\n"
|
||||
f"Exit code: {proc.returncode}\n"
|
||||
f"--- output ---\n{proc.stdout}\n"
|
||||
)
|
||||
|
||||
return proc.stdout
|
||||
|
||||
|
||||
class TestPublishHelpE2E(unittest.TestCase):
|
||||
def test_pkgmgr_publish_help(self) -> None:
|
||||
out = _run_help(["pkgmgr", "publish", "--help"], "pkgmgr publish --help")
|
||||
self.assertIn("usage:", out)
|
||||
self.assertIn("publish", out)
|
||||
|
||||
def test_pkgmgr_help_mentions_publish(self) -> None:
|
||||
out = _run_help(["pkgmgr", "--help"], "pkgmgr --help")
|
||||
self.assertIn("publish", out)
|
||||
|
||||
def test_nix_run_pkgmgr_publish_help(self) -> None:
|
||||
if shutil.which("nix") is None:
|
||||
self.skipTest("nix is not available in this environment")
|
||||
|
||||
out = _run_help(
|
||||
["nix", "run", ".#pkgmgr", "--", "publish", "--help"],
|
||||
"nix run .#pkgmgr -- publish --help",
|
||||
)
|
||||
self.assertIn("usage:", out)
|
||||
self.assertIn("publish", out)
|
||||
|
||||
def test_nix_run_pkgmgr_help_mentions_publish(self) -> None:
|
||||
if shutil.which("nix") is None:
|
||||
self.skipTest("nix is not available in this environment")
|
||||
|
||||
out = _run_help(
|
||||
["nix", "run", ".#pkgmgr", "--", "--help"],
|
||||
"nix run .#pkgmgr -- --help",
|
||||
)
|
||||
self.assertIn("publish", out)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
42
tests/e2e/test_repos_create_preview_output.py
Normal file
42
tests/e2e/test_repos_create_preview_output.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import unittest
|
||||
from contextlib import redirect_stdout
|
||||
from unittest.mock import patch
|
||||
|
||||
from pkgmgr.actions.repository.create import create_repo
|
||||
|
||||
|
||||
class TestE2ECreateRepoPreviewOutput(unittest.TestCase):
|
||||
def test_create_repo_preview_prints_expected_steps(self) -> None:
|
||||
cfg = {"directories": {"repositories": "/tmp/Repositories"}, "repositories": []}
|
||||
|
||||
out = io.StringIO()
|
||||
with (
|
||||
redirect_stdout(out),
|
||||
patch("pkgmgr.actions.repository.create.os.path.exists", return_value=False),
|
||||
patch("pkgmgr.actions.repository.create.generate_alias", return_value="repo"),
|
||||
patch("pkgmgr.actions.repository.create.save_user_config"),
|
||||
patch("pkgmgr.actions.repository.create.os.makedirs"),
|
||||
patch("pkgmgr.actions.repository.create.render_default_templates"),
|
||||
patch("pkgmgr.actions.repository.create.write_mirrors_file"),
|
||||
patch("pkgmgr.actions.repository.create.setup_mirrors"),
|
||||
patch("pkgmgr.actions.repository.create.subprocess.run"),
|
||||
):
|
||||
create_repo(
|
||||
"github.com/acme/repo",
|
||||
cfg,
|
||||
"/tmp/user.yml",
|
||||
"/tmp/bin",
|
||||
remote=False,
|
||||
preview=True,
|
||||
)
|
||||
|
||||
s = out.getvalue()
|
||||
self.assertIn("[Preview] Would save user config:", s)
|
||||
self.assertIn("[Preview] Would ensure directory exists:", s)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -96,6 +96,7 @@ class TestIntegrationUpdateAllshallowNoSystem(unittest.TestCase):
|
||||
"--clone-mode",
|
||||
"shallow",
|
||||
"--no-verification",
|
||||
"--silent",
|
||||
]
|
||||
self._run_cmd(["pkgmgr", *args], label="pkgmgr", env=env)
|
||||
pkgmgr_help_debug()
|
||||
@@ -110,6 +111,7 @@ class TestIntegrationUpdateAllshallowNoSystem(unittest.TestCase):
|
||||
"--clone-mode",
|
||||
"shallow",
|
||||
"--no-verification",
|
||||
"--silent",
|
||||
]
|
||||
self._run_cmd(
|
||||
["nix", "run", ".#pkgmgr", "--", *args],
|
||||
|
||||
172
tests/integration/test_mirror_commands.py
Normal file
172
tests/integration/test_mirror_commands.py
Normal file
@@ -0,0 +1,172 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
CLI integration tests for `pkgmgr mirror`.
|
||||
|
||||
These tests validate:
|
||||
- CLI argument parsing
|
||||
- command dispatch
|
||||
- command orchestration
|
||||
|
||||
All side effects (git, network, remote provisioning, filesystem writes)
|
||||
are patched to keep tests deterministic and CI-safe.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib
|
||||
import io
|
||||
import os
|
||||
import runpy
|
||||
import sys
|
||||
import unittest
|
||||
from contextlib import ExitStack, redirect_stderr, redirect_stdout
|
||||
from typing import Dict, List, Optional
|
||||
from unittest.mock import MagicMock, PropertyMock, patch
|
||||
|
||||
|
||||
class TestIntegrationMirrorCommands(unittest.TestCase):
    """
    Integration tests for `pkgmgr mirror` commands.

    Each test drives the real CLI entry point via ``runpy.run_module`` while
    patching every side-effecting collaborator (git, network, provisioning).
    """

    def _run_pkgmgr(self, args: List[str], extra_env: Optional[Dict[str, str]] = None) -> str:
        """
        Execute pkgmgr with the given arguments and return captured output.

        - Treat SystemExit(0) or SystemExit(None) as success.
        - Any other exit code is considered a test failure.
        - Mirror commands are patched to avoid network/destructive operations.

        :param args: CLI arguments, without the leading program name.
        :param extra_env: optional environment variables applied for the run.
        :return: combined stdout+stderr captured during the run.
        """
        # Saved so global process state can be restored in the finally block.
        original_argv = list(sys.argv)
        original_env = dict(os.environ)
        buffer = io.StringIO()
        cmd_repr = "pkgmgr " + " ".join(args)

        # Shared dummy context used by multiple mirror commands
        dummy_ctx = MagicMock()
        dummy_ctx.identifier = "dummy-repo"
        dummy_ctx.repo_dir = "/tmp/dummy-repo"
        dummy_ctx.config_mirrors = {"origin": "git@github.com:alice/repo.git"}
        dummy_ctx.file_mirrors = {"backup": "ssh://git@git.example:2201/alice/repo.git"}
        # resolved_mirrors is a property on the real context, so it has to be
        # mocked on the type, not assigned on the instance.
        type(dummy_ctx).resolved_mirrors = PropertyMock(
            return_value={
                "origin": "git@github.com:alice/repo.git",
                "backup": "ssh://git@git.example:2201/alice/repo.git",
            }
        )

        # Helper: patch with create=True so missing symbols don't explode.
        # IMPORTANT: patch() does not auto-import submodules when resolving dotted names.
        def _p(target: str, **kwargs):
            module_name = target.rsplit(".", 1)[0]
            try:
                importlib.import_module(module_name)
            except ModuleNotFoundError:
                # If the module truly doesn't exist, create=True may still allow patching
                # in some cases, but dotted resolution can still fail. Best-effort.
                pass
            return patch(target, create=True, **kwargs)

        # Fake result for remote provisioning (preview-safe)
        def _fake_ensure_remote_repo(spec, provider_hint=None, options=None):
            # Safety: E2E should only ever call this in preview mode
            if options is not None and getattr(options, "preview", False) is not True:
                raise AssertionError(
                    f"{cmd_repr} attempted ensure_remote_repo without preview=True in E2E."
                )
            r = MagicMock()
            r.status = "preview"
            r.message = "Preview mode (E2E patched): no remote provisioning performed."
            r.url = None
            return r

        try:
            sys.argv = ["pkgmgr"] + list(args)
            if extra_env:
                os.environ.update(extra_env)

            with ExitStack() as stack:
                # build_context is imported directly in these modules:
                stack.enter_context(_p("pkgmgr.actions.mirror.list_cmd.build_context", return_value=dummy_ctx))
                stack.enter_context(_p("pkgmgr.actions.mirror.diff_cmd.build_context", return_value=dummy_ctx))
                stack.enter_context(_p("pkgmgr.actions.mirror.merge_cmd.build_context", return_value=dummy_ctx))
                stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.build_context", return_value=dummy_ctx))
                stack.enter_context(_p("pkgmgr.actions.mirror.remote_provision.build_context", return_value=dummy_ctx))

                # Deterministic remote probing (covers setup + likely check implementations)
                stack.enter_context(_p("pkgmgr.actions.mirror.remote_check.probe_mirror", return_value=(True, "")))
                stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.probe_mirror", return_value=(True, "")))
                stack.enter_context(_p("pkgmgr.actions.mirror.git_remote.is_remote_reachable", return_value=True))

                # setup_cmd imports ensure_origin_remote directly:
                stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.ensure_origin_remote", return_value=None))
                # Extra safety: if any code calls git_remote.ensure_origin_remote directly
                stack.enter_context(_p("pkgmgr.actions.mirror.git_remote.ensure_origin_remote", return_value=None))

                # remote provisioning: remote_provision imports ensure_remote_repo directly from core:
                stack.enter_context(
                    _p(
                        "pkgmgr.actions.mirror.remote_provision.ensure_remote_repo",
                        side_effect=_fake_ensure_remote_repo,
                    )
                )

                # Extra safety: if anything calls remote_check.run_git directly, make it inert
                stack.enter_context(_p("pkgmgr.actions.mirror.remote_check.run_git", return_value="dummy"))

                with redirect_stdout(buffer), redirect_stderr(buffer):
                    try:
                        runpy.run_module("pkgmgr", run_name="__main__")
                    except SystemExit as exc:
                        # argparse exits with 0 for --help; None also means success.
                        code = exc.code if isinstance(exc.code, int) else None
                        if code not in (0, None):
                            raise AssertionError(
                                "%r failed with exit code %r.\n\nOutput:\n%s"
                                % (cmd_repr, exc.code, buffer.getvalue())
                            )

            return buffer.getvalue()

        finally:
            # Always restore global process state, even on assertion failure.
            sys.argv = original_argv
            os.environ.clear()
            os.environ.update(original_env)

    # ------------------------------------------------------------
    # Tests
    # ------------------------------------------------------------

    def test_mirror_help(self) -> None:
        # `--help` must exit 0 and print usage text for the mirror group.
        output = self._run_pkgmgr(["mirror", "--help"])
        self.assertIn("usage:", output.lower())
        self.assertIn("mirror", output.lower())

    def test_mirror_list_preview_all(self) -> None:
        output = self._run_pkgmgr(["mirror", "list", "--preview", "--all"])
        self.assertTrue(output.strip(), "Expected output from mirror list")

    def test_mirror_diff_preview_all(self) -> None:
        output = self._run_pkgmgr(["mirror", "diff", "--preview", "--all"])
        self.assertTrue(output.strip(), "Expected output from mirror diff")

    def test_mirror_merge_config_to_file_preview_all(self) -> None:
        output = self._run_pkgmgr(["mirror", "merge", "config", "file", "--preview", "--all"])
        self.assertTrue(output.strip(), "Expected output from mirror merge (config -> file)")

    def test_mirror_setup_preview_all(self) -> None:
        output = self._run_pkgmgr(["mirror", "setup", "--preview", "--all"])
        self.assertTrue(output.strip(), "Expected output from mirror setup")

    def test_mirror_check_preview_all(self) -> None:
        output = self._run_pkgmgr(["mirror", "check", "--preview", "--all"])
        self.assertTrue(output.strip(), "Expected output from mirror check")

    def test_mirror_provision_preview_all(self) -> None:
        output = self._run_pkgmgr(["mirror", "provision", "--preview", "--all"])
        self.assertTrue(output.strip(), "Expected output from mirror provision (preview)")


if __name__ == "__main__":
    unittest.main()
|
||||
63
tests/integration/test_nix_profile_list_json.py
Normal file
63
tests/integration/test_nix_profile_list_json.py
Normal file
@@ -0,0 +1,63 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import unittest
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass
class FakeRunResult:
    """
    Mimics your runner returning a structured result object.
    """
    # Process exit status (0 = success).
    returncode: int
    # Captured standard output.
    stdout: str
    # Captured standard error; empty by default.
    stderr: str = ""
|
||||
|
||||
class FakeRunner:
    """
    Runner stub that always hands back the single preconfigured result,
    regardless of context or command.
    """

    def __init__(self, result):
        # Whatever run() should return, verbatim (string or result object).
        self._result = result

    def run(self, ctx, cmd: str, allow_failure: bool = False):
        """Ignore all arguments and return the configured result."""
        return self._result
|
||||
|
||||
|
||||
class TestE2ENixProfileListJsonParsing(unittest.TestCase):
    """
    Verifies that NixProfileInspector can parse `nix profile list --json`
    whether the CommandRunner yields a raw stdout string or a
    RunResult-like object exposing a `.stdout` attribute.
    """

    @staticmethod
    def _raw_payload() -> str:
        # JSON shaped like `nix profile list --json` with a single element.
        return json.dumps(
            {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}
        )

    def _assert_parses(self, runner) -> None:
        # Imported lazily so collection does not require pkgmgr at import time.
        from pkgmgr.actions.install.installers.nix.profile import NixProfileInspector

        parsed = NixProfileInspector().list_json(ctx=None, runner=runner)
        self.assertEqual(parsed["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")

    def test_list_json_accepts_raw_string(self) -> None:
        self._assert_parses(FakeRunner(self._raw_payload()))

    def test_list_json_accepts_runresult_object(self) -> None:
        self._assert_parses(FakeRunner(FakeRunResult(returncode=0, stdout=self._raw_payload())))


if __name__ == "__main__":
    unittest.main()
|
||||
119
tests/integration/test_publish_integration.py
Normal file
119
tests/integration/test_publish_integration.py
Normal file
@@ -0,0 +1,119 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
import unittest
|
||||
from contextlib import redirect_stdout
|
||||
from types import SimpleNamespace
|
||||
|
||||
from pkgmgr.cli.commands.publish import handle_publish
|
||||
|
||||
|
||||
def _run(cmd: list[str], cwd: str) -> None:
|
||||
subprocess.run(
|
||||
cmd,
|
||||
cwd=cwd,
|
||||
check=True,
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL,
|
||||
)
|
||||
|
||||
|
||||
class TestIntegrationPublish(unittest.TestCase):
    """
    Integration tests for `pkgmgr publish` against a real, temporary git repo.

    setUp builds a throwaway repository with one commit, an annotated tag
    v1.2.3 and a MIRRORS file pointing at PyPI; tests run handle_publish()
    in preview mode so nothing is built or uploaded.
    """

    def setUp(self) -> None:
        # git is a hard requirement for the fixture; skip instead of erroring.
        if shutil.which("git") is None:
            self.skipTest("git is required for this integration test")

        self.tmp = tempfile.TemporaryDirectory()
        self.repo_dir = self.tmp.name

        # Initialize git repository
        _run(["git", "init"], cwd=self.repo_dir)
        _run(["git", "config", "user.email", "ci@example.invalid"], cwd=self.repo_dir)
        _run(["git", "config", "user.name", "CI"], cwd=self.repo_dir)

        with open(os.path.join(self.repo_dir, "README.md"), "w", encoding="utf-8") as f:
            f.write("test\n")

        _run(["git", "add", "README.md"], cwd=self.repo_dir)
        _run(["git", "commit", "-m", "init"], cwd=self.repo_dir)
        # Annotated tag: publish derives the version from the latest tag.
        _run(["git", "tag", "-a", "v1.2.3", "-m", "v1.2.3"], cwd=self.repo_dir)

        # Create MIRRORS file with PyPI target
        with open(os.path.join(self.repo_dir, "MIRRORS"), "w", encoding="utf-8") as f:
            f.write("https://pypi.org/project/pkgmgr/\n")

    def tearDown(self) -> None:
        # Removes the temporary repository created in setUp.
        self.tmp.cleanup()

    def test_publish_preview_end_to_end(self) -> None:
        # Minimal CLI context: the temp repo is the only known repository.
        ctx = SimpleNamespace(
            repositories_base_dir=self.repo_dir,
            all_repositories=[
                {
                    "name": "pkgmgr",
                    "directory": self.repo_dir,
                }
            ],
        )

        selected = [
            {
                "name": "pkgmgr",
                "directory": self.repo_dir,
            }
        ]

        args = SimpleNamespace(
            preview=True,
            non_interactive=False,
        )

        buf = io.StringIO()
        with redirect_stdout(buf):
            handle_publish(args=args, ctx=ctx, selected=selected)

        out = buf.getvalue()

        self.assertIn("[pkgmgr] Publishing repository", out)
        self.assertIn("[INFO] Publishing pkgmgr for tag v1.2.3", out)
        self.assertIn("[PREVIEW] Would build and upload to PyPI.", out)

        # Preview must not create dist/
        self.assertFalse(os.path.isdir(os.path.join(self.repo_dir, "dist")))

    def test_publish_skips_without_pypi_mirror(self) -> None:
        # Overwrite MIRRORS so it no longer lists a PyPI target.
        with open(os.path.join(self.repo_dir, "MIRRORS"), "w", encoding="utf-8") as f:
            f.write("git@github.com:example/example.git\n")

        ctx = SimpleNamespace(
            repositories_base_dir=self.repo_dir,
            all_repositories=[
                {
                    "name": "pkgmgr",
                    "directory": self.repo_dir,
                }
            ],
        )

        selected = [
            {
                "name": "pkgmgr",
                "directory": self.repo_dir,
            }
        ]

        args = SimpleNamespace(
            preview=True,
            non_interactive=False,
        )

        buf = io.StringIO()
        with redirect_stdout(buf):
            handle_publish(args=args, ctx=ctx, selected=selected)

        out = buf.getvalue()
        self.assertIn("[INFO] No PyPI mirror found. Skipping publish.", out)
|
||||
66
tests/integration/test_release_publish_hook.py
Normal file
66
tests/integration/test_release_publish_hook.py
Normal file
@@ -0,0 +1,66 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import tempfile
|
||||
import unittest
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import patch
|
||||
|
||||
|
||||
class TestIntegrationReleasePublishHook(unittest.TestCase):
    """
    Verifies that `pkgmgr release` invokes the publish hook by default,
    honors --no-publish, and disables prompting when stdin is not a TTY.
    """

    def _ctx(self) -> SimpleNamespace:
        # Minimal CLIContext shape used by handle_release()
        return SimpleNamespace(
            repositories_base_dir="/tmp",
            all_repositories=[],
        )

    def _parse(self, argv: list[str]):
        """Parse *argv* through the real CLI parser (wiring check included)."""
        from pkgmgr.cli.parser import create_parser

        parser = create_parser("pkgmgr test")
        return parser.parse_args(argv)

    def test_release_runs_publish_by_default_and_respects_tty(self) -> None:
        from pkgmgr.cli.commands.release import handle_release

        with tempfile.TemporaryDirectory() as td:
            selected = [{"directory": td}]

            # Go through real parser to ensure CLI surface is wired correctly
            args = self._parse(["release", "patch"])

            # Patch both hooks plus the TTY probe; non-TTY must force
            # non-interactive publishing.
            with patch("pkgmgr.cli.commands.release.run_release") as m_release, patch(
                "pkgmgr.cli.commands.release.run_publish"
            ) as m_publish, patch(
                "pkgmgr.cli.commands.release.sys.stdin.isatty", return_value=False
            ):
                handle_release(args=args, ctx=self._ctx(), selected=selected)

            m_release.assert_called_once()
            m_publish.assert_called_once()

            _, kwargs = m_publish.call_args
            self.assertEqual(kwargs["repo"], selected[0])
            self.assertEqual(kwargs["repo_dir"], td)
            self.assertFalse(kwargs["interactive"])
            self.assertFalse(kwargs["allow_prompt"])

    def test_release_skips_publish_when_no_publish_flag_set(self) -> None:
        from pkgmgr.cli.commands.release import handle_release

        with tempfile.TemporaryDirectory() as td:
            selected = [{"directory": td}]

            args = self._parse(["release", "patch", "--no-publish"])

            with patch("pkgmgr.cli.commands.release.run_release") as m_release, patch(
                "pkgmgr.cli.commands.release.run_publish"
            ) as m_publish:
                handle_release(args=args, ctx=self._ctx(), selected=selected)

            m_release.assert_called_once()
            m_publish.assert_not_called()


if __name__ == "__main__":
    unittest.main()
|
||||
53
tests/integration/test_repos_create_preview.py
Normal file
53
tests/integration/test_repos_create_preview.py
Normal file
@@ -0,0 +1,53 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib
|
||||
import io
|
||||
import unittest
|
||||
from contextlib import redirect_stdout
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import patch
|
||||
|
||||
|
||||
class TestIntegrationReposCreatePreview(unittest.TestCase):
    """Checks that a preview `repos create` dispatches into create_repo()."""

    def test_repos_create_preview_wires_create_repo(self) -> None:
        # Import lazily to avoid hard-failing if the CLI module/function name differs.
        try:
            cli_module = importlib.import_module("pkgmgr.cli.commands.repos")
        except Exception as exc:
            self.skipTest(f"CLI module not available: {exc}")

        handler = getattr(cli_module, "handle_repos_command", None)
        if handler is None:
            self.skipTest("handle_repos_command not found in pkgmgr.cli.commands.repos")

        # Minimal argparse-shaped namespace for a preview `create` call.
        cli_args = SimpleNamespace(
            command="create",
            identifiers=["github.com/acme/repo"],
            remote=False,
            preview=True,
        )
        # Minimal CLI context with an empty repository inventory.
        cli_ctx = SimpleNamespace(
            repositories_base_dir="/tmp/Repositories",
            binaries_dir="/tmp/bin",
            all_repositories=[],
            config_merged={"directories": {"repositories": "/tmp/Repositories"}, "repositories": []},
            user_config_path="/tmp/user.yml",
        )

        captured = io.StringIO()
        with (
            redirect_stdout(captured),
            patch("pkgmgr.cli.commands.repos.create_repo") as create_repo,
        ):
            handler(cli_args, cli_ctx, selected=[])

        create_repo.assert_called_once()
        self.assertEqual(create_repo.call_args.args[0], "github.com/acme/repo")
        call_kwargs = create_repo.call_args.kwargs
        self.assertEqual(call_kwargs["remote"], False)
        self.assertEqual(call_kwargs["preview"], True)


if __name__ == "__main__":
    unittest.main()
|
||||
65
tests/integration/test_repository_paths_exist.py
Normal file
65
tests/integration/test_repository_paths_exist.py
Normal file
@@ -0,0 +1,65 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
from pkgmgr.core.repository.paths import resolve_repo_paths
|
||||
|
||||
|
||||
def _find_repo_root() -> Path:
|
||||
"""
|
||||
Locate the pkgmgr repository root from the test location.
|
||||
|
||||
Assumes:
|
||||
repo_root/
|
||||
src/pkgmgr/...
|
||||
tests/integration/...
|
||||
"""
|
||||
here = Path(__file__).resolve()
|
||||
for parent in here.parents:
|
||||
if (parent / "pyproject.toml").is_file() and (parent / "src" / "pkgmgr").is_dir():
|
||||
return parent
|
||||
raise RuntimeError("Could not determine repository root for pkgmgr integration test")
|
||||
|
||||
|
||||
class TestRepositoryPathsExist(unittest.TestCase):
    """
    Integration test: pkgmgr is the TEMPLATE repository.
    All canonical paths resolved for pkgmgr must exist.
    """

    def test_pkgmgr_repository_paths_exist(self) -> None:
        repo_root = _find_repo_root()
        paths = resolve_repo_paths(str(repo_root))

        # (resolved path, human-readable label) — grouped as in the template:
        # core metadata, the human changelog, then packaging files.
        expected_files = [
            (paths.pyproject_toml, "pyproject.toml"),
            (paths.flake_nix, "flake.nix"),
            (paths.changelog_md, "CHANGELOG.md"),
            (paths.arch_pkgbuild, "Arch PKGBUILD"),
            (paths.debian_changelog, "Debian changelog"),
            (paths.rpm_spec, "RPM spec file"),
        ]

        missing: list[str] = []
        for path, description in expected_files:
            if not path:
                missing.append(f"{description}: <not resolved>")
            elif not os.path.isfile(path):
                missing.append(f"{description}: {path} (missing)")

        if missing:
            self.fail(
                "pkgmgr repository does not satisfy the canonical repository layout:\n"
                + "\n".join(f"  - {item}" for item in missing)
            )


if __name__ == "__main__":
    unittest.main()
|
||||
110
tests/integration/test_update_silent_continues.py
Normal file
110
tests/integration/test_update_silent_continues.py
Normal file
@@ -0,0 +1,110 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from pkgmgr.actions.update.manager import UpdateManager
|
||||
|
||||
|
||||
class TestUpdateSilentContinues(unittest.TestCase):
    """Continue-on-failure semantics of UpdateManager across three repos."""

    def test_update_continues_on_failures_and_silent_controls_exit_code(self) -> None:
        """
        Integration test for UpdateManager:
        - pull failure on repo A should not stop repo B/C
        - install failure on repo B should not stop repo C
        - without silent -> SystemExit(1) at end if any failures
        - with silent -> no SystemExit even if there are failures
        """

        repos = [
            {"provider": "github", "account": "example", "repository": "repo-a"},
            {"provider": "github", "account": "example", "repository": "repo-b"},
            {"provider": "github", "account": "example", "repository": "repo-c"},
        ]

        # We patch the internal calls used by UpdateManager:
        # - pull_with_verification is called once per repo
        # - install_repos is called once per repo that successfully pulled
        #
        # We simulate:
        #   repo-a: pull fails
        #   repo-b: pull ok, install fails
        #   repo-c: pull ok, install ok
        pull_calls = []
        install_calls = []

        def pull_side_effect(selected_repos, *_args, **_kwargs):
            # selected_repos is a list with exactly one repo in our implementation.
            repo = selected_repos[0]
            pull_calls.append(repo["repository"])
            if repo["repository"] == "repo-a":
                raise SystemExit(2)
            return None

        def install_side_effect(selected_repos, *_args, **kwargs):
            repo = selected_repos[0]
            # Record the silent/emit_summary kwargs so they can be asserted below.
            install_calls.append((repo["repository"], kwargs.get("silent"), kwargs.get("emit_summary")))
            if repo["repository"] == "repo-b":
                raise SystemExit(3)
            return None

        # Patch at the exact import locations used inside UpdateManager.run()
        with patch("pkgmgr.actions.repository.pull.pull_with_verification", side_effect=pull_side_effect), patch(
            "pkgmgr.actions.install.install_repos", side_effect=install_side_effect
        ):
            # 1) silent=True: should NOT raise (even though failures happened)
            UpdateManager().run(
                selected_repos=repos,
                repositories_base_dir="/tmp/repos",
                bin_dir="/tmp/bin",
                all_repos=repos,
                no_verification=True,
                system_update=False,
                preview=True,
                quiet=True,
                update_dependencies=False,
                clone_mode="shallow",
                silent=True,
                force_update=True,
            )

            # Ensure it tried all pulls, and installs happened for B and C only.
            self.assertEqual(pull_calls, ["repo-a", "repo-b", "repo-c"])
            self.assertEqual([r for r, _silent, _emit in install_calls], ["repo-b", "repo-c"])

            # Ensure UpdateManager suppressed install summary spam by passing emit_summary=False.
            for _repo_name, _silent, emit_summary in install_calls:
                self.assertFalse(emit_summary)

            # Reset tracking for the non-silent run
            pull_calls.clear()
            install_calls.clear()

            # 2) silent=False: should raise SystemExit(1) at end due to failures
            with self.assertRaises(SystemExit) as cm:
                UpdateManager().run(
                    selected_repos=repos,
                    repositories_base_dir="/tmp/repos",
                    bin_dir="/tmp/bin",
                    all_repos=repos,
                    no_verification=True,
                    system_update=False,
                    preview=True,
                    quiet=True,
                    update_dependencies=False,
                    clone_mode="shallow",
                    silent=False,
                    force_update=True,
                )
            self.assertEqual(cm.exception.code, 1)

            # Still must have processed all repos (continue-on-failure behavior).
            self.assertEqual(pull_calls, ["repo-a", "repo-b", "repo-c"])
            self.assertEqual([r for r, _silent, _emit in install_calls], ["repo-b", "repo-c"])


if __name__ == "__main__":
    unittest.main()
|
||||
44
tests/unit/pkgmgr/actions/install/installers/nix/_fakes.py
Normal file
44
tests/unit/pkgmgr/actions/install/installers/nix/_fakes.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Optional
|
||||
|
||||
|
||||
@dataclass
class FakeRunResult:
    """Structured stand-in for a command runner's result object."""
    # Process exit status (0 = success).
    returncode: int
    # Captured standard output; empty by default.
    stdout: str = ""
    # Captured standard error; empty by default.
    stderr: str = ""
||||
|
||||
|
||||
class FakeRunner:
    """
    Stub standing in for a command runner: records every run() call and
    answers from a per-command mapping, falling back to a default result.

    Compatible with:
    - CommandRunner.run(ctx, cmd, allow_failure=...)
    - Generic runner.run(ctx, cmd, allow_failure=...)
    """

    def __init__(self, mapping: Optional[dict[str, Any]] = None, default: Any = None):
        # Fall back to a successful empty result when no default is given.
        if default is None:
            default = FakeRunResult(0, "", "")
        self.mapping = mapping if mapping else {}
        self.default = default
        # One (ctx, cmd, allow_failure) tuple per run() invocation.
        self.calls: list[tuple[Any, str, bool]] = []

    def run(self, ctx, cmd: str, allow_failure: bool = False):
        """Record the call, then return the mapped (or default) result."""
        call = (ctx, cmd, allow_failure)
        self.calls.append(call)
        if cmd in self.mapping:
            return self.mapping[cmd]
        return self.default
|
||||
|
||||
|
||||
class FakeRetry:
    """
    Stand-in for GitHubRateLimitRetry.run_with_retry(ctx, runner, cmd):
    replays a scripted sequence of results, then keeps returning a
    successful empty result once the script is exhausted.
    """

    def __init__(self, results: list[FakeRunResult]):
        # Copy so pop() never mutates the caller's list.
        self._results = list(results)
        # Every command passed to run_with_retry(), in order.
        self.calls: list[str] = []

    def run_with_retry(self, ctx, runner, cmd: str):
        """Record *cmd* and return the next scripted result (or a default)."""
        self.calls.append(cmd)
        if not self._results:
            return FakeRunResult(0, "", "")
        return self._results.pop(0)
|
||||
@@ -0,0 +1,58 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
|
||||
from pkgmgr.actions.install.installers.nix.conflicts import NixConflictResolver
|
||||
from ._fakes import FakeRunResult, FakeRunner, FakeRetry
|
||||
|
||||
|
||||
class DummyCtx:
    """Minimal context object: only the quiet flag is consulted here."""

    # Suppress resolver output during tests.
    quiet = True
|
||||
|
||||
|
||||
class TestNixConflictResolver(unittest.TestCase):
    """Unit tests for NixConflictResolver's conflict-removal strategies."""

    def test_resolve_removes_tokens_and_retries_success(self) -> None:
        # Profile-derived remove tokens should be removed, then the install
        # retried; a successful retry means resolve() returns True.
        ctx = DummyCtx()
        install_cmd = "nix profile install /repo#default"

        # stderr shaped like a real nix "file conflict" error message.
        stderr = '''
error: An existing package already provides the following file:
       /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr/bin/pkgmgr
'''

        runner = FakeRunner(mapping={
            "nix profile remove pkgmgr": FakeRunResult(0, "", ""),
        })
        retry = FakeRetry(results=[FakeRunResult(0, "", "")])

        class FakeProfile:
            # No matches via store prefixes; token comes from the output name.
            def find_remove_tokens_for_store_prefixes(self, ctx, runner, prefixes):
                return []
            def find_remove_tokens_for_output(self, ctx, runner, output):
                return ["pkgmgr"]

        resolver = NixConflictResolver(runner=runner, retry=retry, profile=FakeProfile())
        ok = resolver.resolve(ctx, install_cmd, stdout="", stderr=stderr, output="pkgmgr", max_rounds=2)
        self.assertTrue(ok)
        self.assertIn("nix profile remove pkgmgr", [c[1] for c in runner.calls])

    def test_resolve_uses_textual_remove_tokens_last_resort(self) -> None:
        # When the profile inspector yields nothing, tokens parsed from the
        # textual `nix profile remove '...'` hint must still be used.
        ctx = DummyCtx()
        install_cmd = "nix profile install /repo#default"

        stderr = "hint: try:\n nix profile remove 'pkgmgr-1'\n"
        runner = FakeRunner(mapping={
            "nix profile remove pkgmgr-1": FakeRunResult(0, "", ""),
        })
        retry = FakeRetry(results=[FakeRunResult(0, "", "")])

        class FakeProfile:
            # Both structured lookups come back empty on purpose.
            def find_remove_tokens_for_store_prefixes(self, ctx, runner, prefixes):
                return []
            def find_remove_tokens_for_output(self, ctx, runner, output):
                return []

        resolver = NixConflictResolver(runner=runner, retry=retry, profile=FakeProfile())
        ok = resolver.resolve(ctx, install_cmd, stdout="", stderr=stderr, output="pkgmgr", max_rounds=2)
        self.assertTrue(ok)
        self.assertIn("nix profile remove pkgmgr-1", [c[1] for c in runner.calls])
|
||||
@@ -0,0 +1,62 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import unittest
|
||||
|
||||
from pkgmgr.actions.install.installers.nix.profile import NixProfileInspector
|
||||
from ._fakes import FakeRunResult, FakeRunner
|
||||
|
||||
|
||||
class TestNixProfileInspector(unittest.TestCase):
    """Unit tests for NixProfileInspector's JSON parsing and token lookup."""

    def test_list_json_accepts_raw_string(self) -> None:
        # Runner returns raw stdout text; list_json must parse it directly.
        payload = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}
        raw = json.dumps(payload)
        runner = FakeRunner(default=raw)
        insp = NixProfileInspector()
        data = insp.list_json(ctx=None, runner=runner)
        self.assertEqual(data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")

    def test_list_json_accepts_result_object(self) -> None:
        # Runner returns a RunResult-like object; list_json must read .stdout.
        payload = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}
        raw = json.dumps(payload)
        runner = FakeRunner(default=FakeRunResult(0, stdout=raw))
        insp = NixProfileInspector()
        data = insp.list_json(ctx=None, runner=runner)
        self.assertEqual(data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")

    def test_find_remove_tokens_for_output_includes_output_first(self) -> None:
        # The requested output name itself must be the first candidate token.
        payload = {
            "elements": {
                "pkgmgr-1": {"name": "pkgmgr-1", "attrPath": "packages.x86_64-linux.pkgmgr"},
                "default-1": {"name": "default-1", "attrPath": "packages.x86_64-linux.default"},
            }
        }
        raw = json.dumps(payload)
        runner = FakeRunner(default=FakeRunResult(0, stdout=raw))
        insp = NixProfileInspector()
        tokens = insp.find_remove_tokens_for_output(ctx=None, runner=runner, output="pkgmgr")
        self.assertEqual(tokens[0], "pkgmgr")
        self.assertIn("pkgmgr-1", tokens)

    def test_find_remove_tokens_for_store_prefixes(self) -> None:
        # Only the element whose storePaths match the prefix should be found.
        payload = {
            "elements": {
                "pkgmgr-1": {
                    "name": "pkgmgr-1",
                    "attrPath": "packages.x86_64-linux.pkgmgr",
                    "storePaths": ["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
                },
                "something": {
                    "name": "other",
                    "attrPath": "packages.x86_64-linux.other",
                    "storePaths": ["/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-other"],
                },
            }
        }
        raw = json.dumps(payload)
        runner = FakeRunner(default=FakeRunResult(0, stdout=raw))
        insp = NixProfileInspector()
        tokens = insp.find_remove_tokens_for_store_prefixes(
            ctx=None, runner=runner, prefixes=["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"]
        )
        self.assertIn("pkgmgr-1", tokens)
|
||||
@@ -0,0 +1,88 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from pkgmgr.actions.install.installers.nix.installer import NixFlakeInstaller
|
||||
from ._fakes import FakeRunResult
|
||||
|
||||
|
||||
class DummyCtx:
    """Lightweight installer context with just the fields NixFlakeInstaller reads."""

    def __init__(self, identifier: str = "x", repo_dir: str = "/repo", quiet: bool = True, force_update: bool = False):
        # Store each constructor argument verbatim as a public attribute.
        self.force_update = force_update
        self.quiet = quiet
        self.repo_dir = repo_dir
        self.identifier = identifier
|
||||
|
||||
|
||||
class TestNixFlakeInstallerCore(unittest.TestCase):
|
||||
def test_install_only_success_returns(self) -> None:
|
||||
ins = NixFlakeInstaller()
|
||||
ins.supports = MagicMock(return_value=True)
|
||||
|
||||
ins._retry = MagicMock()
|
||||
ins._retry.run_with_retry.return_value = FakeRunResult(0, "", "")
|
||||
ins._conflicts = MagicMock()
|
||||
ins._profile = MagicMock()
|
||||
ins._runner = MagicMock()
|
||||
|
||||
ctx = DummyCtx(identifier="lib", repo_dir="/repo", quiet=True)
|
||||
ins.run(ctx)
|
||||
ins._retry.run_with_retry.assert_called()
|
||||
|
||||
def test_conflict_resolver_success_short_circuits(self) -> None:
|
||||
ins = NixFlakeInstaller()
|
||||
ins.supports = MagicMock(return_value=True)
|
||||
|
||||
ins._retry = MagicMock()
|
||||
ins._retry.run_with_retry.return_value = FakeRunResult(1, "out", "err")
|
||||
ins._conflicts = MagicMock()
|
||||
ins._conflicts.resolve.return_value = True
|
||||
ins._profile = MagicMock()
|
||||
ins._runner = MagicMock()
|
||||
|
||||
ctx = DummyCtx(identifier="lib", repo_dir="/repo", quiet=True)
|
||||
ins.run(ctx)
|
||||
ins._conflicts.resolve.assert_called()
|
||||
|
||||
def test_mandatory_failure_raises_systemexit(self) -> None:
|
||||
ins = NixFlakeInstaller()
|
||||
ins.supports = MagicMock(return_value=True)
|
||||
|
||||
ins._retry = MagicMock()
|
||||
ins._retry.run_with_retry.return_value = FakeRunResult(2, "", "no")
|
||||
ins._conflicts = MagicMock()
|
||||
ins._conflicts.resolve.return_value = False
|
||||
ins._profile = MagicMock()
|
||||
ins._profile.find_installed_indices_for_output.return_value = []
|
||||
ins._runner = MagicMock()
|
||||
ins._runner.run.return_value = FakeRunResult(2, "", "")
|
||||
|
||||
ctx = DummyCtx(identifier="lib", repo_dir="/repo", quiet=True)
|
||||
with self.assertRaises(SystemExit) as cm:
|
||||
ins.run(ctx)
|
||||
self.assertEqual(cm.exception.code, 2)
|
||||
|
||||
def test_optional_failure_does_not_raise(self) -> None:
|
||||
ins = NixFlakeInstaller()
|
||||
ins.supports = MagicMock(return_value=True)
|
||||
|
||||
results = [
|
||||
FakeRunResult(0, "", ""),
|
||||
FakeRunResult(2, "", ""),
|
||||
]
|
||||
|
||||
def run_with_retry(ctx, runner, cmd):
|
||||
return results.pop(0)
|
||||
|
||||
ins._retry = MagicMock()
|
||||
ins._retry.run_with_retry.side_effect = run_with_retry
|
||||
ins._conflicts = MagicMock()
|
||||
ins._conflicts.resolve.return_value = False
|
||||
ins._profile = MagicMock()
|
||||
ins._profile.find_installed_indices_for_output.return_value = []
|
||||
ins._runner = MagicMock()
|
||||
ins._runner.run.return_value = FakeRunResult(2, "", "")
|
||||
|
||||
ctx = DummyCtx(identifier="pkgmgr", repo_dir="/repo", quiet=True)
|
||||
ins.run(ctx) # must not raise
|
||||
@@ -115,105 +115,7 @@ class TestNixFlakeInstaller(unittest.TestCase):
|
||||
|
||||
install_cmds = self._install_cmds_from_calls(subproc_mock.call_args_list)
|
||||
self.assertEqual(install_cmds, [f"nix profile install {self.repo_dir}#default"])
|
||||
|
||||
def test_nix_flake_run_mandatory_failure_raises(self) -> None:
|
||||
"""
|
||||
For a generic repository, 'default' is mandatory.
|
||||
A non-zero return code must raise SystemExit with that code.
|
||||
"""
|
||||
ctx = DummyCtx(identifier="some-lib", repo_dir=self.repo_dir)
|
||||
installer = NixFlakeInstaller()
|
||||
|
||||
# retry layer does one attempt (non-403), then fallback does final attempt => 2 installs
|
||||
install_results = [self._cp(1), self._cp(1)]
|
||||
|
||||
def fake_subprocess_run(cmd, *args, **kwargs):
|
||||
if isinstance(cmd, str) and cmd.startswith("nix profile list --json"):
|
||||
return self._cp(0, stdout='{"elements": []}', stderr="")
|
||||
if isinstance(cmd, str) and cmd.startswith("nix profile install "):
|
||||
return install_results.pop(0)
|
||||
return self._cp(0)
|
||||
|
||||
buf = io.StringIO()
|
||||
with patch("pkgmgr.actions.install.installers.nix.installer.shutil.which") as which_mock, patch(
|
||||
"pkgmgr.actions.install.installers.nix.installer.os.path.exists", return_value=True
|
||||
), patch(
|
||||
"pkgmgr.actions.install.installers.nix.runner.subprocess.run", side_effect=fake_subprocess_run
|
||||
) as subproc_mock, redirect_stdout(buf):
|
||||
self._enable_nix_in_module(which_mock)
|
||||
|
||||
self.assertTrue(installer.supports(ctx))
|
||||
with self.assertRaises(SystemExit) as cm:
|
||||
installer.run(ctx)
|
||||
|
||||
self.assertEqual(cm.exception.code, 1)
|
||||
|
||||
out = buf.getvalue()
|
||||
self.assertIn("[nix] install: nix profile install", out)
|
||||
self.assertIn("[ERROR] Failed to install Nix flake output 'default' (exit 1)", out)
|
||||
|
||||
install_cmds = self._install_cmds_from_calls(subproc_mock.call_args_list)
|
||||
self.assertEqual(
|
||||
install_cmds,
|
||||
[
|
||||
f"nix profile install {self.repo_dir}#default",
|
||||
f"nix profile install {self.repo_dir}#default",
|
||||
],
|
||||
)
|
||||
|
||||
def test_nix_flake_run_optional_failure_does_not_raise(self) -> None:
|
||||
"""
|
||||
For pkgmgr/package-manager repositories:
|
||||
- 'pkgmgr' output is mandatory
|
||||
- 'default' output is optional
|
||||
Failure of optional output must not raise.
|
||||
"""
|
||||
ctx = DummyCtx(identifier="pkgmgr", repo_dir=self.repo_dir)
|
||||
installer = NixFlakeInstaller()
|
||||
|
||||
# pkgmgr success (1 call), default fails (2 calls: attempt + final)
|
||||
install_results = [self._cp(0), self._cp(1), self._cp(1)]
|
||||
|
||||
def fake_subprocess_run(cmd, *args, **kwargs):
|
||||
if isinstance(cmd, str) and cmd.startswith("nix profile list --json"):
|
||||
return self._cp(0, stdout='{"elements": []}', stderr="")
|
||||
if isinstance(cmd, str) and cmd.startswith("nix profile install "):
|
||||
return install_results.pop(0)
|
||||
return self._cp(0)
|
||||
|
||||
buf = io.StringIO()
|
||||
with patch("pkgmgr.actions.install.installers.nix.installer.shutil.which") as which_mock, patch(
|
||||
"pkgmgr.actions.install.installers.nix.installer.os.path.exists", return_value=True
|
||||
), patch(
|
||||
"pkgmgr.actions.install.installers.nix.runner.subprocess.run", side_effect=fake_subprocess_run
|
||||
) as subproc_mock, redirect_stdout(buf):
|
||||
self._enable_nix_in_module(which_mock)
|
||||
|
||||
self.assertTrue(installer.supports(ctx))
|
||||
installer.run(ctx) # must NOT raise
|
||||
|
||||
out = buf.getvalue()
|
||||
|
||||
# Should announce both outputs
|
||||
self.assertIn("ensuring outputs: pkgmgr, default", out)
|
||||
|
||||
# First output ok
|
||||
self.assertIn("[nix] output 'pkgmgr' successfully installed.", out)
|
||||
|
||||
# Second output failed but no raise
|
||||
self.assertIn("[ERROR] Failed to install Nix flake output 'default' (exit 1)", out)
|
||||
self.assertIn("[WARNING] Continuing despite failure of optional output 'default'.", out)
|
||||
|
||||
install_cmds = self._install_cmds_from_calls(subproc_mock.call_args_list)
|
||||
self.assertEqual(
|
||||
install_cmds,
|
||||
[
|
||||
f"nix profile install {self.repo_dir}#pkgmgr",
|
||||
f"nix profile install {self.repo_dir}#default",
|
||||
f"nix profile install {self.repo_dir}#default",
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def test_nix_flake_supports_respects_disable_env(self) -> None:
|
||||
"""
|
||||
PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 must disable the installer,
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
|
||||
from pkgmgr.actions.install.installers.nix.profile.models import NixProfileEntry
|
||||
from pkgmgr.actions.install.installers.nix.profile.matcher import entry_matches_output, entry_matches_store_path
|
||||
|
||||
|
||||
class TestMatcher(unittest.TestCase):
|
||||
def _e(self, name: str, attr: str) -> NixProfileEntry:
|
||||
return NixProfileEntry(
|
||||
key="pkgmgr-1",
|
||||
index=None,
|
||||
name=name,
|
||||
attr_path=attr,
|
||||
store_paths=["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
|
||||
)
|
||||
|
||||
def test_matches_direct_name(self) -> None:
|
||||
self.assertTrue(entry_matches_output(self._e("pkgmgr", ""), "pkgmgr"))
|
||||
|
||||
def test_matches_attrpath_hash(self) -> None:
|
||||
self.assertTrue(entry_matches_output(self._e("", "github:me/repo#pkgmgr"), "pkgmgr"))
|
||||
|
||||
def test_matches_attrpath_dot_suffix(self) -> None:
|
||||
self.assertTrue(entry_matches_output(self._e("", "packages.x86_64-linux.pkgmgr"), "pkgmgr"))
|
||||
|
||||
def test_matches_name_with_suffix_number(self) -> None:
|
||||
self.assertTrue(entry_matches_output(self._e("pkgmgr-1", ""), "pkgmgr"))
|
||||
|
||||
def test_package_manager_special_case(self) -> None:
|
||||
self.assertTrue(entry_matches_output(self._e("package-manager-2", ""), "pkgmgr"))
|
||||
|
||||
def test_store_path_match(self) -> None:
|
||||
entry = self._e("pkgmgr-1", "")
|
||||
self.assertTrue(entry_matches_store_path(entry, "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"))
|
||||
self.assertFalse(entry_matches_store_path(entry, "/nix/store/cccccccccccccccccccccccccccccccc-zzz"))
|
||||
@@ -0,0 +1,39 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
|
||||
from pkgmgr.actions.install.installers.nix.profile.normalizer import coerce_index, normalize_elements
|
||||
|
||||
|
||||
class TestNormalizer(unittest.TestCase):
|
||||
def test_coerce_index_numeric_key(self) -> None:
|
||||
self.assertEqual(coerce_index("3", {"name": "x"}), 3)
|
||||
|
||||
def test_coerce_index_explicit_field(self) -> None:
|
||||
self.assertEqual(coerce_index("pkgmgr-1", {"index": 7}), 7)
|
||||
self.assertEqual(coerce_index("pkgmgr-1", {"id": "8"}), 8)
|
||||
|
||||
def test_coerce_index_trailing_number(self) -> None:
|
||||
self.assertEqual(coerce_index("pkgmgr-42", {"name": "x"}), 42)
|
||||
|
||||
def test_normalize_elements_handles_missing_elements(self) -> None:
|
||||
self.assertEqual(normalize_elements({}), [])
|
||||
|
||||
def test_normalize_elements_collects_store_paths(self) -> None:
|
||||
data = {
|
||||
"elements": {
|
||||
"pkgmgr-1": {
|
||||
"name": "pkgmgr-1",
|
||||
"attrPath": "packages.x86_64-linux.pkgmgr",
|
||||
"storePaths": ["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
|
||||
},
|
||||
"2": {
|
||||
"name": "foo",
|
||||
"attrPath": "packages.x86_64-linux.default",
|
||||
"storePath": "/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-foo",
|
||||
},
|
||||
}
|
||||
}
|
||||
entries = normalize_elements(data)
|
||||
self.assertEqual(len(entries), 2)
|
||||
self.assertTrue(entries[0].store_paths)
|
||||
@@ -0,0 +1,18 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import unittest
|
||||
|
||||
from pkgmgr.actions.install.installers.nix.profile.parser import parse_profile_list_json
|
||||
|
||||
|
||||
class TestParseProfileListJson(unittest.TestCase):
|
||||
def test_parses_valid_json(self) -> None:
|
||||
payload = {"elements": {"0": {"name": "pkgmgr"}}}
|
||||
raw = json.dumps(payload)
|
||||
self.assertEqual(parse_profile_list_json(raw)["elements"]["0"]["name"], "pkgmgr")
|
||||
|
||||
def test_raises_systemexit_on_invalid_json(self) -> None:
|
||||
with self.assertRaises(SystemExit) as cm:
|
||||
parse_profile_list_json("{not json")
|
||||
self.assertIn("Failed to parse", str(cm.exception))
|
||||
@@ -0,0 +1,29 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
|
||||
from pkgmgr.actions.install.installers.nix.profile_list import NixProfileListReader
|
||||
from ._fakes import FakeRunResult, FakeRunner
|
||||
|
||||
|
||||
class TestNixProfileListReader(unittest.TestCase):
|
||||
def test_entries_parses_indices_and_store_prefixes(self) -> None:
|
||||
out = '''
|
||||
0 something /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr
|
||||
1 something /nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-foo
|
||||
'''
|
||||
runner = FakeRunner(default=FakeRunResult(0, stdout=out))
|
||||
reader = NixProfileListReader(runner=runner)
|
||||
entries = reader.entries(ctx=None)
|
||||
self.assertEqual(entries[0][0], 0)
|
||||
self.assertTrue(entries[0][1].startswith("/nix/store/"))
|
||||
|
||||
def test_indices_matching_store_prefixes(self) -> None:
|
||||
out = " 7 x /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr\n"
|
||||
runner = FakeRunner(default=FakeRunResult(0, stdout=out))
|
||||
reader = NixProfileListReader(runner=runner)
|
||||
hits = reader.indices_matching_store_prefixes(
|
||||
ctx=None,
|
||||
prefixes=["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
|
||||
)
|
||||
self.assertEqual(hits, [7])
|
||||
@@ -0,0 +1,29 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
|
||||
from pkgmgr.actions.install.installers.nix.profile.result import extract_stdout_text
|
||||
|
||||
|
||||
class TestExtractStdoutText(unittest.TestCase):
|
||||
def test_accepts_string(self) -> None:
|
||||
self.assertEqual(extract_stdout_text("hello"), "hello")
|
||||
|
||||
def test_accepts_bytes(self) -> None:
|
||||
self.assertEqual(extract_stdout_text(b"hi"), "hi")
|
||||
|
||||
def test_accepts_object_with_stdout_str(self) -> None:
|
||||
class R:
|
||||
stdout = "ok"
|
||||
self.assertEqual(extract_stdout_text(R()), "ok")
|
||||
|
||||
def test_accepts_object_with_stdout_bytes(self) -> None:
|
||||
class R:
|
||||
stdout = b"ok"
|
||||
self.assertEqual(extract_stdout_text(R()), "ok")
|
||||
|
||||
def test_fallback_str(self) -> None:
|
||||
class R:
|
||||
def __str__(self) -> str:
|
||||
return "repr"
|
||||
self.assertEqual(extract_stdout_text(R()), "repr")
|
||||
@@ -0,0 +1,30 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
|
||||
from pkgmgr.actions.install.installers.nix.textparse import NixConflictTextParser
|
||||
|
||||
|
||||
class TestNixConflictTextParser(unittest.TestCase):
|
||||
def test_remove_tokens_parses_unquoted_and_quoted(self) -> None:
|
||||
t = NixConflictTextParser()
|
||||
text = '''
|
||||
nix profile remove pkgmgr
|
||||
nix profile remove 'pkgmgr-1'
|
||||
nix profile remove "default-2"
|
||||
'''
|
||||
tokens = t.remove_tokens(text)
|
||||
self.assertEqual(tokens, ["pkgmgr", "pkgmgr-1", "default-2"])
|
||||
|
||||
def test_existing_store_prefixes_extracts_existing_section_only(self) -> None:
|
||||
t = NixConflictTextParser()
|
||||
text = '''
|
||||
error: An existing package already provides the following file:
|
||||
/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr/bin/pkgmgr
|
||||
/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-pkgmgr/share/doc
|
||||
This is the conflicting file from the new package:
|
||||
/nix/store/cccccccccccccccccccccccccccccccc-pkgmgr/bin/pkgmgr
|
||||
'''
|
||||
prefixes = t.existing_store_prefixes(text)
|
||||
self.assertEqual(len(prefixes), 2)
|
||||
self.assertTrue(prefixes[0].startswith("/nix/store/"))
|
||||
@@ -0,0 +1 @@
|
||||
# Unit test package for pkgmgr.actions.mirror
|
||||
|
||||
51
tests/unit/pkgmgr/actions/mirror/test_context.py
Normal file
51
tests/unit/pkgmgr/actions/mirror/test_context.py
Normal file
@@ -0,0 +1,51 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from pkgmgr.actions.mirror.context import build_context
|
||||
|
||||
|
||||
class TestMirrorContext(unittest.TestCase):
|
||||
"""
|
||||
Unit tests for building RepoMirrorContext from repo + filesystem.
|
||||
"""
|
||||
|
||||
@patch("pkgmgr.actions.mirror.context.read_mirrors_file")
|
||||
@patch("pkgmgr.actions.mirror.context.load_config_mirrors")
|
||||
@patch("pkgmgr.actions.mirror.context.get_repo_dir")
|
||||
@patch("pkgmgr.actions.mirror.context.get_repo_identifier")
|
||||
def test_build_context_bundles_config_and_file_mirrors(
|
||||
self,
|
||||
mock_identifier,
|
||||
mock_repo_dir,
|
||||
mock_load_config,
|
||||
mock_read_file,
|
||||
) -> None:
|
||||
mock_identifier.return_value = "id"
|
||||
mock_repo_dir.return_value = "/tmp/repo"
|
||||
mock_load_config.return_value = {"origin": "git@github.com:alice/repo.git"}
|
||||
mock_read_file.return_value = {"backup": "ssh://git@backup/alice/repo.git"}
|
||||
|
||||
repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
|
||||
|
||||
ctx = build_context(repo, repositories_base_dir="/base", all_repos=[repo])
|
||||
|
||||
self.assertEqual(ctx.identifier, "id")
|
||||
self.assertEqual(ctx.repo_dir, "/tmp/repo")
|
||||
self.assertEqual(ctx.config_mirrors, {"origin": "git@github.com:alice/repo.git"})
|
||||
self.assertEqual(ctx.file_mirrors, {"backup": "ssh://git@backup/alice/repo.git"})
|
||||
self.assertEqual(
|
||||
ctx.resolved_mirrors,
|
||||
{
|
||||
"origin": "git@github.com:alice/repo.git",
|
||||
"backup": "ssh://git@backup/alice/repo.git",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
77
tests/unit/pkgmgr/actions/mirror/test_diff_cmd.py
Normal file
77
tests/unit/pkgmgr/actions/mirror/test_diff_cmd.py
Normal file
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import unittest
|
||||
from contextlib import redirect_stdout
|
||||
from unittest.mock import MagicMock, PropertyMock, patch
|
||||
|
||||
from pkgmgr.actions.mirror.diff_cmd import diff_mirrors
|
||||
|
||||
|
||||
class TestDiffCmd(unittest.TestCase):
|
||||
"""
|
||||
Unit tests for mirror diff output.
|
||||
"""
|
||||
|
||||
@patch("pkgmgr.actions.mirror.diff_cmd.build_context")
|
||||
def test_diff_mirrors_reports_only_in_config_and_only_in_file(self, mock_build_context) -> None:
|
||||
ctx = MagicMock()
|
||||
ctx.identifier = "id"
|
||||
ctx.repo_dir = "/tmp/repo"
|
||||
ctx.config_mirrors = {"origin": "a", "cfgonly": "b"}
|
||||
ctx.file_mirrors = {"origin": "a", "fileonly": "c"}
|
||||
type(ctx).resolved_mirrors = PropertyMock(
|
||||
return_value={"origin": "a", "cfgonly": "b", "fileonly": "c"}
|
||||
)
|
||||
mock_build_context.return_value = ctx
|
||||
|
||||
buf = io.StringIO()
|
||||
with redirect_stdout(buf):
|
||||
diff_mirrors(selected_repos=[{}], repositories_base_dir="/base", all_repos=[])
|
||||
|
||||
out = buf.getvalue()
|
||||
self.assertIn("[ONLY IN CONFIG] cfgonly: b", out)
|
||||
self.assertIn("[ONLY IN FILE] fileonly: c", out)
|
||||
|
||||
@patch("pkgmgr.actions.mirror.diff_cmd.build_context")
|
||||
def test_diff_mirrors_reports_url_mismatch(self, mock_build_context) -> None:
|
||||
ctx = MagicMock()
|
||||
ctx.identifier = "id"
|
||||
ctx.repo_dir = "/tmp/repo"
|
||||
ctx.config_mirrors = {"origin": "a"}
|
||||
ctx.file_mirrors = {"origin": "different"}
|
||||
type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "different"})
|
||||
mock_build_context.return_value = ctx
|
||||
|
||||
buf = io.StringIO()
|
||||
with redirect_stdout(buf):
|
||||
diff_mirrors(selected_repos=[{}], repositories_base_dir="/base", all_repos=[])
|
||||
|
||||
out = buf.getvalue()
|
||||
self.assertIn("[URL MISMATCH]", out)
|
||||
self.assertIn("config: a", out)
|
||||
self.assertIn("file: different", out)
|
||||
|
||||
@patch("pkgmgr.actions.mirror.diff_cmd.build_context")
|
||||
def test_diff_mirrors_reports_in_sync(self, mock_build_context) -> None:
|
||||
ctx = MagicMock()
|
||||
ctx.identifier = "id"
|
||||
ctx.repo_dir = "/tmp/repo"
|
||||
ctx.config_mirrors = {"origin": "a"}
|
||||
ctx.file_mirrors = {"origin": "a"}
|
||||
type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "a"})
|
||||
mock_build_context.return_value = ctx
|
||||
|
||||
buf = io.StringIO()
|
||||
with redirect_stdout(buf):
|
||||
diff_mirrors(selected_repos=[{}], repositories_base_dir="/base", all_repos=[])
|
||||
|
||||
out = buf.getvalue()
|
||||
self.assertIn("[OK] Mirrors in config and MIRRORS file are in sync.", out)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -1,110 +1,66 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from pkgmgr.actions.mirror.git_remote import (
|
||||
build_default_ssh_url,
|
||||
determine_primary_remote_url,
|
||||
has_origin_remote,
|
||||
)
|
||||
from pkgmgr.actions.mirror.types import MirrorMap, Repository
|
||||
from pkgmgr.actions.mirror.types import RepoMirrorContext
|
||||
|
||||
|
||||
class TestMirrorGitRemote(unittest.TestCase):
|
||||
"""
|
||||
Unit tests for SSH URL and primary remote selection logic.
|
||||
"""
|
||||
|
||||
def test_build_default_ssh_url_without_port(self) -> None:
|
||||
repo: Repository = {
|
||||
"provider": "github.com",
|
||||
"account": "kevinveenbirkenbach",
|
||||
"repository": "package-manager",
|
||||
}
|
||||
|
||||
url = build_default_ssh_url(repo)
|
||||
self.assertEqual(
|
||||
url,
|
||||
"git@github.com:kevinveenbirkenbach/package-manager.git",
|
||||
def _ctx(self, *, file=None, config=None) -> RepoMirrorContext:
|
||||
return RepoMirrorContext(
|
||||
identifier="repo",
|
||||
repo_dir="/tmp/repo",
|
||||
config_mirrors=config or {},
|
||||
file_mirrors=file or {},
|
||||
)
|
||||
|
||||
def test_build_default_ssh_url_with_port(self) -> None:
|
||||
repo: Repository = {
|
||||
"provider": "code.cymais.cloud",
|
||||
"account": "kevinveenbirkenbach",
|
||||
"repository": "pkgmgr",
|
||||
"port": 2201,
|
||||
def test_build_default_ssh_url(self) -> None:
|
||||
repo = {
|
||||
"provider": "github.com",
|
||||
"account": "alice",
|
||||
"repository": "repo",
|
||||
}
|
||||
|
||||
url = build_default_ssh_url(repo)
|
||||
self.assertEqual(
|
||||
url,
|
||||
"ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git",
|
||||
build_default_ssh_url(repo),
|
||||
"git@github.com:alice/repo.git",
|
||||
)
|
||||
|
||||
def test_build_default_ssh_url_missing_fields_returns_none(self) -> None:
|
||||
repo: Repository = {
|
||||
"provider": "github.com",
|
||||
"account": "kevinveenbirkenbach",
|
||||
# "repository" fehlt absichtlich
|
||||
}
|
||||
|
||||
url = build_default_ssh_url(repo)
|
||||
self.assertIsNone(url)
|
||||
|
||||
def test_determine_primary_remote_url_prefers_origin_in_resolved_mirrors(
|
||||
self,
|
||||
) -> None:
|
||||
repo: Repository = {
|
||||
"provider": "github.com",
|
||||
"account": "kevinveenbirkenbach",
|
||||
"repository": "package-manager",
|
||||
}
|
||||
mirrors: MirrorMap = {
|
||||
"origin": "git@github.com:kevinveenbirkenbach/package-manager.git",
|
||||
"backup": "ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git",
|
||||
}
|
||||
|
||||
url = determine_primary_remote_url(repo, mirrors)
|
||||
def test_determine_primary_prefers_origin(self) -> None:
|
||||
repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
|
||||
ctx = self._ctx(config={"origin": "git@github.com:alice/repo.git"})
|
||||
self.assertEqual(
|
||||
url,
|
||||
"git@github.com:kevinveenbirkenbach/package-manager.git",
|
||||
determine_primary_remote_url(repo, ctx),
|
||||
"git@github.com:alice/repo.git",
|
||||
)
|
||||
|
||||
def test_determine_primary_remote_url_uses_any_mirror_if_no_origin(self) -> None:
|
||||
repo: Repository = {
|
||||
"provider": "github.com",
|
||||
"account": "kevinveenbirkenbach",
|
||||
"repository": "package-manager",
|
||||
}
|
||||
mirrors: MirrorMap = {
|
||||
"backup": "ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git",
|
||||
"mirror2": "ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git",
|
||||
}
|
||||
|
||||
url = determine_primary_remote_url(repo, mirrors)
|
||||
# Alphabetisch sortiert: backup, mirror2 → backup gewinnt
|
||||
def test_determine_primary_uses_file_order(self) -> None:
|
||||
repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
|
||||
ctx = self._ctx(
|
||||
file={
|
||||
"first": "git@a/first.git",
|
||||
"second": "git@a/second.git",
|
||||
}
|
||||
)
|
||||
self.assertEqual(
|
||||
url,
|
||||
"ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git",
|
||||
determine_primary_remote_url(repo, ctx),
|
||||
"git@a/first.git",
|
||||
)
|
||||
|
||||
def test_determine_primary_remote_url_falls_back_to_default_ssh(self) -> None:
|
||||
repo: Repository = {
|
||||
"provider": "github.com",
|
||||
"account": "kevinveenbirkenbach",
|
||||
"repository": "package-manager",
|
||||
}
|
||||
mirrors: MirrorMap = {}
|
||||
|
||||
url = determine_primary_remote_url(repo, mirrors)
|
||||
def test_determine_primary_fallback_default(self) -> None:
|
||||
repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
|
||||
ctx = self._ctx()
|
||||
self.assertEqual(
|
||||
url,
|
||||
"git@github.com:kevinveenbirkenbach/package-manager.git",
|
||||
determine_primary_remote_url(repo, ctx),
|
||||
"git@github.com:alice/repo.git",
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@patch("pkgmgr.actions.mirror.git_remote._safe_git_output")
|
||||
def test_has_origin_remote(self, m_out) -> None:
|
||||
m_out.return_value = "origin\nupstream\n"
|
||||
self.assertTrue(has_origin_remote("/tmp/repo"))
|
||||
|
||||
@@ -0,0 +1,50 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from pkgmgr.actions.mirror.git_remote import ensure_origin_remote
|
||||
from pkgmgr.actions.mirror.types import RepoMirrorContext
|
||||
|
||||
|
||||
class TestGitRemotePrimaryPush(unittest.TestCase):
|
||||
def test_origin_created_and_extra_push_added(self) -> None:
|
||||
repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
|
||||
ctx = RepoMirrorContext(
|
||||
identifier="repo",
|
||||
repo_dir="/tmp/repo",
|
||||
config_mirrors={},
|
||||
file_mirrors={
|
||||
"primary": "git@github.com:alice/repo.git",
|
||||
"backup": "git@github.com:alice/repo-backup.git",
|
||||
},
|
||||
)
|
||||
|
||||
executed: list[str] = []
|
||||
|
||||
def fake_run(cmd: str, cwd: str, preview: bool) -> None:
|
||||
executed.append(cmd)
|
||||
|
||||
def fake_git(args, cwd):
|
||||
if args == ["remote"]:
|
||||
return ""
|
||||
if args == ["remote", "get-url", "--push", "--all", "origin"]:
|
||||
return "git@github.com:alice/repo.git\n"
|
||||
return ""
|
||||
|
||||
with patch("os.path.isdir", return_value=True), patch(
|
||||
"pkgmgr.actions.mirror.git_remote.run_command", side_effect=fake_run
|
||||
), patch(
|
||||
"pkgmgr.actions.mirror.git_remote._safe_git_output", side_effect=fake_git
|
||||
):
|
||||
ensure_origin_remote(repo, ctx, preview=False)
|
||||
|
||||
self.assertEqual(
|
||||
executed,
|
||||
[
|
||||
"git remote add origin git@github.com:alice/repo.git",
|
||||
"git remote set-url origin git@github.com:alice/repo.git",
|
||||
"git remote set-url --push origin git@github.com:alice/repo.git",
|
||||
"git remote set-url --add --push origin git@github.com:alice/repo-backup.git",
|
||||
],
|
||||
)
|
||||
@@ -7,10 +7,7 @@ import os
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
from pkgmgr.actions.mirror.io import (
|
||||
load_config_mirrors,
|
||||
read_mirrors_file,
|
||||
)
|
||||
from pkgmgr.actions.mirror.io import load_config_mirrors, read_mirrors_file, write_mirrors_file
|
||||
|
||||
|
||||
class TestMirrorIO(unittest.TestCase):
|
||||
@@ -18,117 +15,96 @@ class TestMirrorIO(unittest.TestCase):
|
||||
Unit tests for pkgmgr.actions.mirror.io helpers.
|
||||
"""
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# load_config_mirrors
|
||||
# ------------------------------------------------------------------
|
||||
def test_load_config_mirrors_from_dict(self) -> None:
|
||||
def test_load_config_mirrors_from_dict_filters_empty(self) -> None:
|
||||
repo = {
|
||||
"mirrors": {
|
||||
"origin": "ssh://git@example.com/account/repo.git",
|
||||
"backup": "ssh://git@backup/account/repo.git",
|
||||
"empty": "",
|
||||
"none": None,
|
||||
"backup": "",
|
||||
"invalid": None,
|
||||
}
|
||||
}
|
||||
|
||||
mirrors = load_config_mirrors(repo)
|
||||
self.assertEqual(mirrors, {"origin": "ssh://git@example.com/account/repo.git"})
|
||||
|
||||
self.assertEqual(
|
||||
mirrors,
|
||||
{
|
||||
"origin": "ssh://git@example.com/account/repo.git",
|
||||
"backup": "ssh://git@backup/account/repo.git",
|
||||
},
|
||||
)
|
||||
|
||||
def test_load_config_mirrors_from_list(self) -> None:
|
||||
def test_load_config_mirrors_from_list_filters_invalid_entries(self) -> None:
|
||||
repo = {
|
||||
"mirrors": [
|
||||
{"name": "origin", "url": "ssh://git@example.com/account/repo.git"},
|
||||
{"name": "backup", "url": "ssh://git@backup/account/repo.git"},
|
||||
{"name": "", "url": "ssh://git@invalid/ignored.git"},
|
||||
{"name": "missing-url"},
|
||||
"not-a-dict",
|
||||
{"name": "backup", "url": ""},
|
||||
{"name": "", "url": "ssh://git@example.com/empty-name.git"},
|
||||
{"url": "ssh://git@example.com/missing-name.git"},
|
||||
]
|
||||
}
|
||||
|
||||
mirrors = load_config_mirrors(repo)
|
||||
|
||||
self.assertEqual(
|
||||
mirrors,
|
||||
{
|
||||
"origin": "ssh://git@example.com/account/repo.git",
|
||||
"backup": "ssh://git@backup/account/repo.git",
|
||||
},
|
||||
)
|
||||
self.assertEqual(mirrors, {"origin": "ssh://git@example.com/account/repo.git"})
|
||||
|
||||
def test_load_config_mirrors_empty_when_missing(self) -> None:
|
||||
repo = {}
|
||||
mirrors = load_config_mirrors(repo)
|
||||
self.assertEqual(mirrors, {})
|
||||
self.assertEqual(load_config_mirrors({}), {})
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# read_mirrors_file
|
||||
# ------------------------------------------------------------------
|
||||
def test_read_mirrors_file_with_named_and_url_only_entries(self) -> None:
|
||||
"""
|
||||
Ensure that the MIRRORS file format is parsed correctly:
|
||||
|
||||
- 'name url' → exact name
|
||||
- 'url' → auto name derived from netloc (host[:port]),
|
||||
with numeric suffix if duplicated.
|
||||
"""
|
||||
def test_read_mirrors_file_parses_named_entries(self) -> None:
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
mirrors_path = os.path.join(tmpdir, "MIRRORS")
|
||||
content = "\n".join(
|
||||
[
|
||||
"# comment",
|
||||
"",
|
||||
"origin ssh://git@example.com/account/repo.git",
|
||||
"https://github.com/kevinveenbirkenbach/package-manager",
|
||||
"https://github.com/kevinveenbirkenbach/another-repo",
|
||||
"ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git",
|
||||
]
|
||||
)
|
||||
|
||||
with open(mirrors_path, "w", encoding="utf-8") as fh:
|
||||
fh.write(content + "\n")
|
||||
p = os.path.join(tmpdir, "MIRRORS")
|
||||
with open(p, "w", encoding="utf-8") as fh:
|
||||
fh.write("origin ssh://git@example.com/account/repo.git\n")
|
||||
|
||||
mirrors = read_mirrors_file(tmpdir)
|
||||
|
||||
# 'origin' is preserved as given
|
||||
self.assertIn("origin", mirrors)
|
||||
self.assertEqual(
|
||||
mirrors["origin"],
|
||||
"ssh://git@example.com/account/repo.git",
|
||||
)
|
||||
self.assertEqual(mirrors, {"origin": "ssh://git@example.com/account/repo.git"})
|
||||
|
||||
# Two GitHub URLs → auto names: github.com, github.com2
|
||||
github_urls = {
|
||||
mirrors.get("github.com"),
|
||||
mirrors.get("github.com2"),
|
||||
}
|
||||
self.assertIn(
|
||||
"https://github.com/kevinveenbirkenbach/package-manager",
|
||||
github_urls,
|
||||
)
|
||||
self.assertIn(
|
||||
"https://github.com/kevinveenbirkenbach/another-repo",
|
||||
github_urls,
|
||||
)
|
||||
def test_read_mirrors_file_url_only_uses_netloc_basename_and_suffix(self) -> None:
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
p = os.path.join(tmpdir, "MIRRORS")
|
||||
with open(p, "w", encoding="utf-8") as fh:
|
||||
fh.write(
|
||||
"\n".join(
|
||||
[
|
||||
"https://github.com/alice/repo1",
|
||||
"https://github.com/alice/repo2",
|
||||
"ssh://git@git.veen.world:2201/alice/repo3.git",
|
||||
]
|
||||
)
|
||||
+ "\n"
|
||||
)
|
||||
|
||||
mirrors = read_mirrors_file(tmpdir)
|
||||
|
||||
self.assertIn("github.com", mirrors)
|
||||
self.assertIn("github.com2", mirrors)
|
||||
self.assertEqual(mirrors["github.com"], "https://github.com/alice/repo1")
|
||||
self.assertEqual(mirrors["github.com2"], "https://github.com/alice/repo2")
|
||||
|
||||
# SSH-URL mit User-Teil → netloc ist "git@git.veen.world:2201"
|
||||
# → host = "git@git.veen.world"
|
||||
self.assertIn("git@git.veen.world", mirrors)
|
||||
self.assertEqual(
|
||||
mirrors["git@git.veen.world"],
|
||||
"ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git",
|
||||
)
|
||||
self.assertEqual(mirrors["git@git.veen.world"], "ssh://git@git.veen.world:2201/alice/repo3.git")
|
||||
|
||||
def test_read_mirrors_file_missing_returns_empty(self) -> None:
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
mirrors = read_mirrors_file(tmpdir) # no MIRRORS file
|
||||
self.assertEqual(mirrors, {})
|
||||
self.assertEqual(read_mirrors_file(tmpdir), {})
|
||||
|
||||
def test_write_mirrors_file_writes_sorted_lines(self) -> None:
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
mirrors = {
|
||||
"b": "ssh://b.example/repo.git",
|
||||
"a": "ssh://a.example/repo.git",
|
||||
}
|
||||
write_mirrors_file(tmpdir, mirrors, preview=False)
|
||||
|
||||
p = os.path.join(tmpdir, "MIRRORS")
|
||||
self.assertTrue(os.path.exists(p))
|
||||
|
||||
with open(p, "r", encoding="utf-8") as fh:
|
||||
content = fh.read()
|
||||
|
||||
self.assertEqual(content, "a ssh://a.example/repo.git\nb ssh://b.example/repo.git\n")
|
||||
|
||||
def test_write_mirrors_file_preview_does_not_create_file(self) -> None:
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
mirrors = {"a": "ssh://a.example/repo.git"}
|
||||
write_mirrors_file(tmpdir, mirrors, preview=True)
|
||||
|
||||
p = os.path.join(tmpdir, "MIRRORS")
|
||||
self.assertFalse(os.path.exists(p))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
72
tests/unit/pkgmgr/actions/mirror/test_list_cmd.py
Normal file
72
tests/unit/pkgmgr/actions/mirror/test_list_cmd.py
Normal file
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import unittest
|
||||
from contextlib import redirect_stdout
|
||||
from unittest.mock import MagicMock, PropertyMock, patch
|
||||
|
||||
from pkgmgr.actions.mirror.list_cmd import list_mirrors
|
||||
|
||||
|
||||
class TestListCmd(unittest.TestCase):
|
||||
"""
|
||||
Unit tests for mirror list output.
|
||||
"""
|
||||
|
||||
@patch("pkgmgr.actions.mirror.list_cmd.build_context")
|
||||
def test_list_mirrors_all_sources_prints_sections(self, mock_build_context) -> None:
|
||||
ctx = MagicMock()
|
||||
ctx.identifier = "id"
|
||||
ctx.repo_dir = "/tmp/repo"
|
||||
ctx.config_mirrors = {"origin": "a"}
|
||||
ctx.file_mirrors = {"backup": "b"}
|
||||
type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "a", "backup": "b"})
|
||||
mock_build_context.return_value = ctx
|
||||
|
||||
buf = io.StringIO()
|
||||
with redirect_stdout(buf):
|
||||
list_mirrors(
|
||||
selected_repos=[{}],
|
||||
repositories_base_dir="/base",
|
||||
all_repos=[],
|
||||
source="all",
|
||||
)
|
||||
|
||||
out = buf.getvalue()
|
||||
self.assertIn("[config mirrors]", out)
|
||||
self.assertIn("[MIRRORS file]", out)
|
||||
self.assertIn("[resolved mirrors]", out)
|
||||
self.assertIn("origin: a", out)
|
||||
self.assertIn("backup: b", out)
|
||||
|
||||
@patch("pkgmgr.actions.mirror.list_cmd.build_context")
|
||||
def test_list_mirrors_config_only(self, mock_build_context) -> None:
|
||||
ctx = MagicMock()
|
||||
ctx.identifier = "id"
|
||||
ctx.repo_dir = "/tmp/repo"
|
||||
ctx.config_mirrors = {"origin": "a"}
|
||||
ctx.file_mirrors = {"backup": "b"}
|
||||
type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "a", "backup": "b"})
|
||||
mock_build_context.return_value = ctx
|
||||
|
||||
buf = io.StringIO()
|
||||
with redirect_stdout(buf):
|
||||
list_mirrors(
|
||||
selected_repos=[{}],
|
||||
repositories_base_dir="/base",
|
||||
all_repos=[],
|
||||
source="config",
|
||||
)
|
||||
|
||||
out = buf.getvalue()
|
||||
self.assertIn("[config mirrors]", out)
|
||||
self.assertIn("origin: a", out)
|
||||
self.assertNotIn("[MIRRORS file]", out)
|
||||
self.assertNotIn("[resolved mirrors]", out)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
52
tests/unit/pkgmgr/actions/mirror/test_remote_check.py
Normal file
52
tests/unit/pkgmgr/actions/mirror/test_remote_check.py
Normal file
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from pkgmgr.actions.mirror.remote_check import probe_mirror
|
||||
from pkgmgr.core.git import GitError
|
||||
|
||||
|
||||
class TestRemoteCheck(unittest.TestCase):
|
||||
"""
|
||||
Unit tests for non-destructive remote probing (git ls-remote).
|
||||
"""
|
||||
|
||||
@patch("pkgmgr.actions.mirror.remote_check.run_git")
|
||||
def test_probe_mirror_success_returns_true_and_empty_message(self, mock_run_git) -> None:
|
||||
mock_run_git.return_value = "dummy-output"
|
||||
|
||||
ok, message = probe_mirror(
|
||||
"ssh://git@code.example.org:2201/alice/repo.git",
|
||||
"/tmp/some-repo",
|
||||
)
|
||||
|
||||
self.assertTrue(ok)
|
||||
self.assertEqual(message, "")
|
||||
mock_run_git.assert_called_once_with(
|
||||
["ls-remote", "ssh://git@code.example.org:2201/alice/repo.git"],
|
||||
cwd="/tmp/some-repo",
|
||||
)
|
||||
|
||||
@patch("pkgmgr.actions.mirror.remote_check.run_git")
|
||||
def test_probe_mirror_failure_returns_false_and_error_message(self, mock_run_git) -> None:
|
||||
mock_run_git.side_effect = GitError("Git command failed (simulated)")
|
||||
|
||||
ok, message = probe_mirror(
|
||||
"ssh://git@code.example.org:2201/alice/repo.git",
|
||||
"/tmp/some-repo",
|
||||
)
|
||||
|
||||
self.assertFalse(ok)
|
||||
self.assertIn("Git command failed", message)
|
||||
mock_run_git.assert_called_once_with(
|
||||
["ls-remote", "ssh://git@code.example.org:2201/alice/repo.git"],
|
||||
cwd="/tmp/some-repo",
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
114
tests/unit/pkgmgr/actions/mirror/test_remote_provision.py
Normal file
114
tests/unit/pkgmgr/actions/mirror/test_remote_provision.py
Normal file
@@ -0,0 +1,114 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from unittest.mock import MagicMock, PropertyMock, patch
|
||||
|
||||
from pkgmgr.actions.mirror.remote_provision import ensure_remote_repository
|
||||
|
||||
|
||||
class TestRemoteProvision(unittest.TestCase):
|
||||
"""
|
||||
Unit tests for remote provisioning wrapper logic (action layer).
|
||||
"""
|
||||
|
||||
@patch("pkgmgr.actions.mirror.remote_provision.ensure_remote_repo")
|
||||
@patch("pkgmgr.actions.mirror.remote_provision.determine_primary_remote_url")
|
||||
@patch("pkgmgr.actions.mirror.remote_provision.build_context")
|
||||
def test_ensure_remote_repository_builds_spec_from_url_and_calls_core(
|
||||
self,
|
||||
mock_build_context,
|
||||
mock_determine_primary,
|
||||
mock_ensure_remote_repo,
|
||||
) -> None:
|
||||
ctx = MagicMock()
|
||||
type(ctx).resolved_mirrors = PropertyMock(
|
||||
return_value={"origin": "ssh://git@git.veen.world:2201/alice/repo.git"}
|
||||
)
|
||||
ctx.identifier = "repo-id"
|
||||
mock_build_context.return_value = ctx
|
||||
|
||||
mock_determine_primary.return_value = "ssh://git@git.veen.world:2201/alice/repo.git"
|
||||
|
||||
result = MagicMock()
|
||||
result.status = "created"
|
||||
result.message = "Repository created (user)."
|
||||
result.url = "https://git.veen.world/alice/repo"
|
||||
mock_ensure_remote_repo.return_value = result
|
||||
|
||||
repo = {
|
||||
"provider": "gitea",
|
||||
"account": "SHOULD_NOT_BE_USED_ANYMORE",
|
||||
"repository": "SHOULD_NOT_BE_USED_ANYMORE",
|
||||
"private": True,
|
||||
"description": "desc",
|
||||
}
|
||||
|
||||
ensure_remote_repository(
|
||||
repo=repo,
|
||||
repositories_base_dir="/base",
|
||||
all_repos=[],
|
||||
preview=False,
|
||||
)
|
||||
|
||||
self.assertTrue(mock_ensure_remote_repo.called)
|
||||
called_spec = mock_ensure_remote_repo.call_args[0][0]
|
||||
self.assertEqual(called_spec.host, "git.veen.world")
|
||||
self.assertEqual(called_spec.owner, "alice")
|
||||
self.assertEqual(called_spec.name, "repo")
|
||||
|
||||
@patch("pkgmgr.actions.mirror.remote_provision.ensure_remote_repo")
|
||||
@patch("pkgmgr.actions.mirror.remote_provision.determine_primary_remote_url")
|
||||
@patch("pkgmgr.actions.mirror.remote_provision.build_context")
|
||||
def test_ensure_remote_repository_skips_when_no_primary_url(
|
||||
self,
|
||||
mock_build_context,
|
||||
mock_determine_primary,
|
||||
mock_ensure_remote_repo,
|
||||
) -> None:
|
||||
ctx = MagicMock()
|
||||
type(ctx).resolved_mirrors = PropertyMock(return_value={})
|
||||
ctx.identifier = "repo-id"
|
||||
mock_build_context.return_value = ctx
|
||||
mock_determine_primary.return_value = None
|
||||
|
||||
ensure_remote_repository(
|
||||
repo={"provider": "gitea"},
|
||||
repositories_base_dir="/base",
|
||||
all_repos=[],
|
||||
preview=False,
|
||||
)
|
||||
|
||||
mock_ensure_remote_repo.assert_not_called()
|
||||
|
||||
@patch("pkgmgr.actions.mirror.remote_provision.ensure_remote_repo")
|
||||
@patch("pkgmgr.actions.mirror.remote_provision.determine_primary_remote_url")
|
||||
@patch("pkgmgr.actions.mirror.remote_provision.build_context")
|
||||
def test_ensure_remote_repository_skips_when_url_not_parseable(
|
||||
self,
|
||||
mock_build_context,
|
||||
mock_determine_primary,
|
||||
mock_ensure_remote_repo,
|
||||
) -> None:
|
||||
ctx = MagicMock()
|
||||
type(ctx).resolved_mirrors = PropertyMock(
|
||||
return_value={"origin": "ssh://git@host:2201/not-enough-parts"}
|
||||
)
|
||||
ctx.identifier = "repo-id"
|
||||
mock_build_context.return_value = ctx
|
||||
mock_determine_primary.return_value = "ssh://git@host:2201/not-enough-parts"
|
||||
|
||||
ensure_remote_repository(
|
||||
repo={"provider": "gitea"},
|
||||
repositories_base_dir="/base",
|
||||
all_repos=[],
|
||||
preview=False,
|
||||
)
|
||||
|
||||
mock_ensure_remote_repo.assert_not_called()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -1,58 +1,101 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from pkgmgr.actions.mirror.setup_cmd import _probe_mirror
|
||||
from pkgmgr.core.git import GitError
|
||||
from pkgmgr.actions.mirror.setup_cmd import setup_mirrors
|
||||
from pkgmgr.actions.mirror.types import RepoMirrorContext
|
||||
|
||||
|
||||
class TestMirrorSetupCmd(unittest.TestCase):
|
||||
"""
|
||||
Unit tests for the non-destructive remote probing logic in setup_cmd.
|
||||
"""
|
||||
|
||||
@patch("pkgmgr.actions.mirror.setup_cmd.run_git")
|
||||
def test_probe_mirror_success_returns_true_and_empty_message(
|
||||
def _ctx(
|
||||
self,
|
||||
mock_run_git,
|
||||
) -> None:
|
||||
"""
|
||||
If run_git returns successfully, _probe_mirror must report (True, "").
|
||||
"""
|
||||
mock_run_git.return_value = "dummy-output"
|
||||
|
||||
ok, message = _probe_mirror(
|
||||
"ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git",
|
||||
"/tmp/some-repo",
|
||||
*,
|
||||
repo_dir: str = "/tmp/repo",
|
||||
resolved: dict[str, str] | None = None,
|
||||
) -> RepoMirrorContext:
|
||||
# RepoMirrorContext derives resolved via property (config + file)
|
||||
# We feed mirrors via file_mirrors to keep insertion order realistic.
|
||||
return RepoMirrorContext(
|
||||
identifier="repo-id",
|
||||
repo_dir=repo_dir,
|
||||
config_mirrors={},
|
||||
file_mirrors=resolved or {},
|
||||
)
|
||||
|
||||
self.assertTrue(ok)
|
||||
self.assertEqual(message, "")
|
||||
mock_run_git.assert_called_once()
|
||||
@patch("pkgmgr.actions.mirror.setup_cmd.build_context")
|
||||
@patch("pkgmgr.actions.mirror.setup_cmd.ensure_origin_remote")
|
||||
def test_setup_mirrors_local_calls_ensure_origin_remote(self, m_ensure, m_ctx) -> None:
|
||||
m_ctx.return_value = self._ctx(repo_dir="/tmp/repo", resolved={"primary": "git@x/y.git"})
|
||||
|
||||
@patch("pkgmgr.actions.mirror.setup_cmd.run_git")
|
||||
def test_probe_mirror_failure_returns_false_and_error_message(
|
||||
self,
|
||||
mock_run_git,
|
||||
) -> None:
|
||||
"""
|
||||
If run_git raises GitError, _probe_mirror must report (False, <message>),
|
||||
and not re-raise the exception.
|
||||
"""
|
||||
mock_run_git.side_effect = GitError("Git command failed (simulated)")
|
||||
|
||||
ok, message = _probe_mirror(
|
||||
"ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git",
|
||||
"/tmp/some-repo",
|
||||
repos = [{"provider": "github.com", "account": "alice", "repository": "repo"}]
|
||||
setup_mirrors(
|
||||
selected_repos=repos,
|
||||
repositories_base_dir="/tmp",
|
||||
all_repos=repos,
|
||||
preview=True,
|
||||
local=True,
|
||||
remote=False,
|
||||
ensure_remote=False,
|
||||
)
|
||||
|
||||
self.assertFalse(ok)
|
||||
self.assertIn("Git command failed", message)
|
||||
mock_run_git.assert_called_once()
|
||||
self.assertEqual(m_ensure.call_count, 1)
|
||||
args, kwargs = m_ensure.call_args
|
||||
|
||||
# ensure_origin_remote(repo, ctx, preview) may be positional or kw.
|
||||
# Accept both to avoid coupling tests to call style.
|
||||
if "preview" in kwargs:
|
||||
self.assertTrue(kwargs["preview"])
|
||||
else:
|
||||
# args: (repo, ctx, preview)
|
||||
self.assertTrue(args[2])
|
||||
|
||||
@patch("pkgmgr.actions.mirror.setup_cmd.build_context")
|
||||
@patch("pkgmgr.actions.mirror.setup_cmd.probe_mirror")
|
||||
@patch("pkgmgr.actions.mirror.setup_cmd.determine_primary_remote_url")
|
||||
def test_setup_mirrors_remote_no_mirrors_probes_primary(self, m_primary, m_probe, m_ctx) -> None:
|
||||
m_ctx.return_value = self._ctx(repo_dir="/tmp/repo", resolved={})
|
||||
m_primary.return_value = "git@github.com:alice/repo.git"
|
||||
m_probe.return_value = (True, "")
|
||||
|
||||
repos = [{"provider": "github.com", "account": "alice", "repository": "repo"}]
|
||||
setup_mirrors(
|
||||
selected_repos=repos,
|
||||
repositories_base_dir="/tmp",
|
||||
all_repos=repos,
|
||||
preview=True,
|
||||
local=False,
|
||||
remote=True,
|
||||
ensure_remote=False,
|
||||
)
|
||||
|
||||
m_primary.assert_called()
|
||||
m_probe.assert_called_with("git@github.com:alice/repo.git", "/tmp/repo")
|
||||
|
||||
@patch("pkgmgr.actions.mirror.setup_cmd.build_context")
|
||||
@patch("pkgmgr.actions.mirror.setup_cmd.probe_mirror")
|
||||
def test_setup_mirrors_remote_with_mirrors_probes_each(self, m_probe, m_ctx) -> None:
|
||||
m_ctx.return_value = self._ctx(
|
||||
repo_dir="/tmp/repo",
|
||||
resolved={
|
||||
"origin": "git@github.com:alice/repo.git",
|
||||
"backup": "ssh://git@git.veen.world:2201/alice/repo.git",
|
||||
},
|
||||
)
|
||||
m_probe.return_value = (True, "")
|
||||
|
||||
repos = [{"provider": "github.com", "account": "alice", "repository": "repo"}]
|
||||
setup_mirrors(
|
||||
selected_repos=repos,
|
||||
repositories_base_dir="/tmp",
|
||||
all_repos=repos,
|
||||
preview=True,
|
||||
local=False,
|
||||
remote=True,
|
||||
ensure_remote=False,
|
||||
)
|
||||
|
||||
self.assertEqual(m_probe.call_count, 2)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
77
tests/unit/pkgmgr/actions/mirror/test_url_utils.py
Normal file
77
tests/unit/pkgmgr/actions/mirror/test_url_utils.py
Normal file
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
|
||||
from pkgmgr.actions.mirror.url_utils import hostport_from_git_url, normalize_provider_host, parse_repo_from_git_url
|
||||
|
||||
|
||||
class TestUrlUtils(unittest.TestCase):
|
||||
"""
|
||||
Unit tests for URL parsing helpers used in mirror setup/provisioning.
|
||||
"""
|
||||
|
||||
def test_hostport_from_git_url_ssh_url_with_port(self) -> None:
|
||||
host, port = hostport_from_git_url("ssh://git@code.example.org:2201/alice/repo.git")
|
||||
self.assertEqual(host, "code.example.org")
|
||||
self.assertEqual(port, "2201")
|
||||
|
||||
def test_hostport_from_git_url_https_url_no_port(self) -> None:
|
||||
host, port = hostport_from_git_url("https://github.com/alice/repo.git")
|
||||
self.assertEqual(host, "github.com")
|
||||
self.assertIsNone(port)
|
||||
|
||||
def test_hostport_from_git_url_scp_like(self) -> None:
|
||||
host, port = hostport_from_git_url("git@github.com:alice/repo.git")
|
||||
self.assertEqual(host, "github.com")
|
||||
self.assertIsNone(port)
|
||||
|
||||
def test_hostport_from_git_url_empty(self) -> None:
|
||||
host, port = hostport_from_git_url("")
|
||||
self.assertEqual(host, "")
|
||||
self.assertIsNone(port)
|
||||
|
||||
def test_normalize_provider_host_strips_port_and_lowercases(self) -> None:
|
||||
self.assertEqual(normalize_provider_host("GIT.VEEN.WORLD:2201"), "git.veen.world")
|
||||
|
||||
def test_normalize_provider_host_ipv6_brackets(self) -> None:
|
||||
self.assertEqual(normalize_provider_host("[::1]"), "::1")
|
||||
|
||||
def test_normalize_provider_host_empty(self) -> None:
|
||||
self.assertEqual(normalize_provider_host(""), "")
|
||||
|
||||
def test_parse_repo_from_git_url_ssh_url(self) -> None:
|
||||
host, owner, name = parse_repo_from_git_url("ssh://git@code.example.org:2201/alice/repo.git")
|
||||
self.assertEqual(host, "code.example.org")
|
||||
self.assertEqual(owner, "alice")
|
||||
self.assertEqual(name, "repo")
|
||||
|
||||
def test_parse_repo_from_git_url_https_url(self) -> None:
|
||||
host, owner, name = parse_repo_from_git_url("https://github.com/alice/repo.git")
|
||||
self.assertEqual(host, "github.com")
|
||||
self.assertEqual(owner, "alice")
|
||||
self.assertEqual(name, "repo")
|
||||
|
||||
def test_parse_repo_from_git_url_scp_like(self) -> None:
|
||||
host, owner, name = parse_repo_from_git_url("git@github.com:alice/repo.git")
|
||||
self.assertEqual(host, "github.com")
|
||||
self.assertEqual(owner, "alice")
|
||||
self.assertEqual(name, "repo")
|
||||
|
||||
def test_parse_repo_from_git_url_best_effort_host_owner_repo(self) -> None:
|
||||
host, owner, name = parse_repo_from_git_url("git.veen.world/alice/repo.git")
|
||||
self.assertEqual(host, "git.veen.world")
|
||||
self.assertEqual(owner, "alice")
|
||||
self.assertEqual(name, "repo")
|
||||
|
||||
def test_parse_repo_from_git_url_missing_owner_repo_returns_none(self) -> None:
|
||||
host, owner, name = parse_repo_from_git_url("https://github.com/")
|
||||
self.assertEqual(host, "github.com")
|
||||
self.assertIsNone(owner)
|
||||
self.assertIsNone(name)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
0
tests/unit/pkgmgr/actions/publish/__init__.py
Normal file
0
tests/unit/pkgmgr/actions/publish/__init__.py
Normal file
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user