Compare commits
57 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0a6c2f2988 | ||
|
|
0c90e984ad | ||
|
|
0a0cbbfe6d | ||
|
|
15c44cd484 | ||
|
|
6d7ee6fc04 | ||
|
|
5a022db0db | ||
|
|
37ac22e0b4 | ||
|
|
bcea440e40 | ||
|
|
6edde2d65b | ||
|
|
74189c1e14 | ||
|
|
b5ddf7402a | ||
|
|
900224ed2e | ||
|
|
e290043089 | ||
|
|
a7fd37d646 | ||
|
|
d4b00046d3 | ||
|
|
545d345ea4 | ||
|
|
a29b831e41 | ||
|
|
bc9ca140bd | ||
|
|
ad8e3cd07c | ||
|
|
22efe0b32e | ||
|
|
d23a0a94d5 | ||
|
|
e42b79c9d8 | ||
|
|
3b2c657bfa | ||
|
|
e335ab05a1 | ||
|
|
75f963d6e2 | ||
|
|
94b998741f | ||
|
|
172c734866 | ||
|
|
1b483e178d | ||
|
|
78693225f1 | ||
|
|
ca08c84789 | ||
|
|
e930b422e5 | ||
|
|
0833d04376 | ||
|
|
55f36d76ec | ||
|
|
6a838ee84f | ||
|
|
4285bf4a54 | ||
|
|
640b1042c2 | ||
|
|
9357c4632e | ||
|
|
ca5d0d22f3 | ||
|
|
3875338fb7 | ||
|
|
196f55c58e | ||
|
|
9a149715f6 | ||
|
|
bf40533469 | ||
|
|
7bc7259988 | ||
|
|
66b96ac3a5 | ||
|
|
f974e0b14a | ||
|
|
de8c3f768d | ||
|
|
05ff250251 | ||
|
|
ab52d37467 | ||
|
|
80329b85fb | ||
|
|
44ff0a6cd9 | ||
|
|
e00b1a7b69 | ||
|
|
14f0188efd | ||
|
|
a4efb847ba | ||
|
|
d50891dfe5 | ||
|
|
59d0355b91 | ||
|
|
da9d5cfa6b | ||
|
|
f9943fafae |
25
.github/workflows/test-container.yml
vendored
Normal file
25
.github/workflows/test-container.yml
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
name: Test OS Containers
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
- develop
|
||||
- "*"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
test-container:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Show Docker version
|
||||
run: docker version
|
||||
|
||||
- name: Run container tests
|
||||
run: make test-container
|
||||
2
.github/workflows/test-e2e.yml
vendored
2
.github/workflows/test-e2e.yml
vendored
@@ -1,4 +1,4 @@
|
||||
name: Test package-manager (e2e)
|
||||
name: Test End-To-End
|
||||
|
||||
on:
|
||||
push:
|
||||
|
||||
8
.github/workflows/test-integration.yml
vendored
8
.github/workflows/test-integration.yml
vendored
@@ -1,4 +1,4 @@
|
||||
name: Test package-manager (integration)
|
||||
name: Test Code Integration
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -21,9 +21,5 @@ jobs:
|
||||
- name: Show Docker version
|
||||
run: docker version
|
||||
|
||||
# Build Arch test image (same as used in test-unit and test-e2e)
|
||||
- name: Build test images
|
||||
run: make build
|
||||
|
||||
- name: Run integration tests via make (Arch container)
|
||||
run: make test-integration
|
||||
run: make test-integration DISTROS="arch"
|
||||
|
||||
4
.github/workflows/test-unit.yml
vendored
4
.github/workflows/test-unit.yml
vendored
@@ -1,4 +1,4 @@
|
||||
name: Test package-manager (unit)
|
||||
name: Test Units
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -22,4 +22,4 @@ jobs:
|
||||
run: docker version
|
||||
|
||||
- name: Run unit tests via make (Arch container)
|
||||
run: make test-unit
|
||||
run: make test-unit DISTROS="arch"
|
||||
|
||||
64
.github/workflows/test-virgin-root.yml
vendored
Normal file
64
.github/workflows/test-virgin-root.yml
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
name: Test Virgin Root
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
- develop
|
||||
- "*"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
test-virgin-root:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Show Docker version
|
||||
run: docker version
|
||||
|
||||
- name: Virgin Arch pkgmgr flake test (root)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo ">>> Starting virgin ArchLinux container test (root, with shared caches)..."
|
||||
|
||||
docker run --rm \
|
||||
-v "$PWD":/src \
|
||||
-v pkgmgr_repos:/root/Repositories \
|
||||
-v pkgmgr_pip_cache:/root/.cache/pip \
|
||||
-w /src \
|
||||
archlinux:latest \
|
||||
bash -lc '
|
||||
set -euo pipefail
|
||||
|
||||
echo ">>> Updating and upgrading Arch system..."
|
||||
pacman -Syu --noconfirm git python python-pip nix >/dev/null
|
||||
|
||||
echo ">>> Creating isolated virtual environment for pkgmgr..."
|
||||
python -m venv /tmp/pkgmgr-venv
|
||||
|
||||
echo ">>> Activating virtual environment..."
|
||||
source /tmp/pkgmgr-venv/bin/activate
|
||||
|
||||
echo ">>> Upgrading pip (cached)..."
|
||||
python -m pip install --upgrade pip >/dev/null
|
||||
|
||||
echo ">>> Installing pkgmgr from current source tree (cached pip)..."
|
||||
python -m pip install /src >/dev/null
|
||||
|
||||
echo ">>> Enabling Nix experimental features..."
|
||||
export NIX_CONFIG="experimental-features = nix-command flakes"
|
||||
|
||||
echo ">>> Running: pkgmgr update pkgmgr --clone-mode shallow --no-verification"
|
||||
pkgmgr update pkgmgr --clone-mode shallow --no-verification
|
||||
|
||||
echo ">>> Running: pkgmgr version pkgmgr"
|
||||
pkgmgr version pkgmgr
|
||||
|
||||
echo ">>> Virgin Arch (root) test completed successfully."
|
||||
'
|
||||
79
.github/workflows/test-virgin-user.yml
vendored
Normal file
79
.github/workflows/test-virgin-user.yml
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
name: Test Virgin User
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
- develop
|
||||
- "*"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
test-virgin-user:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Show Docker version
|
||||
run: docker version
|
||||
|
||||
- name: Virgin Arch pkgmgr user test (non-root with sudo)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo ">>> Starting virgin ArchLinux container test (non-root user with sudo)..."
|
||||
|
||||
docker run --rm \
|
||||
-v "$PWD":/src \
|
||||
archlinux:latest \
|
||||
bash -lc '
|
||||
set -euo pipefail
|
||||
|
||||
echo ">>> [root] Updating and upgrading Arch system..."
|
||||
pacman -Syu --noconfirm git python python-pip sudo base-devel debugedit
|
||||
|
||||
echo ">>> [root] Creating non-root user dev..."
|
||||
useradd -m dev
|
||||
|
||||
echo ">>> [root] Allowing passwordless sudo for dev..."
|
||||
echo "dev ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/dev
|
||||
chmod 0440 /etc/sudoers.d/dev
|
||||
|
||||
echo ">>> [root] Adjusting ownership of /src for dev..."
|
||||
chown -R dev:dev /src
|
||||
|
||||
echo ">>> [root] Running pkgmgr flow as non-root user dev..."
|
||||
sudo -u dev env PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 bash -lc "
|
||||
set -euo pipefail
|
||||
cd /src
|
||||
|
||||
echo \">>> [dev] Using user: \$(whoami)\"
|
||||
echo \">>> [dev] Running scripts/installation/main.sh...\"
|
||||
bash scripts/installation/main.sh
|
||||
|
||||
echo \">>> [dev] Activating venv...\"
|
||||
. \"\$HOME/.venvs/pkgmgr/bin/activate\"
|
||||
|
||||
echo \">>> [dev] Installing pkgmgr into venv via pip...\"
|
||||
python -m pip install /src >/dev/null
|
||||
|
||||
echo \">>> [dev] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=\$PKGMGR_DISABLE_NIX_FLAKE_INSTALLER\"
|
||||
echo \">>> [dev] Updating managed repo package-manager via pkgmgr...\"
|
||||
pkgmgr update pkgmgr --clone-mode shallow --no-verification
|
||||
|
||||
echo \">>> [dev] PATH:\"
|
||||
echo \"\$PATH\"
|
||||
|
||||
echo \">>> [dev] which pkgmgr:\"
|
||||
which pkgmgr || echo \">>> [dev] pkgmgr not found in PATH\"
|
||||
|
||||
echo \">>> [dev] Running: pkgmgr version pkgmgr\"
|
||||
pkgmgr version pkgmgr
|
||||
"
|
||||
|
||||
echo ">>> [root] Container flow finished."
|
||||
'
|
||||
16
.gitignore
vendored
16
.gitignore
vendored
@@ -1,9 +1,6 @@
|
||||
|
||||
# Prevents unwanted files from being committed to version control.
|
||||
|
||||
# Custom Config file
|
||||
config/config.yaml
|
||||
|
||||
# Python bytecode
|
||||
__pycache__/
|
||||
*.pyc
|
||||
@@ -15,8 +12,18 @@ venv/
|
||||
|
||||
# Build artifacts
|
||||
dist/
|
||||
build/
|
||||
build/*
|
||||
*.egg-info/
|
||||
pkg
|
||||
src/source
|
||||
package-manager-*
|
||||
|
||||
# debian
|
||||
debian/package-manager/
|
||||
debian/debhelper-build-stamp
|
||||
debian/files
|
||||
debian/.debhelper/
|
||||
debian/package-manager.substvars
|
||||
|
||||
# Editor files
|
||||
.vscode/
|
||||
@@ -31,4 +38,3 @@ Thumbs.db
|
||||
|
||||
# Ignore logs
|
||||
*.log
|
||||
package-manager-*
|
||||
110
CHANGELOG.md
110
CHANGELOG.md
@@ -1,3 +1,113 @@
|
||||
## [0.9.1] - 2025-12-10
|
||||
|
||||
* * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
|
||||
* Split virgin tests into root/user workflows; stabilized Nix installer across distros; improved test scripts with dynamic distro selection and isolated Nix stores.
|
||||
* Fixed repository directory resolution; improved `pkgmgr path` and `pkgmgr shell`; added full unit/E2E coverage.
|
||||
* Removed deprecated files and updated `.gitignore`.
|
||||
|
||||
|
||||
## [0.9.0] - 2025-12-10
|
||||
|
||||
* Introduce a virgin Arch-based Nix flake E2E workflow that validates pkgmgr’s full flake installation path using shared caches for faster and reproducible CI runs.
|
||||
|
||||
|
||||
## [0.8.0] - 2025-12-10
|
||||
|
||||
* **v0.7.15 — Installer & Command Resolution Improvements**
|
||||
|
||||
* Introduced a unified **layer-based installer pipeline** with clear precedence (OS-packages, Nix, Python, Makefile).
|
||||
* Reworked installer structure and improved Python/Nix/Makefile installers, including isolated Python venvs and refined flake-output handling.
|
||||
* Fully rewrote **command resolution** with stronger typing, safer fallbacks, and explicit support for `command: null` to mark library-only repositories.
|
||||
* Added extensive **unit and integration tests** for installer capability ordering, command resolution, and Nix/Python installer behavior.
|
||||
* Expanded documentation with capability hierarchy diagrams and scenario matrices.
|
||||
* Removed deprecated repository entries and obsolete configuration files.
|
||||
|
||||
|
||||
## [0.7.14] - 2025-12-10
|
||||
|
||||
* Fixed the clone-all integration test so that `SystemExit(0)` from the proxy is treated as a successful command instead of a failure.
|
||||
|
||||
|
||||
## [0.7.13] - 2025-12-10
|
||||
|
||||
### Fix tools path resolution and add tests
|
||||
|
||||
- Fixed a crash in `pkgmgr code` caused by missing `directory` metadata by introducing `_resolve_repository_path()` with proper fallbacks to `repositories_base_dir` / `repositories_dir`.
|
||||
- Updated `explore`, `terminal` and `code` tool commands to use the new resolver.
|
||||
- Improved VS Code workspace generation and path handling.
|
||||
- Added unit & E2E tests for tool commands.
|
||||
|
||||
|
||||
## [0.7.12] - 2025-12-09
|
||||
|
||||
* Fixed self refering alias during setup
|
||||
|
||||
|
||||
## [0.7.11] - 2025-12-09
|
||||
|
||||
* test: fix installer unit tests for OS packages and Nix dev shell
|
||||
|
||||
|
||||
## [0.7.10] - 2025-12-09
|
||||
|
||||
* Fixed test_install_pkgmgr_shallow.py
|
||||
|
||||
|
||||
## [0.7.9] - 2025-12-09
|
||||
|
||||
* 'main' and 'master' are now both accepted as branches for branch close merge
|
||||
|
||||
|
||||
## [0.7.8] - 2025-12-09
|
||||
|
||||
* Missing pyproject.toml doesn't lead to an error during release
|
||||
|
||||
|
||||
## [0.7.7] - 2025-12-09
|
||||
|
||||
* Added TEST_PATTERN parameter to execute dedicated tests
|
||||
|
||||
|
||||
## [0.7.6] - 2025-12-09
|
||||
|
||||
* Fixed pull --preview bug in e2e test
|
||||
|
||||
|
||||
## [0.7.5] - 2025-12-09
|
||||
|
||||
* Fixed wrong directory permissions for nix
|
||||
|
||||
|
||||
## [0.7.4] - 2025-12-09
|
||||
|
||||
* Fixed missing build in test workflow -> Tests pass now
|
||||
|
||||
|
||||
## [0.7.3] - 2025-12-09
|
||||
|
||||
* Fixed bug: Ignored packages are now ignored
|
||||
|
||||
|
||||
## [0.7.2] - 2025-12-09
|
||||
|
||||
* Implemented Changelog Support for Fedora and Debian
|
||||
|
||||
|
||||
## [0.7.1] - 2025-12-09
|
||||
|
||||
* Fix floating 'latest' tag logic: dereference annotated target (vX.Y.Z^{}), add tag message to avoid Git errors, ensure best-effort update without blocking releases, and update unit tests (see ChatGPT conversation: https://chatgpt.com/share/69383024-efa4-800f-a875-129b81fa40ff).
|
||||
|
||||
|
||||
## [0.7.0] - 2025-12-09
|
||||
|
||||
* Add Git helpers for branch sync and floating 'latest' tag in the release workflow, ensure main/master are updated from origin before tagging, and extend unit/e2e tests including 'pkgmgr release --help' coverage (see ChatGPT conversation: https://chatgpt.com/share/69383024-efa4-800f-a875-129b81fa40ff)
|
||||
|
||||
|
||||
## [0.6.0] - 2025-12-09
|
||||
|
||||
* Expose DISTROS and BASE_IMAGE_* variables as exported Makefile environment variables so all build and test commands can consume them dynamically. By exporting these values, every Make target (e.g., build, build-no-cache, build-missing, test-container, test-unit, test-e2e) and every delegated script in scripts/build/ and scripts/test/ now receives a consistent view of the supported distributions and their base container images. This change removes duplicated definitions across scripts, ensures reproducible builds, and allows build tooling to react automatically when new distros or base images are added to the Makefile.
|
||||
|
||||
|
||||
## [0.5.1] - 2025-12-09
|
||||
|
||||
* Refine pkgmgr release CLI close wiring and integration tests for --close flag (ChatGPT: https://chatgpt.com/share/69376b4e-8440-800f-9d06-535ec1d7a40e)
|
||||
|
||||
194
Dockerfile
194
Dockerfile
@@ -4,87 +4,6 @@
|
||||
ARG BASE_IMAGE=archlinux:latest
|
||||
FROM ${BASE_IMAGE}
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# System base + conditional package tool installation
|
||||
#
|
||||
# Important:
|
||||
# - We do NOT install Nix directly here via curl.
|
||||
# - Nix is installed/initialized by init-nix.sh, which is invoked
|
||||
# from the system packaging hooks (Arch .install, Debian postinst,
|
||||
# RPM %post).
|
||||
# ------------------------------------------------------------
|
||||
RUN set -e; \
|
||||
if [ -f /etc/os-release ]; then . /etc/os-release; else echo "No /etc/os-release found" && exit 1; fi; \
|
||||
echo "Detected base image: ${ID:-unknown} (like: ${ID_LIKE:-})"; \
|
||||
\
|
||||
if [ "$ID" = "arch" ]; then \
|
||||
pacman -Syu --noconfirm && \
|
||||
pacman -S --noconfirm --needed \
|
||||
base-devel \
|
||||
git \
|
||||
rsync \
|
||||
curl \
|
||||
ca-certificates \
|
||||
xz && \
|
||||
pacman -Scc --noconfirm; \
|
||||
elif [ "$ID" = "debian" ]; then \
|
||||
apt-get update && \
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
debhelper \
|
||||
dpkg-dev \
|
||||
git \
|
||||
rsync \
|
||||
bash \
|
||||
curl \
|
||||
ca-certificates \
|
||||
xz-utils && \
|
||||
rm -rf /var/lib/apt/lists/*; \
|
||||
elif [ "$ID" = "ubuntu" ]; then \
|
||||
apt-get update && \
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
debhelper \
|
||||
dpkg-dev \
|
||||
git \
|
||||
tzdata \
|
||||
lsb-release \
|
||||
rsync \
|
||||
bash \
|
||||
curl \
|
||||
ca-certificates \
|
||||
xz-utils && \
|
||||
rm -rf /var/lib/apt/lists/*; \
|
||||
elif [ "$ID" = "fedora" ]; then \
|
||||
dnf -y update && \
|
||||
dnf -y install \
|
||||
git \
|
||||
rsync \
|
||||
rpm-build \
|
||||
make \
|
||||
gcc \
|
||||
bash \
|
||||
curl \
|
||||
ca-certificates \
|
||||
xz && \
|
||||
dnf clean all; \
|
||||
elif [ "$ID" = "centos" ]; then \
|
||||
dnf -y update && \
|
||||
dnf -y install \
|
||||
git \
|
||||
rsync \
|
||||
rpm-build \
|
||||
make \
|
||||
gcc \
|
||||
bash \
|
||||
curl-minimal \
|
||||
ca-certificates \
|
||||
xz && \
|
||||
dnf clean all; \
|
||||
else \
|
||||
echo "Unsupported base image: ${ID}" && exit 1; \
|
||||
fi
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Nix environment defaults
|
||||
#
|
||||
@@ -96,94 +15,38 @@ ENV NIX_CONFIG="experimental-features = nix-command flakes"
|
||||
# ------------------------------------------------------------
|
||||
# Unprivileged user for Arch package build (makepkg)
|
||||
# ------------------------------------------------------------
|
||||
RUN useradd -m builder || true
|
||||
RUN useradd -m aur_builder || true
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Copy scripts and install distro dependencies
|
||||
# ------------------------------------------------------------
|
||||
WORKDIR /build
|
||||
|
||||
# Copy only scripts first so dependency installation can run early
|
||||
COPY scripts/ scripts/
|
||||
RUN find scripts -type f -name '*.sh' -exec chmod +x {} \;
|
||||
|
||||
# Install distro-specific build dependencies (and AUR builder on Arch)
|
||||
RUN scripts/installation/run-dependencies.sh
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Select distro-specific Docker entrypoint
|
||||
# ------------------------------------------------------------
|
||||
# Docker entrypoint (distro-agnostic, nutzt run-package.sh)
|
||||
# ------------------------------------------------------------
|
||||
COPY scripts/docker/entry.sh /usr/local/bin/docker-entry.sh
|
||||
RUN chmod +x /usr/local/bin/docker-entry.sh
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Build and install distro-native package-manager package
|
||||
#
|
||||
# - Arch: PKGBUILD -> pacman -U
|
||||
# - Debian: debhelper -> dpkg-buildpackage -> apt install ./package-manager_*.deb
|
||||
# - Ubuntu: same as Debian
|
||||
# - Fedora: rpmbuild -> dnf/dnf5/yum install package-manager-*.rpm
|
||||
# - CentOS: rpmbuild -> dnf/yum install package-manager-*.rpm
|
||||
#
|
||||
# Nix is NOT manually installed here; it is handled by init-nix.sh.
|
||||
# via Makefile `install` target (calls scripts/installation/run-package.sh)
|
||||
# ------------------------------------------------------------
|
||||
WORKDIR /build
|
||||
COPY . .
|
||||
RUN find scripts -type f -name '*.sh' -exec chmod +x {} \;
|
||||
|
||||
RUN set -e; \
|
||||
. /etc/os-release; \
|
||||
if [ "$ID" = "arch" ]; then \
|
||||
echo 'Building Arch package (makepkg --nodeps)...'; \
|
||||
chown -R builder:builder /build; \
|
||||
su builder -c "cd /build && rm -f package-manager-*.pkg.tar.* && makepkg --noconfirm --clean --nodeps"; \
|
||||
\
|
||||
echo 'Installing generated Arch package...'; \
|
||||
pacman -U --noconfirm package-manager-*.pkg.tar.*; \
|
||||
elif [ "$ID" = "debian" ] || [ "$ID" = "ubuntu" ]; then \
|
||||
echo 'Building Debian/Ubuntu package...'; \
|
||||
dpkg-buildpackage -us -uc -b; \
|
||||
\
|
||||
echo 'Installing generated DEB package...'; \
|
||||
apt-get update && \
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y ./../package-manager_*.deb && \
|
||||
rm -rf /var/lib/apt/lists/*; \
|
||||
elif [ "$ID" = "fedora" ] || [ "$ID" = "centos" ]; then \
|
||||
echo 'Setting up rpmbuild dirs...'; \
|
||||
mkdir -p /root/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}; \
|
||||
\
|
||||
echo "Extracting version from package-manager.spec..."; \
|
||||
version=$(grep -E '^Version:' /build/package-manager.spec | awk '{print $2}'); \
|
||||
if [ -z "$version" ]; then echo 'ERROR: Version missing!' && exit 1; fi; \
|
||||
srcdir="package-manager-${version}"; \
|
||||
\
|
||||
echo "Preparing source tree for RPM: $srcdir"; \
|
||||
rm -rf "/tmp/$srcdir"; \
|
||||
mkdir -p "/tmp/$srcdir"; \
|
||||
cp -a /build/. "/tmp/$srcdir/"; \
|
||||
\
|
||||
echo "Creating source tarball: /root/rpmbuild/SOURCES/$srcdir.tar.gz"; \
|
||||
tar czf "/root/rpmbuild/SOURCES/$srcdir.tar.gz" -C /tmp "$srcdir"; \
|
||||
\
|
||||
echo 'Copying SPEC...'; \
|
||||
cp /build/package-manager.spec /root/rpmbuild/SPECS/; \
|
||||
\
|
||||
echo 'Running rpmbuild...'; \
|
||||
cd /root/rpmbuild/SPECS && rpmbuild -bb package-manager.spec; \
|
||||
\
|
||||
echo 'Installing generated RPM (local, offline)...'; \
|
||||
rpm_path=$(find /root/rpmbuild/RPMS -name "package-manager-*.rpm" | head -n1); \
|
||||
if [ -z "$rpm_path" ]; then echo 'ERROR: RPM not found!' && exit 1; fi; \
|
||||
\
|
||||
if command -v dnf5 >/dev/null 2>&1; then \
|
||||
echo 'Using dnf5 to install local RPM (no remote repos)...'; \
|
||||
if ! dnf5 install -y --disablerepo='*' "$rpm_path"; then \
|
||||
echo 'dnf5 failed, falling back to rpm -i --nodeps'; \
|
||||
rpm -i --nodeps "$rpm_path"; \
|
||||
fi; \
|
||||
elif command -v dnf >/dev/null 2>&1; then \
|
||||
echo 'Using dnf to install local RPM (no remote repos)...'; \
|
||||
if ! dnf install -y --disablerepo='*' "$rpm_path"; then \
|
||||
echo 'dnf failed, falling back to rpm -i --nodeps'; \
|
||||
rpm -i --nodeps "$rpm_path"; \
|
||||
fi; \
|
||||
elif command -v yum >/dev/null 2>&1; then \
|
||||
echo 'Using yum to install local RPM (no remote repos)...'; \
|
||||
if ! yum localinstall -y --disablerepo='*' "$rpm_path"; then \
|
||||
echo 'yum failed, falling back to rpm -i --nodeps'; \
|
||||
rpm -i --nodeps "$rpm_path"; \
|
||||
fi; \
|
||||
else \
|
||||
echo 'No dnf/dnf5/yum found, falling back to rpm -i --nodeps...'; \
|
||||
rpm -i --nodeps "$rpm_path"; \
|
||||
fi; \
|
||||
\
|
||||
rm -rf "/tmp/$srcdir"; \
|
||||
else \
|
||||
echo "Unsupported distro: ${ID}"; \
|
||||
exit 1; \
|
||||
fi; \
|
||||
echo "Building and installing package-manager via make install..."; \
|
||||
make install; \
|
||||
rm -rf /build
|
||||
|
||||
# ------------------------------------------------------------
|
||||
@@ -191,8 +54,5 @@ RUN set -e; \
|
||||
# ------------------------------------------------------------
|
||||
WORKDIR /src
|
||||
|
||||
COPY scripts/docker-entry-dev.sh /usr/local/bin/docker-entry-dev.sh
|
||||
RUN chmod +x /usr/local/bin/docker-entry-dev.sh
|
||||
|
||||
ENTRYPOINT ["/usr/local/bin/docker-entry-dev.sh"]
|
||||
CMD ["--help"]
|
||||
ENTRYPOINT ["/usr/local/bin/docker-entry.sh"]
|
||||
CMD ["pkgmgr", "--help"]
|
||||
|
||||
292
Makefile
292
Makefile
@@ -1,275 +1,79 @@
|
||||
.PHONY: install setup uninstall aur_builder_setup \
|
||||
test build build-no-cache test-unit test-e2e test-integration
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Local Nix cache directories in the repo
|
||||
# ------------------------------------------------------------
|
||||
NIX_STORE_VOLUME := pkgmgr_nix_store
|
||||
NIX_CACHE_VOLUME := pkgmgr_nix_cache
|
||||
.PHONY: install setup uninstall \
|
||||
test build build-no-cache test-unit test-e2e test-integration \
|
||||
test-container
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Distro list and base images
|
||||
# (kept for documentation/reference; actual build logic is in scripts/build)
|
||||
# ------------------------------------------------------------
|
||||
DISTROS := arch debian ubuntu fedora centos
|
||||
DISTROS := arch debian ubuntu fedora centos
|
||||
BASE_IMAGE_ARCH := archlinux:latest
|
||||
BASE_IMAGE_DEBIAN := debian:stable-slim
|
||||
BASE_IMAGE_UBUNTU := ubuntu:latest
|
||||
BASE_IMAGE_FEDORA := fedora:latest
|
||||
BASE_IMAGE_CENTOS := quay.io/centos/centos:stream9
|
||||
|
||||
BASE_IMAGE_arch := archlinux:latest
|
||||
BASE_IMAGE_debian := debian:stable-slim
|
||||
BASE_IMAGE_ubuntu := ubuntu:latest
|
||||
BASE_IMAGE_fedora := fedora:latest
|
||||
BASE_IMAGE_centos := quay.io/centos/centos:stream9
|
||||
# Make them available in scripts
|
||||
export DISTROS
|
||||
export BASE_IMAGE_ARCH
|
||||
export BASE_IMAGE_DEBIAN
|
||||
export BASE_IMAGE_UBUNTU
|
||||
export BASE_IMAGE_FEDORA
|
||||
export BASE_IMAGE_CENTOS
|
||||
|
||||
# Helper to echo which image is used for which distro (purely informational)
|
||||
define echo_build_info
|
||||
@echo "Building image for distro '$(1)' with base image '$(2)'..."
|
||||
endef
|
||||
# PYthon Unittest Pattern
|
||||
TEST_PATTERN := test_*.py
|
||||
export TEST_PATTERN
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# PKGMGR setup (wrapper)
|
||||
# PKGMGR setup (developer wrapper -> scripts/installation/main.sh)
|
||||
# ------------------------------------------------------------
|
||||
setup: install
|
||||
@echo "Running pkgmgr setup via main.py..."
|
||||
@if [ -x "$$HOME/.venvs/pkgmgr/bin/python" ]; then \
|
||||
echo "Using virtualenv Python at $$HOME/.venvs/pkgmgr/bin/python"; \
|
||||
"$$HOME/.venvs/pkgmgr/bin/python" main.py install; \
|
||||
else \
|
||||
echo "Virtualenv not found, falling back to system python3"; \
|
||||
python3 main.py install; \
|
||||
fi
|
||||
setup:
|
||||
@bash scripts/installation/main.sh
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Docker build targets: build all images
|
||||
# Docker build targets (delegated to scripts/build)
|
||||
# ------------------------------------------------------------
|
||||
build-no-cache:
|
||||
@for distro in $(DISTROS); do \
|
||||
case "$$distro" in \
|
||||
arch) base_image="$(BASE_IMAGE_arch)" ;; \
|
||||
debian) base_image="$(BASE_IMAGE_debian)" ;; \
|
||||
ubuntu) base_image="$(BASE_IMAGE_ubuntu)" ;; \
|
||||
fedora) base_image="$(BASE_IMAGE_fedora)" ;; \
|
||||
centos) base_image="$(BASE_IMAGE_centos)" ;; \
|
||||
*) echo "Unknown distro '$$distro'" >&2; exit 1 ;; \
|
||||
esac; \
|
||||
echo "Building test image 'package-manager-test-$$distro' with no cache (BASE_IMAGE=$$base_image)..."; \
|
||||
docker build --no-cache \
|
||||
--build-arg BASE_IMAGE="$$base_image" \
|
||||
-t "package-manager-test-$$distro" . || exit $$?; \
|
||||
done
|
||||
@bash scripts/build/build-image-no-cache.sh
|
||||
|
||||
build:
|
||||
@for distro in $(DISTROS); do \
|
||||
case "$$distro" in \
|
||||
arch) base_image="$(BASE_IMAGE_arch)" ;; \
|
||||
debian) base_image="$(BASE_IMAGE_debian)" ;; \
|
||||
ubuntu) base_image="$(BASE_IMAGE_ubuntu)" ;; \
|
||||
fedora) base_image="$(BASE_IMAGE_fedora)" ;; \
|
||||
centos) base_image="$(BASE_IMAGE_centos)" ;; \
|
||||
*) echo "Unknown distro '$$distro'" >&2; exit 1 ;; \
|
||||
esac; \
|
||||
echo "Building test image 'package-manager-test-$$distro' (BASE_IMAGE=$$base_image)..."; \
|
||||
docker build \
|
||||
--build-arg BASE_IMAGE="$$base_image" \
|
||||
-t "package-manager-test-$$distro" . || exit $$?; \
|
||||
done
|
||||
|
||||
build-arch:
|
||||
@base_image="$(BASE_IMAGE_arch)"; \
|
||||
echo "Building test image 'package-manager-test-arch' (BASE_IMAGE=$$base_image)..."; \
|
||||
docker build \
|
||||
--build-arg BASE_IMAGE="$$base_image" \
|
||||
-t "package-manager-test-arch" . || exit $$?;
|
||||
@bash scripts/build/build-image.sh
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Test targets
|
||||
# Test targets (delegated to scripts/test)
|
||||
# ------------------------------------------------------------
|
||||
|
||||
# Unit tests: only in Arch container (fastest feedback), via Nix devShell
|
||||
test-unit: build-arch
|
||||
@echo "============================================================"
|
||||
@echo ">>> Running UNIT tests in Arch container (via Nix devShell)"
|
||||
@echo "============================================================"
|
||||
docker run --rm \
|
||||
-v "$$(pwd):/src" \
|
||||
--workdir /src \
|
||||
--entrypoint bash \
|
||||
"package-manager-test-arch" \
|
||||
-c '\
|
||||
set -e; \
|
||||
if [ -f /etc/os-release ]; then . /etc/os-release; fi; \
|
||||
echo "Detected container distro: $${ID:-unknown} (like: $${ID_LIKE:-})"; \
|
||||
echo "Running Python unit tests (tests/unit) via nix develop..."; \
|
||||
git config --global --add safe.directory /src || true; \
|
||||
cd /src; \
|
||||
nix develop .#default --no-write-lock-file -c \
|
||||
python -m unittest discover \
|
||||
-s tests/unit \
|
||||
-t /src \
|
||||
-p "test_*.py"; \
|
||||
'
|
||||
test-unit: build-missing
|
||||
@bash scripts/test/test-unit.sh
|
||||
|
||||
# Integration tests: also in Arch container, via Nix devShell
|
||||
test-integration: build-arch
|
||||
@echo "============================================================"
|
||||
@echo ">>> Running INTEGRATION tests in Arch container (via Nix devShell)"
|
||||
@echo "============================================================"
|
||||
docker run --rm \
|
||||
-v "$$(pwd):/src" \
|
||||
--workdir /src \
|
||||
--entrypoint bash \
|
||||
"package-manager-test-arch" \
|
||||
-c '\
|
||||
set -e; \
|
||||
if [ -f /etc/os-release ]; then . /etc/os-release; fi; \
|
||||
echo "Detected container distro: $${ID:-unknown} (like: $${ID_LIKE:-})"; \
|
||||
echo "Running Python integration tests (tests/integration) via nix develop..."; \
|
||||
git config --global --add safe.directory /src || true; \
|
||||
cd /src; \
|
||||
nix develop .#default --no-write-lock-file -c \
|
||||
python -m unittest discover \
|
||||
-s tests/integration \
|
||||
-t /src \
|
||||
-p "test_*.py"; \
|
||||
'
|
||||
test-integration: build-missing
|
||||
@bash scripts/test/test-integration.sh
|
||||
|
||||
# End-to-end tests: run in all distros via Nix devShell (tests/e2e)
|
||||
test-e2e: build
|
||||
@echo "Ensuring Docker Nix volumes exist (auto-created if missing)..."
|
||||
@echo "Running E2E tests inside Nix devShell with cached store for all distros: $(DISTROS)"
|
||||
test-e2e: build-missing
|
||||
@bash scripts/test/test-e2e.sh
|
||||
|
||||
@for distro in $(DISTROS); do \
|
||||
echo "============================================================"; \
|
||||
echo ">>> Running E2E tests in container for distro: $$distro"; \
|
||||
echo "============================================================"; \
|
||||
# Only for Arch: mount /nix as volume, for others use image-installed Nix \
|
||||
if [ "$$distro" = "arch" ]; then \
|
||||
NIX_STORE_MOUNT="-v $(NIX_STORE_VOLUME):/nix"; \
|
||||
else \
|
||||
NIX_STORE_MOUNT=""; \
|
||||
fi; \
|
||||
docker run --rm \
|
||||
-v "$$(pwd):/src" \
|
||||
$$NIX_STORE_MOUNT \
|
||||
-v "$(NIX_CACHE_VOLUME):/root/.cache/nix" \
|
||||
--workdir /src \
|
||||
--entrypoint bash \
|
||||
"package-manager-test-$$distro" \
|
||||
-c '\
|
||||
set -e; \
|
||||
if [ -f /etc/os-release ]; then . /etc/os-release; fi; \
|
||||
echo "Detected container distro: $${ID:-unknown} (like: $${ID_LIKE:-})"; \
|
||||
echo "Preparing Nix environment..."; \
|
||||
if [ -f "/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh" ]; then \
|
||||
. "/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"; \
|
||||
fi; \
|
||||
if [ -f "$$HOME/.nix-profile/etc/profile.d/nix.sh" ]; then \
|
||||
. "$$HOME/.nix-profile/etc/profile.d/nix.sh"; \
|
||||
fi; \
|
||||
PATH="/nix/var/nix/profiles/default/bin:$$HOME/.nix-profile/bin:$$PATH"; \
|
||||
export PATH; \
|
||||
echo "PATH is now:"; \
|
||||
echo "$$PATH"; \
|
||||
NIX_CMD=""; \
|
||||
if command -v nix >/dev/null 2>&1; then \
|
||||
echo "Found nix on PATH:"; \
|
||||
command -v nix; \
|
||||
NIX_CMD="nix"; \
|
||||
else \
|
||||
echo "nix not found on PATH, scanning /nix/store for a nix binary..."; \
|
||||
for path in /nix/store/*-nix-*/bin/nix; do \
|
||||
if [ -x "$$path" ]; then \
|
||||
echo "Found nix binary at $$path"; \
|
||||
NIX_CMD="$$path"; \
|
||||
break; \
|
||||
fi; \
|
||||
done; \
|
||||
fi; \
|
||||
if [ -z "$$NIX_CMD" ]; then \
|
||||
echo "ERROR: nix binary not found anywhere – cannot run devShell"; \
|
||||
exit 1; \
|
||||
fi; \
|
||||
echo "Using Nix command: $$NIX_CMD"; \
|
||||
echo "Run E2E tests inside Nix devShell (tests/e2e)..."; \
|
||||
git config --global --add safe.directory /src || true; \
|
||||
cd /src; \
|
||||
"$$NIX_CMD" develop .#default --no-write-lock-file -c \
|
||||
python3 -m unittest discover \
|
||||
-s /src/tests/e2e \
|
||||
-p "test_*.py"; \
|
||||
' || exit $$?; \
|
||||
done
|
||||
|
||||
# Combined test target for local + CI (unit + e2e + integration)
|
||||
test: build test-unit test-e2e test-integration
|
||||
test-container: build-missing
|
||||
@bash scripts/test/test-container.sh
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Installer for host systems (original logic)
|
||||
# Build only missing container images
|
||||
# ------------------------------------------------------------
|
||||
build-missing:
|
||||
@bash scripts/build/build-image-missing.sh
|
||||
|
||||
# Combined test target for local + CI (unit + integration + e2e)
|
||||
test: test-container test-unit test-integration test-e2e
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# System install (native packages, calls scripts/installation/run-package.sh)
|
||||
# ------------------------------------------------------------
|
||||
install:
|
||||
@if [ -n "$$IN_NIX_SHELL" ]; then \
|
||||
echo "Nix shell detected (IN_NIX_SHELL=1). Skipping venv/pip install – handled by Nix flake."; \
|
||||
else \
|
||||
echo "Making 'main.py' executable..."; \
|
||||
chmod +x main.py; \
|
||||
echo "Checking if global user virtual environment exists..."; \
|
||||
mkdir -p "$$HOME/.venvs"; \
|
||||
if [ ! -d "$$HOME/.venvs/pkgmgr" ]; then \
|
||||
echo "Creating global venv at $$HOME/.venvs/pkgmgr..."; \
|
||||
python3 -m venv "$$HOME/.venvs/pkgmgr"; \
|
||||
fi; \
|
||||
echo "Installing required Python packages into $$HOME/.venvs/pkgmgr..."; \
|
||||
"$$HOME/.venvs/pkgmgr/bin/python" -m ensurepip --upgrade; \
|
||||
"$$HOME/.venvs/pkgmgr/bin/pip" install --upgrade pip setuptools wheel; \
|
||||
echo "Looking for requirements.txt / _requirements.txt..."; \
|
||||
if [ -f requirements.txt ]; then \
|
||||
echo "Installing Python packages from requirements.txt..."; \
|
||||
"$$HOME/.venvs/pkgmgr/bin/pip" install -r requirements.txt; \
|
||||
elif [ -f _requirements.txt ]; then \
|
||||
echo "Installing Python packages from _requirements.txt..."; \
|
||||
"$$HOME/.venvs/pkgmgr/bin/pip" install -r _requirements.txt; \
|
||||
else \
|
||||
echo "No requirements.txt or _requirements.txt found, skipping dependency installation."; \
|
||||
fi; \
|
||||
echo "Ensuring $$HOME/.bashrc and $$HOME/.zshrc exist..."; \
|
||||
touch "$$HOME/.bashrc" "$$HOME/.zshrc"; \
|
||||
echo "Ensuring automatic activation of $$HOME/.venvs/pkgmgr for this user..."; \
|
||||
for rc in "$$HOME/.bashrc" "$$HOME/.zshrc"; do \
|
||||
rc_line='if [ -d "$${HOME}/.venvs/pkgmgr" ]; then . "$${HOME}/.venvs/pkgmgr/bin/activate"; if [ -n "$${PS1:-}" ]; then echo "Global Python virtual environment '\''~/.venvs/pkgmgr'\'' activated."; fi; fi'; \
|
||||
grep -qxF "$${rc_line}" "$$rc" || echo "$${rc_line}" >> "$$rc"; \
|
||||
done; \
|
||||
echo "Arch/Manjaro detection and optional AUR setup..."; \
|
||||
if command -v pacman >/dev/null 2>&1; then \
|
||||
$(MAKE) aur_builder_setup; \
|
||||
else \
|
||||
echo "Not Arch-based (no pacman). Skipping aur_builder/yay setup."; \
|
||||
fi; \
|
||||
echo "Installation complete. Please restart your shell (or 'exec bash' or 'exec zsh') for the changes to take effect."; \
|
||||
fi
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# AUR builder setup — only on Arch/Manjaro
|
||||
# ------------------------------------------------------------
|
||||
aur_builder_setup:
|
||||
@echo "Setting up aur_builder and yay (Arch/Manjaro)..."
|
||||
@sudo pacman -Syu --noconfirm
|
||||
@sudo pacman -S --needed --noconfirm base-devel git sudo
|
||||
@if ! getent group aur_builder >/dev/null; then sudo groupadd -r aur_builder; fi
|
||||
@if ! id -u aur_builder >/dev/null 2>&1; then sudo useradd -m -r -g aur_builder -s /bin/bash aur_builder; fi
|
||||
@echo '%aur_builder ALL=(ALL) NOPASSWD: /usr/bin/pacman' | sudo tee /etc/sudoers.d/aur_builder >/dev/null
|
||||
@sudo chmod 0440 /etc/sudoers.d/aur_builder
|
||||
@if ! sudo -u aur_builder bash -lc 'command -v yay >/dev/null'; then \
|
||||
sudo -u aur_builder bash -lc 'cd ~ && rm -rf yay && git clone https://aur.archlinux.org/yay.git && cd yay && makepkg -si --noconfirm'; \
|
||||
else \
|
||||
echo "yay already installed."; \
|
||||
fi
|
||||
@echo "aur_builder/yay setup complete."
|
||||
@echo "Building and installing distro-native package-manager for this system..."
|
||||
@bash scripts/installation/run-package.sh
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Uninstall target
|
||||
# ------------------------------------------------------------
|
||||
uninstall:
|
||||
@echo "Removing global user virtual environment if it exists..."
|
||||
@rm -rf "$$HOME/.venvs/pkgmgr"
|
||||
@echo "Cleaning up $$HOME/.bashrc and $$HOME/.zshrc entries..."
|
||||
@for rc in "$$HOME/.bashrc" "$$HOME/.zshrc"; do \
|
||||
sed -i '/\.venvs\/pkgmgr\/bin\/activate"; if \[ -n "\$${PS1:-}" \]; then echo "Global Python virtual environment '\''~\/\.venvs\/pkgmgr'\'' activated."; fi; fi/d' "$$rc"; \
|
||||
done
|
||||
@echo "Uninstallation complete. Please restart your shell (or 'exec bash' or 'exec zsh') for the changes to fully apply."
|
||||
@bash scripts/uninstall.sh
|
||||
|
||||
2
PKGBUILD
2
PKGBUILD
@@ -1,7 +1,7 @@
|
||||
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>
|
||||
|
||||
pkgname=package-manager
|
||||
pkgver=0.5.1
|
||||
pkgver=0.9.1
|
||||
pkgrel=1
|
||||
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
|
||||
arch=('any')
|
||||
|
||||
64
README.md
64
README.md
@@ -24,6 +24,15 @@
|
||||
- **Custom Aliases:**
|
||||
Generate and manage custom aliases for easy command invocation.
|
||||
|
||||
## Architecture & Setup Map 🗺️
|
||||
|
||||
The following diagram provides a full overview of PKGMGR’s package structure,
|
||||
installation layers, and setup controller flow:
|
||||
|
||||

|
||||
|
||||
**Diagram status:** *Stand: 10. Dezember 2025*
|
||||
**Always-up-to-date version:** https://s.veen.world/pkgmgrmp
|
||||
|
||||
## Installation ⚙️
|
||||
|
||||
@@ -51,55 +60,6 @@ The `make setup` command will:
|
||||
- Install required packages from `requirements.txt`.
|
||||
- Execute `python main.py install` to complete the installation.
|
||||
|
||||
## Docker Quickstart 🐳
|
||||
|
||||
Alternatively to installing locally, you can use Docker: build the image with
|
||||
|
||||
```bash
|
||||
docker build --no-cache -t pkgmgr .
|
||||
```
|
||||
|
||||
or alternativ pull it via
|
||||
|
||||
```bash
|
||||
docker pull kevinveenbirkenbach/pkgmgr:latest
|
||||
```
|
||||
|
||||
and then run
|
||||
|
||||
```bash
|
||||
docker run --rm pkgmgr --help
|
||||
```
|
||||
|
||||
## Usage 📖
|
||||
|
||||
Run the script with different commands. For example:
|
||||
|
||||
- **Install all packages:**
|
||||
```bash
|
||||
pkgmgr install --all
|
||||
```
|
||||
- **Pull updates for a specific repository:**
|
||||
```bash
|
||||
pkgmgr pull pkgmgr
|
||||
```
|
||||
- **Commit changes with extra Git parameters:**
|
||||
```bash
|
||||
pkgmgr commit pkgmgr -- -m "Your commit message"
|
||||
```
|
||||
- **List all configured packages:**
|
||||
```bash
|
||||
pkgmgr config show
|
||||
```
|
||||
- **Manage configuration:**
|
||||
```bash
|
||||
pkgmgr config init
|
||||
pkgmgr config add
|
||||
pkgmgr config edit
|
||||
pkgmgr config delete <identifier>
|
||||
pkgmgr config ignore <identifier> --set true
|
||||
```
|
||||
|
||||
## License 📄
|
||||
|
||||
This project is licensed under the MIT License.
|
||||
@@ -108,9 +68,3 @@ This project is licensed under the MIT License.
|
||||
|
||||
Kevin Veen-Birkenbach
|
||||
[https://www.veen.world](https://www.veen.world)
|
||||
|
||||
---
|
||||
|
||||
**Repository:** [github.com/kevinveenbirkenbach/package-manager](https://github.com/kevinveenbirkenbach/package-manager)
|
||||
|
||||
*Created with AI 🤖 - [View conversation](https://chatgpt.com/share/67c728c4-92d0-800f-8945-003fa9bf27c6)*
|
||||
|
||||
BIN
assets/map.png
Normal file
BIN
assets/map.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 1.9 MiB |
@@ -380,17 +380,6 @@ repositories:
|
||||
- 44D8F11FD62F878E
|
||||
- B5690EEEBB952194
|
||||
|
||||
- account: kevinveenbirkenbach
|
||||
alias: infinito-presentation
|
||||
description: This repository contains a Infinito.Nexus presentation designed for customers, end-users, investors, developers, and administrators, offering tailored content and insights for each group.
|
||||
homepage: https://github.com/kevinveenbirkenbach/infinito-presentation
|
||||
provider: github.com
|
||||
repository: infinito-presentation
|
||||
verified:
|
||||
gpg_keys:
|
||||
- 44D8F11FD62F878E
|
||||
- B5690EEEBB952194
|
||||
|
||||
- account: kevinveenbirkenbach
|
||||
description: A lightweight Python utility to generate dynamic color schemes from a single base color. Provides HSL-based color transformations for theming, UI design, and CSS variable generation. Optimized for integration in Python projects, Flask applications, and Ansible roles.
|
||||
homepage: https://github.com/kevinveenbirkenbach/colorscheme-generator
|
||||
@@ -599,17 +588,6 @@ repositories:
|
||||
- 44D8F11FD62F878E
|
||||
- B5690EEEBB952194
|
||||
|
||||
- account: kevinveenbirkenbach
|
||||
desciption: Infinito Inventory Builder — a containerized web application that dynamically generates Ansible inventory files from invokable Infinito.Nexus roles through an interactive, browser-based interface.
|
||||
homepage: https://github.com/kevinveenbirkenbach/infinito-inventory-builder
|
||||
alias: invbuild
|
||||
provider: github.com
|
||||
repository: infinito-inventory-builder
|
||||
verified:
|
||||
gpg_keys:
|
||||
- 44D8F11FD62F878E
|
||||
- B5690EEEBB952194
|
||||
|
||||
- account: kevinveenbirkenbach
|
||||
desciption: A simple Python CLI tool to safely rename Linux user accounts using usermod — including home directory migration and validation checks.
|
||||
homepage: https://github.com/kevinveenbirkenbach/user-rename
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
- account: kevinveenbirkenbach
|
||||
alias: gkfdrtdtcntr
|
||||
provider: github.com
|
||||
repository: federated-to-central-social-network-bridge
|
||||
verified:
|
||||
gpg_keys:
|
||||
- 44D8F11FD62F878E
|
||||
124
debian/changelog
vendored
124
debian/changelog
vendored
@@ -1,3 +1,127 @@
|
||||
package-manager (0.9.1-1) unstable; urgency=medium
|
||||
|
||||
* * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
|
||||
* Split virgin tests into root/user workflows; stabilized Nix installer across distros; improved test scripts with dynamic distro selection and isolated Nix stores.
|
||||
* Fixed repository directory resolution; improved `pkgmgr path` and `pkgmgr shell`; added full unit/E2E coverage.
|
||||
* Removed deprecated files and updated `.gitignore`.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 10 Dec 2025 22:56:01 +0100
|
||||
|
||||
package-manager (0.9.0-1) unstable; urgency=medium
|
||||
|
||||
* Introduce a virgin Arch-based Nix flake E2E workflow that validates pkgmgr’s full flake installation path using shared caches for faster and reproducible CI runs.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 10 Dec 2025 18:38:07 +0100
|
||||
|
||||
package-manager (0.8.0-1) unstable; urgency=medium
|
||||
|
||||
* **v0.7.15 — Installer & Command Resolution Improvements**
|
||||
|
||||
* Introduced a unified **layer-based installer pipeline** with clear precedence (OS-packages, Nix, Python, Makefile).
|
||||
* Reworked installer structure and improved Python/Nix/Makefile installers, including isolated Python venvs and refined flake-output handling.
|
||||
* Fully rewrote **command resolution** with stronger typing, safer fallbacks, and explicit support for `command: null` to mark library-only repositories.
|
||||
* Added extensive **unit and integration tests** for installer capability ordering, command resolution, and Nix/Python installer behavior.
|
||||
* Expanded documentation with capability hierarchy diagrams and scenario matrices.
|
||||
* Removed deprecated repository entries and obsolete configuration files.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 10 Dec 2025 17:31:57 +0100
|
||||
|
||||
package-manager (0.7.14-1) unstable; urgency=medium
|
||||
|
||||
* Fixed the clone-all integration test so that `SystemExit(0)` from the proxy is treated as a successful command instead of a failure.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 10 Dec 2025 10:38:33 +0100
|
||||
|
||||
package-manager (0.7.13-1) unstable; urgency=medium
|
||||
|
||||
* Automated release.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 10 Dec 2025 10:27:24 +0100
|
||||
|
||||
package-manager (0.7.12-1) unstable; urgency=medium
|
||||
|
||||
* Fixed self refering alias during setup
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 23:36:35 +0100
|
||||
|
||||
package-manager (0.7.11-1) unstable; urgency=medium
|
||||
|
||||
* test: fix installer unit tests for OS packages and Nix dev shell
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 23:16:46 +0100
|
||||
|
||||
package-manager (0.7.10-1) unstable; urgency=medium
|
||||
|
||||
* Fixed test_install_pkgmgr_shallow.py
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 22:57:08 +0100
|
||||
|
||||
package-manager (0.7.9-1) unstable; urgency=medium
|
||||
|
||||
* 'main' and 'master' are now both accepted as branches for branch close merge
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 21:19:13 +0100
|
||||
|
||||
package-manager (0.7.8-1) unstable; urgency=medium
|
||||
|
||||
* Missing pyproject.toml doesn't lead to an error during release
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 21:03:24 +0100
|
||||
|
||||
package-manager (0.7.7-1) unstable; urgency=medium
|
||||
|
||||
* Added TEST_PATTERN parameter to execute dedicated tests
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 17:54:38 +0100
|
||||
|
||||
package-manager (0.7.6-1) unstable; urgency=medium
|
||||
|
||||
* Fixed pull --preview bug in e2e test
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 17:14:19 +0100
|
||||
|
||||
package-manager (0.7.5-1) unstable; urgency=medium
|
||||
|
||||
* Fixed wrong directory permissions for nix
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 16:45:42 +0100
|
||||
|
||||
package-manager (0.7.4-1) unstable; urgency=medium
|
||||
|
||||
* Fixed missing build in test workflow -> Tests pass now
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 16:22:00 +0100
|
||||
|
||||
package-manager (0.7.3-1) unstable; urgency=medium
|
||||
|
||||
* Fixed bug: Ignored packages are now ignored
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 16:08:31 +0100
|
||||
|
||||
package-manager (0.7.2-1) unstable; urgency=medium
|
||||
|
||||
* Implemented Changelog Support for Fedora and Debian
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 15:48:58 +0100
|
||||
|
||||
package-manager (0.7.1-1) unstable; urgency=medium
|
||||
|
||||
* Fix floating 'latest' tag logic: dereference annotated target (vX.Y.Z^{}), add tag message to avoid Git errors, ensure best-effort update without blocking releases, and update unit tests (see ChatGPT conversation: https://chatgpt.com/share/69383024-efa4-800f-a875-129b81fa40ff).
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 15:26:54 +0100
|
||||
|
||||
package-manager (0.7.0-1) unstable; urgency=medium
|
||||
|
||||
* Add Git helpers for branch sync and floating 'latest' tag in the release workflow, ensure main/master are updated from origin before tagging, and extend unit/e2e tests including 'pkgmgr release --help' coverage (see ChatGPT conversation: https://chatgpt.com/share/69383024-efa4-800f-a875-129b81fa40ff)
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 15:21:03 +0100
|
||||
|
||||
package-manager (0.6.0-1) unstable; urgency=medium
|
||||
|
||||
* Expose DISTROS and BASE_IMAGE_* variables as exported Makefile environment variables so all build and test commands can consume them dynamically. By exporting these values, every Make target (e.g., build, build-no-cache, build-missing, test-container, test-unit, test-e2e) and every delegated script in scripts/build/ and scripts/test/ now receives a consistent view of the supported distributions and their base container images. This change removes duplicated definitions across scripts, ensures reproducible builds, and allows build tooling to react automatically when new distros or base images are added to the Makefile.
|
||||
|
||||
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 09 Dec 2025 05:59:58 +0100
|
||||
|
||||
package-manager (0.5.1-1) unstable; urgency=medium
|
||||
|
||||
* Refine pkgmgr release CLI close wiring and integration tests for --close flag (ChatGPT: https://chatgpt.com/share/69376b4e-8440-800f-9d06-535ec1d7a40e)
|
||||
|
||||
12
flake.nix
12
flake.nix
@@ -31,7 +31,7 @@
|
||||
rec {
|
||||
pkgmgr = pyPkgs.buildPythonApplication {
|
||||
pname = "package-manager";
|
||||
version = "0.5.1";
|
||||
version = "0.9.1";
|
||||
|
||||
# Use the git repo as source
|
||||
src = ./.;
|
||||
@@ -48,9 +48,7 @@
|
||||
# Runtime dependencies (matches [project.dependencies])
|
||||
propagatedBuildInputs = [
|
||||
pyPkgs.pyyaml
|
||||
# Add more here if needed, e.g.:
|
||||
# pyPkgs.click
|
||||
# pyPkgs.rich
|
||||
pyPkgs.pip
|
||||
];
|
||||
|
||||
doCheck = false;
|
||||
@@ -72,10 +70,16 @@
|
||||
ansiblePkg =
|
||||
if pkgs ? ansible-core then pkgs.ansible-core
|
||||
else pkgs.ansible;
|
||||
|
||||
# Python 3 + pip für alles, was "python3 -m pip" macht
|
||||
pythonWithPip = pkgs.python3.withPackages (ps: [
|
||||
ps.pip
|
||||
]);
|
||||
in
|
||||
{
|
||||
default = pkgs.mkShell {
|
||||
buildInputs = [
|
||||
pythonWithPip
|
||||
pkgmgrPkg
|
||||
pkgs.git
|
||||
ansiblePkg
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
Name: package-manager
|
||||
Version: 0.5.1
|
||||
Version: 0.9.1
|
||||
Release: 1%{?dist}
|
||||
Summary: Wrapper that runs Kevin's package-manager via Nix flake
|
||||
|
||||
@@ -35,35 +35,36 @@ available on the system.
|
||||
%install
|
||||
rm -rf %{buildroot}
|
||||
install -d %{buildroot}%{_bindir}
|
||||
install -d %{buildroot}%{_libdir}/package-manager
|
||||
# Install project tree into a fixed, architecture-independent location.
|
||||
install -d %{buildroot}/usr/lib/package-manager
|
||||
|
||||
# Copy full project source into /usr/lib/package-manager
|
||||
cp -a . %{buildroot}%{_libdir}/package-manager/
|
||||
cp -a . %{buildroot}/usr/lib/package-manager/
|
||||
|
||||
# Wrapper
|
||||
install -m0755 scripts/pkgmgr-wrapper.sh %{buildroot}%{_bindir}/pkgmgr
|
||||
|
||||
# Shared Nix init script (ensure it is executable in the installed tree)
|
||||
install -m0755 scripts/init-nix.sh %{buildroot}%{_libdir}/package-manager/init-nix.sh
|
||||
install -m0755 scripts/init-nix.sh %{buildroot}/usr/lib/package-manager/init-nix.sh
|
||||
|
||||
# Remove packaging-only and development artefacts from the installed tree
|
||||
rm -rf \
|
||||
%{buildroot}%{_libdir}/package-manager/PKGBUILD \
|
||||
%{buildroot}%{_libdir}/package-manager/Dockerfile \
|
||||
%{buildroot}%{_libdir}/package-manager/debian \
|
||||
%{buildroot}%{_libdir}/package-manager/.git \
|
||||
%{buildroot}%{_libdir}/package-manager/.github \
|
||||
%{buildroot}%{_libdir}/package-manager/tests \
|
||||
%{buildroot}%{_libdir}/package-manager/.gitignore \
|
||||
%{buildroot}%{_libdir}/package-manager/__pycache__ \
|
||||
%{buildroot}%{_libdir}/package-manager/.gitkeep || true
|
||||
%{buildroot}/usr/lib/package-manager/PKGBUILD \
|
||||
%{buildroot}/usr/lib/package-manager/Dockerfile \
|
||||
%{buildroot}/usr/lib/package-manager/debian \
|
||||
%{buildroot}/usr/lib/package-manager/.git \
|
||||
%{buildroot}/usr/lib/package-manager/.github \
|
||||
%{buildroot}/usr/lib/package-manager/tests \
|
||||
%{buildroot}/usr/lib/package-manager/.gitignore \
|
||||
%{buildroot}/usr/lib/package-manager/__pycache__ \
|
||||
%{buildroot}/usr/lib/package-manager/.gitkeep || true
|
||||
|
||||
%post
|
||||
# Initialize Nix (if needed) after installing the package-manager files.
|
||||
if [ -x %{_libdir}/package-manager/init-nix.sh ]; then
|
||||
%{_libdir}/package-manager/init-nix.sh || true
|
||||
if [ -x /usr/lib/package-manager/init-nix.sh ]; then
|
||||
/usr/lib/package-manager/init-nix.sh || true
|
||||
else
|
||||
echo ">>> Warning: %{_libdir}/package-manager/init-nix.sh not found or not executable."
|
||||
echo ">>> Warning: /usr/lib/package-manager/init-nix.sh not found or not executable."
|
||||
fi
|
||||
|
||||
%postun
|
||||
@@ -73,8 +74,66 @@ echo ">>> package-manager removed. Nix itself was not removed."
|
||||
%doc README.md
|
||||
%license LICENSE
|
||||
%{_bindir}/pkgmgr
|
||||
%{_libdir}/package-manager/
|
||||
/usr/lib/package-manager/
|
||||
|
||||
%changelog
|
||||
* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.9.1-1
|
||||
- * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
|
||||
* Split virgin tests into root/user workflows; stabilized Nix installer across distros; improved test scripts with dynamic distro selection and isolated Nix stores.
|
||||
* Fixed repository directory resolution; improved `pkgmgr path` and `pkgmgr shell`; added full unit/E2E coverage.
|
||||
* Removed deprecated files and updated `.gitignore`.
|
||||
|
||||
* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.9.0-1
|
||||
- Introduce a virgin Arch-based Nix flake E2E workflow that validates pkgmgr’s full flake installation path using shared caches for faster and reproducible CI runs.
|
||||
|
||||
* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.8.0-1
|
||||
- **v0.7.15 — Installer & Command Resolution Improvements**
|
||||
|
||||
* Introduced a unified **layer-based installer pipeline** with clear precedence (OS-packages, Nix, Python, Makefile).
|
||||
* Reworked installer structure and improved Python/Nix/Makefile installers, including isolated Python venvs and refined flake-output handling.
|
||||
* Fully rewrote **command resolution** with stronger typing, safer fallbacks, and explicit support for `command: null` to mark library-only repositories.
|
||||
* Added extensive **unit and integration tests** for installer capability ordering, command resolution, and Nix/Python installer behavior.
|
||||
* Expanded documentation with capability hierarchy diagrams and scenario matrices.
|
||||
* Removed deprecated repository entries and obsolete configuration files.
|
||||
|
||||
* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.14-1
|
||||
- Fixed the clone-all integration test so that `SystemExit(0)` from the proxy is treated as a successful command instead of a failure.
|
||||
|
||||
* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.13-1
|
||||
- Automated release.
|
||||
|
||||
* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.12-1
|
||||
- Fixed self refering alias during setup
|
||||
|
||||
* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.11-1
|
||||
- test: fix installer unit tests for OS packages and Nix dev shell
|
||||
|
||||
* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.10-1
|
||||
- Fixed test_install_pkgmgr_shallow.py
|
||||
|
||||
* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.9-1
|
||||
- 'main' and 'master' are now both accepted as branches for branch close merge
|
||||
|
||||
* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.8-1
|
||||
- Missing pyproject.toml doesn't lead to an error during release
|
||||
|
||||
* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.7-1
|
||||
- Added TEST_PATTERN parameter to execute dedicated tests
|
||||
|
||||
* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.6-1
|
||||
- Fixed pull --preview bug in e2e test
|
||||
|
||||
* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.5-1
|
||||
- Fixed wrong directory permissions for nix
|
||||
|
||||
* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.4-1
|
||||
- Fixed missing build in test workflow -> Tests pass now
|
||||
|
||||
* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.3-1
|
||||
- Fixed bug: Ignored packages are now ignored
|
||||
|
||||
* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.2-1
|
||||
- Implemented Changelog Support for Fedora and Debian
|
||||
|
||||
* Sat Dec 06 2025 Kevin Veen-Birkenbach <info@veen.world> - 0.1.1-1
|
||||
- Initial RPM packaging for package-manager
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
version: 1
|
||||
|
||||
author: "Kevin Veen-Birkenbach"
|
||||
url: "https://github.com/kevinveenbirkenbach/package-manager"
|
||||
description: "A configurable Python-based package manager for managing multiple repositories via Bash."
|
||||
|
||||
dependencies: []
|
||||
@@ -1,4 +1,4 @@
|
||||
# pkgmgr/branch_commands.py
|
||||
# pkgmgr/actions/branch/__init__.py
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
@@ -6,40 +6,53 @@
|
||||
High-level helpers for branch-related operations.
|
||||
|
||||
This module encapsulates the actual Git logic so the CLI layer
|
||||
(pkgmgr.cli_core.commands.branch) stays thin and testable.
|
||||
(pkgmgr.cli.commands.branch) stays thin and testable.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from pkgmgr.git_utils import run_git, GitError, get_current_branch
|
||||
from pkgmgr.core.git import run_git, GitError, get_current_branch
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Branch creation (open)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def open_branch(
|
||||
name: Optional[str],
|
||||
base_branch: str = "main",
|
||||
fallback_base: str = "master",
|
||||
cwd: str = ".",
|
||||
) -> None:
|
||||
"""
|
||||
Create and push a new feature branch on top of `base_branch`.
|
||||
Create and push a new feature branch on top of a base branch.
|
||||
|
||||
The base branch is resolved by:
|
||||
1. Trying 'base_branch' (default: 'main')
|
||||
2. Falling back to 'fallback_base' (default: 'master')
|
||||
|
||||
Steps:
|
||||
1) git fetch origin
|
||||
2) git checkout <base_branch>
|
||||
3) git pull origin <base_branch>
|
||||
4) git checkout -b <name>
|
||||
5) git push -u origin <name>
|
||||
1) git fetch origin
|
||||
2) git checkout <resolved_base>
|
||||
3) git pull origin <resolved_base>
|
||||
4) git checkout -b <name>
|
||||
5) git push -u origin <name>
|
||||
|
||||
If `name` is None or empty, the user is prompted on stdin.
|
||||
If `name` is None or empty, the user is prompted to enter one.
|
||||
"""
|
||||
|
||||
# Request name interactively if not provided
|
||||
if not name:
|
||||
name = input("Enter new branch name: ").strip()
|
||||
|
||||
if not name:
|
||||
raise RuntimeError("Branch name must not be empty.")
|
||||
|
||||
# Resolve which base branch to use (main or master)
|
||||
resolved_base = _resolve_base_branch(base_branch, fallback_base, cwd=cwd)
|
||||
|
||||
# 1) Fetch from origin
|
||||
try:
|
||||
run_git(["fetch", "origin"], cwd=cwd)
|
||||
@@ -50,18 +63,18 @@ def open_branch(
|
||||
|
||||
# 2) Checkout base branch
|
||||
try:
|
||||
run_git(["checkout", base_branch], cwd=cwd)
|
||||
run_git(["checkout", resolved_base], cwd=cwd)
|
||||
except GitError as exc:
|
||||
raise RuntimeError(
|
||||
f"Failed to checkout base branch {base_branch!r}: {exc}"
|
||||
f"Failed to checkout base branch {resolved_base!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 3) Pull latest changes on base
|
||||
# 3) Pull latest changes for base branch
|
||||
try:
|
||||
run_git(["pull", "origin", base_branch], cwd=cwd)
|
||||
run_git(["pull", "origin", resolved_base], cwd=cwd)
|
||||
except GitError as exc:
|
||||
raise RuntimeError(
|
||||
f"Failed to pull latest changes for base branch {base_branch!r}: {exc}"
|
||||
f"Failed to pull latest changes for base branch {resolved_base!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 4) Create new branch
|
||||
@@ -69,10 +82,10 @@ def open_branch(
|
||||
run_git(["checkout", "-b", name], cwd=cwd)
|
||||
except GitError as exc:
|
||||
raise RuntimeError(
|
||||
f"Failed to create new branch {name!r} from base {base_branch!r}: {exc}"
|
||||
f"Failed to create new branch {name!r} from base {resolved_base!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 5) Push and set upstream
|
||||
# 5) Push new branch to origin
|
||||
try:
|
||||
run_git(["push", "-u", "origin", name], cwd=cwd)
|
||||
except GitError as exc:
|
||||
@@ -81,15 +94,21 @@ def open_branch(
|
||||
) from exc
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Base branch resolver (shared by open/close)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _resolve_base_branch(
|
||||
preferred: str,
|
||||
fallback: str,
|
||||
cwd: str,
|
||||
) -> str:
|
||||
"""
|
||||
Resolve the base branch to use for merging.
|
||||
Resolve the base branch to use.
|
||||
|
||||
Try `preferred` first (default: main),
|
||||
fall back to `fallback` (default: master).
|
||||
|
||||
Try `preferred` (default: main) first, then `fallback` (default: master).
|
||||
Raise RuntimeError if neither exists.
|
||||
"""
|
||||
for candidate in (preferred, fallback):
|
||||
@@ -104,6 +123,10 @@ def _resolve_base_branch(
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Branch closing (merge + deletion)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def close_branch(
|
||||
name: Optional[str],
|
||||
base_branch: str = "main",
|
||||
@@ -111,23 +134,22 @@ def close_branch(
|
||||
cwd: str = ".",
|
||||
) -> None:
|
||||
"""
|
||||
Merge a feature branch into the main/master branch and optionally delete it.
|
||||
Merge a feature branch into the base branch and delete it afterwards.
|
||||
|
||||
Steps:
|
||||
1) Determine branch name (argument or current branch)
|
||||
2) Resolve base branch (prefers `base_branch`, falls back to `fallback_base`)
|
||||
3) Ask for confirmation (y/N)
|
||||
4) git fetch origin
|
||||
5) git checkout <base>
|
||||
6) git pull origin <base>
|
||||
7) git merge --no-ff <name>
|
||||
8) git push origin <base>
|
||||
9) Delete branch locally and on origin
|
||||
|
||||
If the user does not confirm with 'y', the operation is aborted.
|
||||
1) Determine the branch name (argument or current branch)
|
||||
2) Resolve base branch (main/master)
|
||||
3) Ask for confirmation
|
||||
4) git fetch origin
|
||||
5) git checkout <base>
|
||||
6) git pull origin <base>
|
||||
7) git merge --no-ff <name>
|
||||
8) git push origin <base>
|
||||
9) Delete branch locally
|
||||
10) Delete branch on origin (best effort)
|
||||
"""
|
||||
|
||||
# 1) Determine which branch to close
|
||||
# 1) Determine which branch should be closed
|
||||
if not name:
|
||||
try:
|
||||
name = get_current_branch(cwd=cwd)
|
||||
@@ -137,7 +159,7 @@ def close_branch(
|
||||
if not name:
|
||||
raise RuntimeError("Branch name must not be empty.")
|
||||
|
||||
# 2) Resolve base branch (main/master)
|
||||
# 2) Resolve base branch
|
||||
target_base = _resolve_base_branch(base_branch, fallback_base, cwd=cwd)
|
||||
|
||||
if name == target_base:
|
||||
@@ -146,7 +168,7 @@ def close_branch(
|
||||
"Please specify a feature branch."
|
||||
)
|
||||
|
||||
# 3) Confirmation prompt
|
||||
# 3) Ask user for confirmation
|
||||
prompt = (
|
||||
f"Merge branch '{name}' into '{target_base}' and delete it afterwards? "
|
||||
"(y/N): "
|
||||
@@ -164,7 +186,7 @@ def close_branch(
|
||||
f"Failed to fetch from origin before closing branch {name!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 5) Checkout base branch
|
||||
# 5) Checkout base
|
||||
try:
|
||||
run_git(["checkout", target_base], cwd=cwd)
|
||||
except GitError as exc:
|
||||
@@ -172,7 +194,7 @@ def close_branch(
|
||||
f"Failed to checkout base branch {target_base!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 6) Pull latest base
|
||||
# 6) Pull latest base state
|
||||
try:
|
||||
run_git(["pull", "origin", target_base], cwd=cwd)
|
||||
except GitError as exc:
|
||||
@@ -180,7 +202,7 @@ def close_branch(
|
||||
f"Failed to pull latest changes for base branch {target_base!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 7) Merge feature branch into base
|
||||
# 7) Merge the feature branch
|
||||
try:
|
||||
run_git(["merge", "--no-ff", name], cwd=cwd)
|
||||
except GitError as exc:
|
||||
@@ -193,22 +215,21 @@ def close_branch(
|
||||
run_git(["push", "origin", target_base], cwd=cwd)
|
||||
except GitError as exc:
|
||||
raise RuntimeError(
|
||||
f"Failed to push base branch {target_base!r} to origin after merge: {exc}"
|
||||
f"Failed to push base branch {target_base!r} after merge: {exc}"
|
||||
) from exc
|
||||
|
||||
# 9) Delete feature branch locally
|
||||
# 9) Delete branch locally
|
||||
try:
|
||||
run_git(["branch", "-d", name], cwd=cwd)
|
||||
except GitError as exc:
|
||||
raise RuntimeError(
|
||||
f"Failed to delete local branch {name!r} after merge: {exc}"
|
||||
f"Failed to delete local branch {name!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 10) Delete feature branch on origin (best effort)
|
||||
# 10) Delete branch on origin (best effort)
|
||||
try:
|
||||
run_git(["push", "origin", "--delete", name], cwd=cwd)
|
||||
except GitError as exc:
|
||||
# Remote delete is nice-to-have; surface as RuntimeError for clarity.
|
||||
raise RuntimeError(
|
||||
f"Branch {name!r} was deleted locally, but remote deletion failed: {exc}"
|
||||
) from exc
|
||||
@@ -13,7 +13,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from pkgmgr.git_utils import run_git, GitError
|
||||
from pkgmgr.core.git import run_git, GitError
|
||||
|
||||
|
||||
def generate_changelog(
|
||||
@@ -26,8 +26,8 @@ import os
|
||||
import subprocess
|
||||
from typing import Any, Dict
|
||||
|
||||
from pkgmgr.generate_alias import generate_alias
|
||||
from pkgmgr.save_user_config import save_user_config
|
||||
from pkgmgr.core.command.alias import generate_alias
|
||||
from pkgmgr.core.config.save import save_user_config
|
||||
|
||||
|
||||
def config_init(
|
||||
@@ -1,5 +1,5 @@
|
||||
import yaml
|
||||
from .load_config import load_config
|
||||
from pkgmgr.core.config.load import load_config
|
||||
|
||||
def show_config(selected_repos, user_config_path, full_config=False):
|
||||
"""Display configuration for one or more repositories, or the entire merged config."""
|
||||
218
pkgmgr/actions/install/__init__.py
Normal file
218
pkgmgr/actions/install/__init__.py
Normal file
@@ -0,0 +1,218 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
High-level entry point for repository installation.
|
||||
|
||||
Responsibilities:
|
||||
|
||||
- Ensure the repository directory exists (clone if necessary).
|
||||
- Verify the repository (GPG / commit checks).
|
||||
- Build a RepoContext object.
|
||||
- Delegate the actual installation decision logic to InstallationPipeline.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.repository.verify import verify_repository
|
||||
from pkgmgr.actions.repository.clone import clone_repos
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
from pkgmgr.actions.install.installers.os_packages import (
|
||||
ArchPkgbuildInstaller,
|
||||
DebianControlInstaller,
|
||||
RpmSpecInstaller,
|
||||
)
|
||||
from pkgmgr.actions.install.installers.nix_flake import (
|
||||
NixFlakeInstaller,
|
||||
)
|
||||
from pkgmgr.actions.install.installers.python import PythonInstaller
|
||||
from pkgmgr.actions.install.installers.makefile import (
|
||||
MakefileInstaller,
|
||||
)
|
||||
from pkgmgr.actions.install.pipeline import InstallationPipeline
|
||||
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
# All available installers, in the order they should be considered.
|
||||
INSTALLERS = [
|
||||
ArchPkgbuildInstaller(),
|
||||
DebianControlInstaller(),
|
||||
RpmSpecInstaller(),
|
||||
NixFlakeInstaller(),
|
||||
PythonInstaller(),
|
||||
MakefileInstaller(),
|
||||
]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Internal helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _ensure_repo_dir(
|
||||
repo: Repository,
|
||||
repositories_base_dir: str,
|
||||
all_repos: List[Repository],
|
||||
preview: bool,
|
||||
no_verification: bool,
|
||||
clone_mode: str,
|
||||
identifier: str,
|
||||
) -> str | None:
|
||||
"""
|
||||
Compute and, if necessary, clone the repository directory.
|
||||
|
||||
Returns the absolute repository path or None if cloning ultimately failed.
|
||||
"""
|
||||
repo_dir = get_repo_dir(repositories_base_dir, repo)
|
||||
|
||||
if not os.path.exists(repo_dir):
|
||||
print(
|
||||
f"Repository directory '{repo_dir}' does not exist. "
|
||||
f"Cloning it now..."
|
||||
)
|
||||
clone_repos(
|
||||
[repo],
|
||||
repositories_base_dir,
|
||||
all_repos,
|
||||
preview,
|
||||
no_verification,
|
||||
clone_mode,
|
||||
)
|
||||
if not os.path.exists(repo_dir):
|
||||
print(
|
||||
f"Cloning failed for repository {identifier}. "
|
||||
f"Skipping installation."
|
||||
)
|
||||
return None
|
||||
|
||||
return repo_dir
|
||||
|
||||
|
||||
def _verify_repo(
|
||||
repo: Repository,
|
||||
repo_dir: str,
|
||||
no_verification: bool,
|
||||
identifier: str,
|
||||
) -> bool:
|
||||
"""
|
||||
Verify a repository using the configured verification data.
|
||||
|
||||
Returns True if verification is considered okay and installation may continue.
|
||||
"""
|
||||
verified_info = repo.get("verified")
|
||||
verified_ok, errors, _commit_hash, _signing_key = verify_repository(
|
||||
repo,
|
||||
repo_dir,
|
||||
mode="local",
|
||||
no_verification=no_verification,
|
||||
)
|
||||
|
||||
if not no_verification and verified_info and not verified_ok:
|
||||
print(f"Warning: Verification failed for {identifier}:")
|
||||
for err in errors:
|
||||
print(f" - {err}")
|
||||
choice = input("Continue anyway? [y/N]: ").strip().lower()
|
||||
if choice != "y":
|
||||
print(f"Skipping installation for {identifier}.")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _create_context(
|
||||
repo: Repository,
|
||||
identifier: str,
|
||||
repo_dir: str,
|
||||
repositories_base_dir: str,
|
||||
bin_dir: str,
|
||||
all_repos: List[Repository],
|
||||
no_verification: bool,
|
||||
preview: bool,
|
||||
quiet: bool,
|
||||
clone_mode: str,
|
||||
update_dependencies: bool,
|
||||
) -> RepoContext:
|
||||
"""
|
||||
Build a RepoContext instance for the given repository.
|
||||
"""
|
||||
return RepoContext(
|
||||
repo=repo,
|
||||
identifier=identifier,
|
||||
repo_dir=repo_dir,
|
||||
repositories_base_dir=repositories_base_dir,
|
||||
bin_dir=bin_dir,
|
||||
all_repos=all_repos,
|
||||
no_verification=no_verification,
|
||||
preview=preview,
|
||||
quiet=quiet,
|
||||
clone_mode=clone_mode,
|
||||
update_dependencies=update_dependencies,
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public API
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def install_repos(
|
||||
selected_repos: List[Repository],
|
||||
repositories_base_dir: str,
|
||||
bin_dir: str,
|
||||
all_repos: List[Repository],
|
||||
no_verification: bool,
|
||||
preview: bool,
|
||||
quiet: bool,
|
||||
clone_mode: str,
|
||||
update_dependencies: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Install one or more repositories according to the configured installers
|
||||
and the CLI layer precedence rules.
|
||||
"""
|
||||
pipeline = InstallationPipeline(INSTALLERS)
|
||||
|
||||
for repo in selected_repos:
|
||||
identifier = get_repo_identifier(repo, all_repos)
|
||||
|
||||
repo_dir = _ensure_repo_dir(
|
||||
repo=repo,
|
||||
repositories_base_dir=repositories_base_dir,
|
||||
all_repos=all_repos,
|
||||
preview=preview,
|
||||
no_verification=no_verification,
|
||||
clone_mode=clone_mode,
|
||||
identifier=identifier,
|
||||
)
|
||||
if not repo_dir:
|
||||
continue
|
||||
|
||||
if not _verify_repo(
|
||||
repo=repo,
|
||||
repo_dir=repo_dir,
|
||||
no_verification=no_verification,
|
||||
identifier=identifier,
|
||||
):
|
||||
continue
|
||||
|
||||
ctx = _create_context(
|
||||
repo=repo,
|
||||
identifier=identifier,
|
||||
repo_dir=repo_dir,
|
||||
repositories_base_dir=repositories_base_dir,
|
||||
bin_dir=bin_dir,
|
||||
all_repos=all_repos,
|
||||
no_verification=no_verification,
|
||||
preview=preview,
|
||||
quiet=quiet,
|
||||
clone_mode=clone_mode,
|
||||
update_dependencies=update_dependencies,
|
||||
)
|
||||
|
||||
pipeline.run(ctx)
|
||||
@@ -38,7 +38,7 @@ from abc import ABC, abstractmethod
|
||||
from typing import Iterable, TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pkgmgr.context import RepoContext
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
19
pkgmgr/actions/install/installers/__init__.py
Normal file
19
pkgmgr/actions/install/installers/__init__.py
Normal file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Installer package for pkgmgr.
|
||||
|
||||
This exposes all installer classes so users can import them directly from
|
||||
pkgmgr.actions.install.installers.
|
||||
"""
|
||||
|
||||
from pkgmgr.actions.install.installers.base import BaseInstaller # noqa: F401
|
||||
from pkgmgr.actions.install.installers.nix_flake import NixFlakeInstaller # noqa: F401
|
||||
from pkgmgr.actions.install.installers.python import PythonInstaller # noqa: F401
|
||||
from pkgmgr.actions.install.installers.makefile import MakefileInstaller # noqa: F401
|
||||
|
||||
# OS-specific installers
|
||||
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller # noqa: F401
|
||||
from pkgmgr.actions.install.installers.os_packages.debian_control import DebianControlInstaller # noqa: F401
|
||||
from pkgmgr.actions.install.installers.os_packages.rpm_spec import RpmSpecInstaller # noqa: F401
|
||||
@@ -8,8 +8,8 @@ Base interface for all installer components in the pkgmgr installation pipeline.
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Set
|
||||
|
||||
from pkgmgr.context import RepoContext
|
||||
from pkgmgr.capabilities import CAPABILITY_MATCHERS
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
from pkgmgr.actions.install.capabilities import CAPABILITY_MATCHERS
|
||||
|
||||
|
||||
class BaseInstaller(ABC):
|
||||
97
pkgmgr/actions/install/installers/makefile.py
Normal file
97
pkgmgr/actions/install/installers/makefile.py
Normal file
@@ -0,0 +1,97 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
from pkgmgr.actions.install.installers.base import BaseInstaller
|
||||
from pkgmgr.core.command.run import run_command
|
||||
|
||||
|
||||
class MakefileInstaller(BaseInstaller):
|
||||
"""
|
||||
Generic installer that runs `make install` if a Makefile with an
|
||||
install target is present.
|
||||
|
||||
Safety rules:
|
||||
- If PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 is set, this installer
|
||||
is globally disabled.
|
||||
- The higher-level InstallationPipeline ensures that Makefile
|
||||
installation does not run if a stronger CLI layer already owns
|
||||
the command (e.g. Nix or OS packages).
|
||||
"""
|
||||
|
||||
layer = "makefile"
|
||||
MAKEFILE_NAME = "Makefile"
|
||||
|
||||
def supports(self, ctx: RepoContext) -> bool:
|
||||
"""
|
||||
Return True if this repository has a Makefile and the installer
|
||||
is not globally disabled.
|
||||
"""
|
||||
# Optional global kill switch.
|
||||
if os.environ.get("PKGMGR_DISABLE_MAKEFILE_INSTALLER") == "1":
|
||||
if not ctx.quiet:
|
||||
print(
|
||||
"[INFO] MakefileInstaller is disabled via "
|
||||
"PKGMGR_DISABLE_MAKEFILE_INSTALLER."
|
||||
)
|
||||
return False
|
||||
|
||||
makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
|
||||
return os.path.exists(makefile_path)
|
||||
|
||||
def _has_install_target(self, makefile_path: str) -> bool:
|
||||
"""
|
||||
Heuristically check whether the Makefile defines an install target.
|
||||
|
||||
We look for:
|
||||
|
||||
- a plain 'install:' target, or
|
||||
- any 'install-*:' style target.
|
||||
"""
|
||||
try:
|
||||
with open(makefile_path, "r", encoding="utf-8", errors="ignore") as f:
|
||||
content = f.read()
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
# Simple heuristics: look for "install:" or targets starting with "install-"
|
||||
if re.search(r"^install\s*:", content, flags=re.MULTILINE):
|
||||
return True
|
||||
|
||||
if re.search(r"^install-[a-zA-Z0-9_-]*\s*:", content, flags=re.MULTILINE):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def run(self, ctx: RepoContext) -> None:
|
||||
"""
|
||||
Execute `make install` in the repository directory if an install
|
||||
target exists.
|
||||
"""
|
||||
makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
|
||||
|
||||
if not os.path.exists(makefile_path):
|
||||
if not ctx.quiet:
|
||||
print(
|
||||
f"[pkgmgr] Makefile '{makefile_path}' not found, "
|
||||
"skipping MakefileInstaller."
|
||||
)
|
||||
return
|
||||
|
||||
if not self._has_install_target(makefile_path):
|
||||
if not ctx.quiet:
|
||||
print(
|
||||
f"[pkgmgr] No 'install' target found in {makefile_path}."
|
||||
)
|
||||
return
|
||||
|
||||
if not ctx.quiet:
|
||||
print(
|
||||
f"[pkgmgr] Running 'make install' in {ctx.repo_dir} "
|
||||
f"(MakefileInstaller)"
|
||||
)
|
||||
|
||||
cmd = "make install"
|
||||
run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
160
pkgmgr/actions/install/installers/nix_flake.py
Normal file
160
pkgmgr/actions/install/installers/nix_flake.py
Normal file
@@ -0,0 +1,160 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Installer for Nix flakes.
|
||||
|
||||
If a repository contains flake.nix and the 'nix' command is available, this
|
||||
installer will try to install profile outputs from the flake.
|
||||
|
||||
Behavior:
|
||||
- If flake.nix is present and `nix` exists on PATH:
|
||||
* First remove any existing `package-manager` profile entry (best-effort).
|
||||
* Then install one or more flake outputs via `nix profile install`.
|
||||
- For the package-manager repo:
|
||||
* `pkgmgr` is mandatory (CLI), `default` is optional.
|
||||
- For all other repos:
|
||||
* `default` is mandatory.
|
||||
|
||||
Special handling:
|
||||
- If PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 is set, the installer is
|
||||
globally disabled (useful for CI or debugging).
|
||||
|
||||
The higher-level InstallationPipeline and CLI-layer model decide when this
|
||||
installer is allowed to run, based on where the current CLI comes from
|
||||
(e.g. Nix, OS packages, Python, Makefile).
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from typing import TYPE_CHECKING, List, Tuple
|
||||
|
||||
from pkgmgr.actions.install.installers.base import BaseInstaller
|
||||
from pkgmgr.core.command.run import run_command
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
from pkgmgr.actions.install import InstallContext
|
||||
|
||||
|
||||
class NixFlakeInstaller(BaseInstaller):
|
||||
"""Install Nix flake profiles for repositories that define flake.nix."""
|
||||
|
||||
# Logical layer name, used by capability matchers.
|
||||
layer = "nix"
|
||||
|
||||
FLAKE_FILE = "flake.nix"
|
||||
PROFILE_NAME = "package-manager"
|
||||
|
||||
def supports(self, ctx: "RepoContext") -> bool:
|
||||
"""
|
||||
Only support repositories that:
|
||||
- Are NOT explicitly disabled via PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1,
|
||||
- Have a flake.nix,
|
||||
- And have the `nix` command available.
|
||||
"""
|
||||
# Optional global kill-switch for CI or debugging.
|
||||
if os.environ.get("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER") == "1":
|
||||
print(
|
||||
"[INFO] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 – "
|
||||
"NixFlakeInstaller is disabled."
|
||||
)
|
||||
return False
|
||||
|
||||
# Nix must be available.
|
||||
if shutil.which("nix") is None:
|
||||
return False
|
||||
|
||||
# flake.nix must exist in the repository.
|
||||
flake_path = os.path.join(ctx.repo_dir, self.FLAKE_FILE)
|
||||
return os.path.exists(flake_path)
|
||||
|
||||
def _ensure_old_profile_removed(self, ctx: "RepoContext") -> None:
|
||||
"""
|
||||
Best-effort removal of an existing profile entry.
|
||||
|
||||
This handles the "already provides the following file" conflict by
|
||||
removing previous `package-manager` installations before we install
|
||||
the new one.
|
||||
|
||||
Any error in `nix profile remove` is intentionally ignored, because
|
||||
a missing profile entry is not a fatal condition.
|
||||
"""
|
||||
if shutil.which("nix") is None:
|
||||
return
|
||||
|
||||
cmd = f"nix profile remove {self.PROFILE_NAME} || true"
|
||||
try:
|
||||
# NOTE: no allow_failure here → matches the existing unit tests
|
||||
run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
except SystemExit:
|
||||
# Unit tests explicitly assert this is swallowed
|
||||
pass
|
||||
|
||||
def _profile_outputs(self, ctx: "RepoContext") -> List[Tuple[str, bool]]:
|
||||
"""
|
||||
Decide which flake outputs to install and whether failures are fatal.
|
||||
|
||||
Returns a list of (output_name, allow_failure) tuples.
|
||||
|
||||
Rules:
|
||||
- For the package-manager repo (identifier 'pkgmgr' or 'package-manager'):
|
||||
[("pkgmgr", False), ("default", True)]
|
||||
- For all other repos:
|
||||
[("default", False)]
|
||||
"""
|
||||
ident = ctx.identifier
|
||||
|
||||
if ident in {"pkgmgr", "package-manager"}:
|
||||
# pkgmgr: main CLI output is "pkgmgr" (mandatory),
|
||||
# "default" is nice-to-have (non-fatal).
|
||||
return [("pkgmgr", False), ("default", True)]
|
||||
|
||||
# Generic repos: we expect a sensible "default" package/app.
|
||||
# Failure to install it is considered fatal.
|
||||
return [("default", False)]
|
||||
|
||||
def run(self, ctx: "InstallContext") -> None:
|
||||
"""
|
||||
Install Nix flake profile outputs.
|
||||
|
||||
For the package-manager repo, failure installing 'pkgmgr' is fatal,
|
||||
failure installing 'default' is non-fatal.
|
||||
For other repos, failure installing 'default' is fatal.
|
||||
"""
|
||||
# Reuse supports() to keep logic in one place.
|
||||
if not self.supports(ctx): # type: ignore[arg-type]
|
||||
return
|
||||
|
||||
outputs = self._profile_outputs(ctx) # list of (name, allow_failure)
|
||||
|
||||
print(
|
||||
"Nix flake detected in "
|
||||
f"{ctx.identifier}, attempting to install profile outputs: "
|
||||
+ ", ".join(name for name, _ in outputs)
|
||||
)
|
||||
|
||||
# Handle the "already installed" case up-front for the shared profile.
|
||||
self._ensure_old_profile_removed(ctx) # type: ignore[arg-type]
|
||||
|
||||
for output, allow_failure in outputs:
|
||||
cmd = f"nix profile install {ctx.repo_dir}#{output}"
|
||||
|
||||
try:
|
||||
run_command(
|
||||
cmd,
|
||||
cwd=ctx.repo_dir,
|
||||
preview=ctx.preview,
|
||||
allow_failure=allow_failure,
|
||||
)
|
||||
print(f"Nix flake output '{output}' successfully installed.")
|
||||
except SystemExit as e:
|
||||
print(f"[Error] Failed to install Nix flake output '{output}': {e}")
|
||||
if not allow_failure:
|
||||
# Mandatory output failed → fatal for the pipeline.
|
||||
raise
|
||||
# Optional output failed → log and continue.
|
||||
print(
|
||||
"[Warning] Continuing despite failure to install "
|
||||
f"optional output '{output}'."
|
||||
)
|
||||
@@ -3,9 +3,9 @@
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from pkgmgr.context import RepoContext
|
||||
from pkgmgr.installers.base import BaseInstaller
|
||||
from pkgmgr.run_command import run_command
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
from pkgmgr.actions.install.installers.base import BaseInstaller
|
||||
from pkgmgr.core.command.run import run_command
|
||||
|
||||
|
||||
class ArchPkgbuildInstaller(BaseInstaller):
|
||||
@@ -17,12 +17,11 @@ apt/dpkg tooling are available.
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from typing import List
|
||||
|
||||
from pkgmgr.context import RepoContext
|
||||
from pkgmgr.installers.base import BaseInstaller
|
||||
from pkgmgr.run_command import run_command
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
from pkgmgr.actions.install.installers.base import BaseInstaller
|
||||
from pkgmgr.core.command.run import run_command
|
||||
|
||||
|
||||
class DebianControlInstaller(BaseInstaller):
|
||||
@@ -68,6 +67,32 @@ class DebianControlInstaller(BaseInstaller):
|
||||
pattern = os.path.join(parent, "*.deb")
|
||||
return sorted(glob.glob(pattern))
|
||||
|
||||
def _privileged_prefix(self) -> str | None:
|
||||
"""
|
||||
Determine how to run privileged commands:
|
||||
|
||||
- If 'sudo' is available, return 'sudo '.
|
||||
- If we are running as root (e.g. inside CI/container), return ''.
|
||||
- Otherwise, return None, meaning we cannot safely elevate.
|
||||
|
||||
Callers are responsible for handling the None case (usually by
|
||||
warning and skipping automatic installation).
|
||||
"""
|
||||
sudo_path = shutil.which("sudo")
|
||||
|
||||
is_root = False
|
||||
try:
|
||||
is_root = os.geteuid() == 0
|
||||
except AttributeError: # pragma: no cover - non-POSIX platforms
|
||||
# On non-POSIX systems, fall back to assuming "not root".
|
||||
is_root = False
|
||||
|
||||
if sudo_path is not None:
|
||||
return "sudo "
|
||||
if is_root:
|
||||
return ""
|
||||
return None
|
||||
|
||||
def _install_build_dependencies(self, ctx: RepoContext) -> None:
|
||||
"""
|
||||
Install build dependencies using `apt-get build-dep ./`.
|
||||
@@ -86,12 +111,25 @@ class DebianControlInstaller(BaseInstaller):
|
||||
)
|
||||
return
|
||||
|
||||
prefix = self._privileged_prefix()
|
||||
if prefix is None:
|
||||
print(
|
||||
"[Warning] Neither 'sudo' is available nor running as root. "
|
||||
"Skipping automatic build-dep installation for Debian. "
|
||||
"Please install build dependencies from debian/control manually."
|
||||
)
|
||||
return
|
||||
|
||||
# Update package lists first for reliable build-dep resolution.
|
||||
run_command("sudo apt-get update", cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
run_command(
|
||||
f"{prefix}apt-get update",
|
||||
cwd=ctx.repo_dir,
|
||||
preview=ctx.preview,
|
||||
)
|
||||
|
||||
# Install build dependencies based on debian/control in the current tree.
|
||||
# `apt-get build-dep ./` uses the source in the current directory.
|
||||
builddep_cmd = "sudo apt-get build-dep -y ./"
|
||||
builddep_cmd = f"{prefix}apt-get build-dep -y ./"
|
||||
run_command(builddep_cmd, cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
|
||||
def run(self, ctx: RepoContext) -> None:
|
||||
@@ -101,7 +139,7 @@ class DebianControlInstaller(BaseInstaller):
|
||||
Steps:
|
||||
1. apt-get build-dep ./ (automatic build dependency installation)
|
||||
2. dpkg-buildpackage -b -us -uc
|
||||
3. sudo dpkg -i ../*.deb
|
||||
3. sudo dpkg -i ../*.deb (or plain dpkg -i when running as root)
|
||||
"""
|
||||
control_path = self._control_path(ctx)
|
||||
if not os.path.exists(control_path):
|
||||
@@ -123,7 +161,17 @@ class DebianControlInstaller(BaseInstaller):
|
||||
)
|
||||
return
|
||||
|
||||
prefix = self._privileged_prefix()
|
||||
if prefix is None:
|
||||
print(
|
||||
"[Warning] Neither 'sudo' is available nor running as root. "
|
||||
"Skipping automatic .deb installation. "
|
||||
"You can manually install the following files with dpkg -i:\n "
|
||||
+ "\n ".join(debs)
|
||||
)
|
||||
return
|
||||
|
||||
# 4) Install .deb files
|
||||
install_cmd = "sudo dpkg -i " + " ".join(os.path.basename(d) for d in debs)
|
||||
install_cmd = prefix + "dpkg -i " + " ".join(os.path.basename(d) for d in debs)
|
||||
parent = os.path.dirname(ctx.repo_dir)
|
||||
run_command(install_cmd, cwd=parent, preview=ctx.preview)
|
||||
282
pkgmgr/actions/install/installers/os_packages/rpm_spec.py
Normal file
282
pkgmgr/actions/install/installers/os_packages/rpm_spec.py
Normal file
@@ -0,0 +1,282 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Installer for RPM-based packages defined in *.spec files.
|
||||
|
||||
This installer:
|
||||
|
||||
1. Installs build dependencies via dnf/yum builddep (where available)
|
||||
2. Prepares a source tarball in ~/rpmbuild/SOURCES based on the .spec
|
||||
3. Uses rpmbuild to build RPMs from the provided .spec file
|
||||
4. Installs the resulting RPMs via the system package manager (dnf/yum)
|
||||
or rpm as a fallback.
|
||||
|
||||
It targets RPM-based systems (Fedora / RHEL / CentOS / Rocky / Alma, etc.).
|
||||
"""
|
||||
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import tarfile
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
from pkgmgr.actions.install.installers.base import BaseInstaller
|
||||
from pkgmgr.core.command.run import run_command
|
||||
|
||||
|
||||
class RpmSpecInstaller(BaseInstaller):
|
||||
"""
|
||||
Build and install RPM-based packages from *.spec files.
|
||||
|
||||
This installer is responsible for the full build + install of the
|
||||
application on RPM-like systems.
|
||||
"""
|
||||
|
||||
# Logical layer name, used by capability matchers.
|
||||
layer = "os-packages"
|
||||
|
||||
def _is_rpm_like(self) -> bool:
|
||||
"""
|
||||
Basic RPM-like detection:
|
||||
|
||||
- rpmbuild must be available
|
||||
- at least one of dnf / yum / yum-builddep must be present
|
||||
"""
|
||||
if shutil.which("rpmbuild") is None:
|
||||
return False
|
||||
|
||||
has_dnf = shutil.which("dnf") is not None
|
||||
has_yum = shutil.which("yum") is not None
|
||||
has_yum_builddep = shutil.which("yum-builddep") is not None
|
||||
|
||||
return has_dnf or has_yum or has_yum_builddep
|
||||
|
||||
def _spec_path(self, ctx: RepoContext) -> Optional[str]:
|
||||
"""Return the first *.spec file in the repository root, if any."""
|
||||
pattern = os.path.join(ctx.repo_dir, "*.spec")
|
||||
matches = sorted(glob.glob(pattern))
|
||||
if not matches:
|
||||
return None
|
||||
return matches[0]
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Helpers for preparing rpmbuild topdir and source tarball
|
||||
# ------------------------------------------------------------------
|
||||
def _rpmbuild_topdir(self) -> str:
|
||||
"""
|
||||
Return the rpmbuild topdir that rpmbuild will use by default.
|
||||
|
||||
By default this is: ~/rpmbuild
|
||||
|
||||
In the self-install tests, $HOME is set to /tmp/pkgmgr-self-install,
|
||||
so this becomes /tmp/pkgmgr-self-install/rpmbuild which matches the
|
||||
paths in the RPM build logs.
|
||||
"""
|
||||
home = os.path.expanduser("~")
|
||||
return os.path.join(home, "rpmbuild")
|
||||
|
||||
def _ensure_rpmbuild_tree(self, topdir: str) -> None:
|
||||
"""
|
||||
Ensure the standard rpmbuild directory tree exists:
|
||||
|
||||
<topdir>/
|
||||
BUILD/
|
||||
BUILDROOT/
|
||||
RPMS/
|
||||
SOURCES/
|
||||
SPECS/
|
||||
SRPMS/
|
||||
"""
|
||||
for sub in ("BUILD", "BUILDROOT", "RPMS", "SOURCES", "SPECS", "SRPMS"):
|
||||
os.makedirs(os.path.join(topdir, sub), exist_ok=True)
|
||||
|
||||
def _parse_name_version(self, spec_path: str) -> Optional[Tuple[str, str]]:
|
||||
"""
|
||||
Parse Name and Version from the given .spec file.
|
||||
|
||||
Returns (name, version) or None if either cannot be determined.
|
||||
"""
|
||||
name = None
|
||||
version = None
|
||||
|
||||
with open(spec_path, "r", encoding="utf-8") as f:
|
||||
for raw_line in f:
|
||||
line = raw_line.strip()
|
||||
# Ignore comments
|
||||
if not line or line.startswith("#"):
|
||||
continue
|
||||
|
||||
lower = line.lower()
|
||||
if lower.startswith("name:"):
|
||||
# e.g. "Name: package-manager"
|
||||
parts = line.split(":", 1)
|
||||
if len(parts) == 2:
|
||||
name = parts[1].strip()
|
||||
elif lower.startswith("version:"):
|
||||
# e.g. "Version: 0.7.7"
|
||||
parts = line.split(":", 1)
|
||||
if len(parts) == 2:
|
||||
version = parts[1].strip()
|
||||
|
||||
if name and version:
|
||||
break
|
||||
|
||||
if not name or not version:
|
||||
print(
|
||||
"[Warning] Could not determine Name/Version from spec file "
|
||||
f"'{spec_path}'. Skipping RPM source tarball preparation."
|
||||
)
|
||||
return None
|
||||
|
||||
return name, version
|
||||
|
||||
def _prepare_source_tarball(self, ctx: RepoContext, spec_path: str) -> None:
|
||||
"""
|
||||
Prepare a source tarball in <HOME>/rpmbuild/SOURCES that matches
|
||||
the Name/Version in the .spec file.
|
||||
"""
|
||||
parsed = self._parse_name_version(spec_path)
|
||||
if parsed is None:
|
||||
return
|
||||
|
||||
name, version = parsed
|
||||
topdir = self._rpmbuild_topdir()
|
||||
self._ensure_rpmbuild_tree(topdir)
|
||||
|
||||
build_dir = os.path.join(topdir, "BUILD")
|
||||
sources_dir = os.path.join(topdir, "SOURCES")
|
||||
|
||||
source_root = os.path.join(build_dir, f"{name}-{version}")
|
||||
tarball_path = os.path.join(sources_dir, f"{name}-{version}.tar.gz")
|
||||
|
||||
# Clean any previous build directory for this name/version.
|
||||
if os.path.exists(source_root):
|
||||
shutil.rmtree(source_root)
|
||||
|
||||
# Copy the repository tree into BUILD/<name>-<version>.
|
||||
shutil.copytree(ctx.repo_dir, source_root)
|
||||
|
||||
# Create the tarball with the top-level directory <name>-<version>.
|
||||
if os.path.exists(tarball_path):
|
||||
os.remove(tarball_path)
|
||||
|
||||
with tarfile.open(tarball_path, "w:gz") as tar:
|
||||
tar.add(source_root, arcname=f"{name}-{version}")
|
||||
|
||||
print(
|
||||
f"[INFO] Prepared RPM source tarball at '{tarball_path}' "
|
||||
f"from '{ctx.repo_dir}'."
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def supports(self, ctx: RepoContext) -> bool:
|
||||
"""
|
||||
This installer is supported if:
|
||||
- we are on an RPM-based system (rpmbuild + dnf/yum/yum-builddep available), and
|
||||
- a *.spec file exists in the repository root.
|
||||
"""
|
||||
if not self._is_rpm_like():
|
||||
return False
|
||||
|
||||
return self._spec_path(ctx) is not None
|
||||
|
||||
def _find_built_rpms(self) -> List[str]:
|
||||
"""
|
||||
Find RPMs built by rpmbuild.
|
||||
|
||||
By default, rpmbuild outputs RPMs into:
|
||||
~/rpmbuild/RPMS/*/*.rpm
|
||||
"""
|
||||
topdir = self._rpmbuild_topdir()
|
||||
pattern = os.path.join(topdir, "RPMS", "**", "*.rpm")
|
||||
return sorted(glob.glob(pattern, recursive=True))
|
||||
|
||||
def _install_build_dependencies(self, ctx: RepoContext, spec_path: str) -> None:
    """
    Install build dependencies for the given .spec file.

    Prefers ``dnf builddep`` and falls back to ``yum-builddep`` (from
    yum-utils). When neither tool is available, dependency installation
    is skipped with a warning instead of failing the whole install.
    """
    spec_basename = os.path.basename(spec_path)

    if shutil.which("dnf") is not None:
        cmd = f"sudo dnf builddep -y {spec_basename}"
    elif shutil.which("yum-builddep") is not None:
        cmd = f"sudo yum-builddep -y {spec_basename}"
    else:
        # Bug fix: the previous fallback ran `sudo yum-builddep` whenever a
        # plain `yum` binary existed — but that branch was only reachable when
        # shutil.which("yum-builddep") had already returned None, so the
        # invoked command could never be found. The dead branch is removed and
        # we warn-and-skip instead.
        print(
            "[Warning] No suitable RPM builddep tool (dnf/yum-builddep/yum) found. "
            "Skipping automatic build dependency installation for RPM."
        )
        return

    # run_command honours ctx.preview, so in preview mode nothing is executed.
    run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
|
||||
def _install_built_rpms(self, ctx: RepoContext, rpms: List[str]) -> None:
    """
    Install or upgrade the RPMs produced by rpmbuild.

    Tool preference, best first:
    - dnf install -y   (handles upgrades cleanly)
    - yum install -y
    - rpm -Uvh         (upgrade mode, replaces an existing older version)

    Warns and returns when no RPMs were built or no installer exists.
    """
    if not rpms:
        print(
            "[Warning] No RPM files found after rpmbuild. "
            "Skipping RPM package installation."
        )
        return

    joined = " ".join(rpms)

    if shutil.which("dnf") is not None:
        install_cmd = f"sudo dnf install -y {joined}"
    elif shutil.which("yum") is not None:
        install_cmd = f"sudo yum install -y {joined}"
    elif shutil.which("rpm") is not None:
        # Last resort: rpm in upgrade mode, so an already-installed older
        # version is replaced instead of producing file conflicts.
        install_cmd = f"sudo rpm -Uvh {joined}"
    else:
        print(
            "[Warning] No suitable RPM installer (dnf/yum/rpm) found. "
            "Cannot install built RPMs."
        )
        return

    run_command(install_cmd, cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
|
||||
def run(self, ctx: RepoContext) -> None:
    """
    Build and install RPM-based packages.

    Steps:
    1. Prepare source tarball in ~/rpmbuild/SOURCES matching Name/Version
    2. dnf/yum builddep <spec> (automatic build dependency installation)
    3. rpmbuild -ba path/to/spec
    4. Install built RPMs via dnf/yum (or rpm as fallback)

    Returns silently when no .spec file is present in the repo root.
    """
    spec_path = self._spec_path(ctx)
    if not spec_path:
        return

    # 1) Prepare source tarball so rpmbuild finds Source0 in SOURCES.
    self._prepare_source_tarball(ctx, spec_path)

    # 2) Install build dependencies
    self._install_build_dependencies(ctx, spec_path)

    # 3) Build RPMs
    # rpmbuild runs with cwd=ctx.repo_dir, so only the spec basename is needed.
    spec_basename = os.path.basename(spec_path)
    build_cmd = f"rpmbuild -ba {spec_basename}"
    run_command(build_cmd, cwd=ctx.repo_dir, preview=ctx.preview)

    # 4) Find and install built RPMs
    rpms = self._find_built_rpms()
    self._install_built_rpms(ctx, rpms)
|
||||
139
pkgmgr/actions/install/installers/python.py
Normal file
139
pkgmgr/actions/install/installers/python.py
Normal file
@@ -0,0 +1,139 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
PythonInstaller — install Python projects defined via pyproject.toml.
|
||||
|
||||
Installation rules:
|
||||
|
||||
1. pip command resolution:
|
||||
a) If PKGMGR_PIP is set → use it exactly as provided.
|
||||
b) Else if running inside a virtualenv → use `sys.executable -m pip`.
|
||||
c) Else → create/use a per-repository virtualenv under ~/.venvs/<repo>/.
|
||||
|
||||
2. Installation target:
|
||||
- Always install into the resolved pip environment.
|
||||
- Never modify system Python, never rely on --user.
|
||||
- Nix-immutable systems (PEP 668) are automatically avoided because we
|
||||
never touch system Python.
|
||||
|
||||
3. The installer is skipped when:
|
||||
- PKGMGR_DISABLE_PYTHON_INSTALLER=1 is set.
|
||||
- The repository has no pyproject.toml.
|
||||
|
||||
All pip failures are treated as fatal.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pkgmgr.actions.install.installers.base import BaseInstaller
|
||||
from pkgmgr.core.command.run import run_command
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
from pkgmgr.actions.install import InstallContext
|
||||
|
||||
|
||||
class PythonInstaller(BaseInstaller):
    """Install Python projects and dependencies via pip using isolated environments."""

    layer = "python"

    # ------------------------------------------------------------------
    # Installer activation logic
    # ------------------------------------------------------------------
    def supports(self, ctx: "RepoContext") -> bool:
        """
        Decide whether this installer applies to the repository.

        Active only when a pyproject.toml exists in the repo and the
        PKGMGR_DISABLE_PYTHON_INSTALLER kill switch is not set to "1".
        """
        if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
            print("[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER.")
            return False

        pyproject = os.path.join(ctx.repo_dir, "pyproject.toml")
        return os.path.exists(pyproject)

    # ------------------------------------------------------------------
    # Virtualenv handling
    # ------------------------------------------------------------------
    def _in_virtualenv(self) -> bool:
        """Return True when the running interpreter lives inside a venv."""
        active = bool(os.environ.get("VIRTUAL_ENV"))
        # Inside a venv sys.prefix differs from sys.base_prefix (PEP 405).
        return active or sys.prefix != getattr(sys, "base_prefix", sys.prefix)

    def _ensure_repo_venv(self, ctx: "InstallContext") -> str:
        """
        Create ~/.venvs/<identifier>/ on first use and return its path.
        """
        target = os.path.expanduser(f"~/.venvs/{ctx.identifier}")

        if not os.path.isdir(target):
            print(f"[python-installer] Creating virtualenv: {target}")
            subprocess.check_call([sys.executable, "-m", "venv", target])

        return target

    # ------------------------------------------------------------------
    # pip command resolution
    # ------------------------------------------------------------------
    def _pip_cmd(self, ctx: "InstallContext") -> str:
        """
        Resolve the pip invocation to use.

        Precedence:
        1. PKGMGR_PIP override from the environment.
        2. Active virtualenv → `sys.executable -m pip`.
        3. Dedicated per-repository venv under ~/.venvs/<repo>/bin/pip.
        """
        override = os.environ.get("PKGMGR_PIP", "").strip()
        if override:
            return override

        if self._in_virtualenv():
            return f"{sys.executable} -m pip"

        return os.path.join(self._ensure_repo_venv(ctx), "bin", "pip")

    # ------------------------------------------------------------------
    # Execution
    # ------------------------------------------------------------------
    def run(self, ctx: "InstallContext") -> None:
        """
        Install the project defined by pyproject.toml into the resolved
        pip environment. System Python is never modified.
        """
        if not self.supports(ctx):  # type: ignore[arg-type]
            return

        if not os.path.exists(os.path.join(ctx.repo_dir, "pyproject.toml")):
            return

        print(f"[python-installer] Installing Python project for {ctx.identifier}...")

        pip_cmd = self._pip_cmd(ctx)

        # Always isolated, never system-wide / --user.
        run_command(f"{pip_cmd} install .", cwd=ctx.repo_dir, preview=ctx.preview)

        print(f"[python-installer] Installation finished for {ctx.identifier}.")
|
||||
91
pkgmgr/actions/install/layers.py
Normal file
91
pkgmgr/actions/install/layers.py
Normal file
@@ -0,0 +1,91 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
CLI layer model for the pkgmgr installation pipeline.
|
||||
|
||||
We treat CLI entry points as coming from one of four conceptual layers:
|
||||
|
||||
- os-packages : system package managers (pacman/apt/dnf/…)
|
||||
- nix : Nix flake / nix profile
|
||||
- python : pip / virtualenv / user-local scripts
|
||||
- makefile : repo-local Makefile / scripts inside the repo
|
||||
|
||||
The layer order defines precedence: higher layers "own" the CLI and
|
||||
lower layers will not be executed once a higher-priority CLI exists.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class CliLayer(str, Enum):
    """Conceptual origin layer of a repository's CLI entry point.

    The string values are the canonical layer names used by installers
    (see each installer's ``layer`` attribute) and in log output.
    """

    OS_PACKAGES = "os-packages"
    NIX = "nix"
    PYTHON = "python"
    MAKEFILE = "makefile"
|
||||
|
||||
|
||||
# Highest priority first.
# An installer whose layer appears earlier in this list "owns" the CLI;
# installers from later (lower-priority) layers are skipped once a
# higher-priority CLI exists (see layer_priority / the pipeline).
CLI_LAYERS: list[CliLayer] = [
    CliLayer.OS_PACKAGES,
    CliLayer.NIX,
    CliLayer.PYTHON,
    CliLayer.MAKEFILE,
]
|
||||
|
||||
|
||||
def layer_priority(layer: Optional[CliLayer]) -> int:
    """
    Map a layer to its numeric priority index.

    Lower index → higher priority. ``None`` and layers missing from
    CLI_LAYERS sort after every known layer (index == len(CLI_LAYERS)).
    """
    if layer is not None and layer in CLI_LAYERS:
        return CLI_LAYERS.index(layer)
    return len(CLI_LAYERS)
|
||||
|
||||
|
||||
def classify_command_layer(command: str, repo_dir: str) -> CliLayer:
    """
    Heuristically classify a resolved command path into a CLI layer.

    Rules (best effort):

    - /usr/... or /bin/...          → os-packages
    - /nix/store/... or ~/.nix-profile → nix
    - ~/.local/bin/...              → python
    - inside repo_dir               → makefile
    - everything else               → python (user/venv scripts, etc.)
    """

    def _is_under(path: str, prefix: str) -> bool:
        # Separator-aware prefix test. A plain str.startswith() would also
        # match *sibling* paths, e.g. '/home/u/.nix-profile-x' for prefix
        # '/home/u/.nix-profile', or '/srv/repo-extra' for '/srv/repo'.
        prefix = prefix.rstrip(os.sep)
        return path == prefix or path.startswith(prefix + os.sep)

    command_abs = os.path.abspath(os.path.expanduser(command))
    repo_abs = os.path.abspath(repo_dir)
    home = os.path.expanduser("~")

    # OS package managers (literal prefixes already end with a separator).
    if command_abs.startswith("/usr/") or command_abs.startswith("/bin/"):
        return CliLayer.OS_PACKAGES

    # Nix store / profile
    if command_abs.startswith("/nix/store/") or _is_under(
        command_abs, os.path.join(home, ".nix-profile")
    ):
        return CliLayer.NIX

    # User-local bin
    if _is_under(command_abs, os.path.join(home, ".local", "bin")):
        return CliLayer.PYTHON

    # Inside the repository → usually a Makefile/script
    if _is_under(command_abs, repo_abs):
        return CliLayer.MAKEFILE

    # Fallback: treat as Python-style/user-level script
    return CliLayer.PYTHON
|
||||
257
pkgmgr/actions/install/pipeline.py
Normal file
257
pkgmgr/actions/install/pipeline.py
Normal file
@@ -0,0 +1,257 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Installation pipeline orchestration for repositories.
|
||||
|
||||
This module implements the "Setup Controller" logic:
|
||||
|
||||
1. Detect current CLI command for the repo (if any).
|
||||
2. Classify it into a layer (os-packages, nix, python, makefile).
|
||||
3. Iterate over installers in layer order:
|
||||
- Skip installers whose layer is weaker than an already-loaded one.
|
||||
- Run only installers that support() the repo and add new capabilities.
|
||||
- After each installer, re-resolve the command and update the layer.
|
||||
4. Maintain the repo["command"] field and create/update symlinks via create_ink().
|
||||
|
||||
The goal is to prevent conflicting installations and make the layering
|
||||
behaviour explicit and testable.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, Sequence, Set
|
||||
|
||||
from pkgmgr.actions.install.context import RepoContext
|
||||
from pkgmgr.actions.install.installers.base import BaseInstaller
|
||||
from pkgmgr.actions.install.layers import (
|
||||
CliLayer,
|
||||
classify_command_layer,
|
||||
layer_priority,
|
||||
)
|
||||
from pkgmgr.core.command.ink import create_ink
|
||||
from pkgmgr.core.command.resolve import resolve_command_for_repo
|
||||
|
||||
|
||||
@dataclass
class CommandState:
    """
    Represents the current CLI state for a repository:

    - command: absolute or relative path to the CLI entry point
    - layer: which conceptual layer this command belongs to
    """

    # Resolved CLI entry point path; None when the repo has no command yet.
    command: Optional[str]
    # Layer classification of `command`; None whenever command is None.
    layer: Optional[CliLayer]
|
||||
|
||||
|
||||
class CommandResolver:
    """
    Resolve the current CLI command of a repository into a CommandState.

    Thin wrapper around resolve_command_for_repo() plus layer
    classification, bound to a single RepoContext.
    """

    def __init__(self, ctx: RepoContext) -> None:
        self._ctx = ctx

    def resolve(self) -> CommandState:
        """
        Return the repo's current CommandState.

        resolve_command_for_repo() may raise SystemExit (e.g. a Python
        package whose entry point is not installed yet); that is treated
        as "no command yet" so installers still get a chance to run.
        """
        ctx = self._ctx

        try:
            command = resolve_command_for_repo(
                repo=ctx.repo,
                repo_identifier=ctx.identifier,
                repo_dir=ctx.repo_dir,
            )
        except SystemExit:
            command = None

        if command:
            return CommandState(
                command=command,
                layer=classify_command_layer(command, ctx.repo_dir),
            )

        return CommandState(command=None, layer=None)
|
||||
|
||||
|
||||
class InstallationPipeline:
    """
    High-level orchestrator that applies a sequence of installers
    to a repository based on CLI layer precedence.

    Installers run in the given order; an installer is skipped when the
    repository's CLI already comes from an equal- or higher-priority
    layer, or when its capabilities are already provided.
    """

    def __init__(self, installers: Sequence[BaseInstaller]) -> None:
        # Copy so later mutation of the caller's sequence has no effect.
        self._installers = list(installers)

    # ------------------------------------------------------------------
    # Public API
    # ------------------------------------------------------------------
    def run(self, ctx: RepoContext) -> None:
        """
        Execute the installation pipeline for a single repository.

        - Detect initial command & layer.
        - Optionally create a symlink.
        - Run installers in order, skipping those whose layer is weaker
          than an already-loaded CLI.
        - After each installer, re-resolve the command and refresh the
          symlink if needed.

        Side effects: mutates ctx.repo["command"] and creates/updates
        symlinks via create_ink(). Installer failures propagate as
        SystemExit (see _run_installer).
        """
        repo = ctx.repo
        repo_dir = ctx.repo_dir
        identifier = ctx.identifier
        repositories_base_dir = ctx.repositories_base_dir
        bin_dir = ctx.bin_dir
        all_repos = ctx.all_repos
        quiet = ctx.quiet
        preview = ctx.preview

        resolver = CommandResolver(ctx)
        state = resolver.resolve()

        # Persist initial command (if any) and create a symlink.
        if state.command:
            repo["command"] = state.command
            create_ink(
                repo,
                repositories_base_dir,
                bin_dir,
                all_repos,
                quiet=quiet,
                preview=preview,
            )
        else:
            # No command yet: drop any stale entry from a previous run.
            repo.pop("command", None)

        provided_capabilities: Set[str] = set()

        # Main installer loop
        for installer in self._installers:
            layer_name = getattr(installer, "layer", None)

            # Installers without a layer participate without precedence logic.
            if layer_name is None:
                self._run_installer(installer, ctx, identifier, repo_dir, quiet)
                continue

            try:
                installer_layer = CliLayer(layer_name)
            except ValueError:
                # Unknown layer string → treat as lowest priority.
                installer_layer = None

            # "Previous/Current layer already loaded?"
            if state.layer is not None and installer_layer is not None:
                current_prio = layer_priority(state.layer)
                installer_prio = layer_priority(installer_layer)

                if current_prio < installer_prio:
                    # Current CLI comes from a higher-priority layer,
                    # so we skip this installer entirely.
                    if not quiet:
                        print(
                            f"[pkgmgr] Skipping installer "
                            f"{installer.__class__.__name__} for {identifier} – "
                            f"CLI already provided by layer {state.layer.value!r}."
                        )
                    continue

                if current_prio == installer_prio:
                    # Same layer already provides a CLI; usually there is no
                    # need to run another installer on top of it.
                    if not quiet:
                        print(
                            f"[pkgmgr] Skipping installer "
                            f"{installer.__class__.__name__} for {identifier} – "
                            f"layer {installer_layer.value!r} is already loaded."
                        )
                    continue

            # Check if this installer is applicable at all.
            if not installer.supports(ctx):
                continue

            # Capabilities: if everything this installer would provide is already
            # covered, we can safely skip it.
            caps = installer.discover_capabilities(ctx)
            if caps and caps.issubset(provided_capabilities):
                if not quiet:
                    print(
                        f"Skipping installer {installer.__class__.__name__} "
                        f"for {identifier} – capabilities {caps} already provided."
                    )
                continue

            if not quiet:
                print(
                    f"[pkgmgr] Running installer {installer.__class__.__name__} "
                    f"for {identifier} in '{repo_dir}' "
                    f"(new capabilities: {caps or set()})..."
                )

            # Run the installer with error reporting.
            self._run_installer(installer, ctx, identifier, repo_dir, quiet)

            provided_capabilities.update(caps)

            # After running an installer, re-resolve the command and layer.
            new_state = resolver.resolve()
            if new_state.command:
                repo["command"] = new_state.command
                create_ink(
                    repo,
                    repositories_base_dir,
                    bin_dir,
                    all_repos,
                    quiet=quiet,
                    preview=preview,
                )
            else:
                repo.pop("command", None)

            # Subsequent iterations compare against the refreshed state.
            state = new_state

    # ------------------------------------------------------------------
    # Internal helpers
    # ------------------------------------------------------------------
    @staticmethod
    def _run_installer(
        installer: BaseInstaller,
        ctx: RepoContext,
        identifier: str,
        repo_dir: str,
        quiet: bool,
    ) -> None:
        """
        Execute a single installer with unified error handling.

        SystemExit from the installer is logged with context and then
        re-raised, so the overall install still fails loudly.
        """
        try:
            installer.run(ctx)
        except SystemExit as exc:
            exit_code = exc.code if isinstance(exc.code, int) else str(exc.code)
            print(
                f"[ERROR] Installer {installer.__class__.__name__} failed "
                f"for repository {identifier} (dir: {repo_dir}) "
                f"with exit code {exit_code}."
            )
            print(
                "[ERROR] This usually means an underlying command failed "
                "(e.g. 'make install', 'nix build', 'pip install', ...)."
            )
            print(
                "[ERROR] Check the log above for the exact command output. "
                "You can also run this repository in isolation via:\n"
                f"    pkgmgr install {identifier} "
                "--clone-mode shallow --no-verification"
            )
            raise
|
||||
@@ -1,7 +1,7 @@
|
||||
import os
|
||||
from pkgmgr.get_repo_identifier import get_repo_identifier
|
||||
from pkgmgr.get_repo_dir import get_repo_dir
|
||||
from pkgmgr.run_command import run_command
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.command.run import run_command
|
||||
import sys
|
||||
|
||||
def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir, all_repos, proxy_command: str, extra_args, preview: bool):
|
||||
310
pkgmgr/actions/release/__init__.py
Normal file
310
pkgmgr/actions/release/__init__.py
Normal file
@@ -0,0 +1,310 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Release helper for pkgmgr (public entry point).
|
||||
|
||||
This package provides the high-level `release()` function used by the
|
||||
pkgmgr CLI to perform versioned releases:
|
||||
|
||||
- Determine the next semantic version based on existing Git tags.
|
||||
- Update pyproject.toml with the new version.
|
||||
- Update additional packaging files (flake.nix, PKGBUILD,
|
||||
debian/changelog, RPM spec) where present.
|
||||
- Prepend a basic entry to CHANGELOG.md.
|
||||
- Move the floating 'latest' tag to the newly created release tag so
|
||||
the newest release is always marked as latest.
|
||||
|
||||
Additional behaviour:
|
||||
- If `preview=True` (from --preview), no files are written and no
|
||||
Git commands are executed. Instead, a detailed summary of the
|
||||
planned changes and commands is printed.
|
||||
- If `preview=False` and not forced, the release is executed in two
|
||||
phases:
|
||||
1) Preview-only run (dry-run).
|
||||
2) Interactive confirmation, then real release if confirmed.
|
||||
This confirmation can be skipped with the `force=True` flag.
|
||||
- Before creating and pushing tags, main/master is updated from origin
|
||||
when the release is performed on one of these branches.
|
||||
- If `close=True` is used and the current branch is not main/master,
|
||||
the branch will be closed via branch_commands.close_branch() after
|
||||
a successful release.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from typing import Optional
|
||||
|
||||
from pkgmgr.core.git import get_current_branch, GitError
|
||||
from pkgmgr.actions.branch import close_branch
|
||||
|
||||
from .versioning import determine_current_version, bump_semver
|
||||
from .git_ops import run_git_command, sync_branch_with_remote, update_latest_tag
|
||||
from .files import (
|
||||
update_pyproject_version,
|
||||
update_flake_version,
|
||||
update_pkgbuild_version,
|
||||
update_spec_version,
|
||||
update_changelog,
|
||||
update_debian_changelog,
|
||||
update_spec_changelog,
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Internal implementation (single-phase, preview or real)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _release_impl(
    pyproject_path: str = "pyproject.toml",
    changelog_path: str = "CHANGELOG.md",
    release_type: str = "patch",
    message: Optional[str] = None,
    preview: bool = False,
    close: bool = False,
) -> None:
    """
    Internal implementation that performs a single-phase release.

    When preview=True, every file-update helper and git step only prints
    what it would do; nothing is written or executed.

    Order matters: version files are updated first, then changelogs,
    then the branch is synced, and only afterwards are git add/commit/
    tag/push executed.
    """
    current_ver = determine_current_version()
    new_ver = bump_semver(current_ver, release_type)
    new_ver_str = str(new_ver)
    new_tag = new_ver.to_tag(with_prefix=True)

    mode = "PREVIEW" if preview else "REAL"
    print(f"Release mode: {mode}")
    print(f"Current version: {current_ver}")
    print(f"New version: {new_ver_str} ({release_type})")

    # All packaging files are located relative to pyproject.toml's directory.
    repo_root = os.path.dirname(os.path.abspath(pyproject_path))

    # Update core project metadata and packaging files
    update_pyproject_version(pyproject_path, new_ver_str, preview=preview)
    changelog_message = update_changelog(
        changelog_path,
        new_ver_str,
        message=message,
        preview=preview,
    )

    flake_path = os.path.join(repo_root, "flake.nix")
    update_flake_version(flake_path, new_ver_str, preview=preview)

    pkgbuild_path = os.path.join(repo_root, "PKGBUILD")
    update_pkgbuild_version(pkgbuild_path, new_ver_str, preview=preview)

    spec_path = os.path.join(repo_root, "package-manager.spec")
    update_spec_version(spec_path, new_ver_str, preview=preview)

    # Determine a single effective_message to be reused across all
    # changelog targets (project, Debian, Fedora). Falls back to the
    # message returned by update_changelog when none was supplied.
    effective_message: Optional[str] = message
    if effective_message is None and isinstance(changelog_message, str):
        if changelog_message.strip():
            effective_message = changelog_message.strip()

    debian_changelog_path = os.path.join(repo_root, "debian", "changelog")
    package_name = os.path.basename(repo_root) or "package-manager"

    # Debian changelog
    update_debian_changelog(
        debian_changelog_path,
        package_name=package_name,
        new_version=new_ver_str,
        message=effective_message,
        preview=preview,
    )

    # Fedora / RPM %changelog
    update_spec_changelog(
        spec_path=spec_path,
        package_name=package_name,
        new_version=new_ver_str,
        message=effective_message,
        preview=preview,
    )

    commit_msg = f"Release version {new_ver_str}"
    tag_msg = effective_message or commit_msg

    # Determine branch and ensure it is up to date if main/master.
    # A GitError (e.g. detached HEAD) falls back to "main".
    try:
        branch = get_current_branch() or "main"
    except GitError:
        branch = "main"
    print(f"Releasing on branch: {branch}")

    # Ensure main/master are up-to-date from origin before creating and
    # pushing tags. For other branches we only log the intent.
    sync_branch_with_remote(branch, preview=preview)

    files_to_add = [
        pyproject_path,
        changelog_path,
        flake_path,
        pkgbuild_path,
        spec_path,
        debian_changelog_path,
    ]
    # Only stage files that actually exist in this repository.
    existing_files = [p for p in files_to_add if p and os.path.exists(p)]

    if preview:
        for path in existing_files:
            print(f"[PREVIEW] Would run: git add {path}")
        print(f'[PREVIEW] Would run: git commit -am "{commit_msg}"')
        print(f'[PREVIEW] Would run: git tag -a {new_tag} -m "{tag_msg}"')
        print(f"[PREVIEW] Would run: git push origin {branch}")
        print("[PREVIEW] Would run: git push origin --tags")

        # Also update the floating 'latest' tag to the new highest SemVer.
        update_latest_tag(new_tag, preview=True)

        if close and branch not in ("main", "master"):
            print(
                f"[PREVIEW] Would also close branch {branch} after the release "
                "(close=True and branch is not main/master)."
            )
        elif close:
            print(
                f"[PREVIEW] close=True but current branch is {branch}; "
                "no branch would be closed."
            )

        print("Preview completed. No changes were made.")
        return

    for path in existing_files:
        run_git_command(f"git add {path}")

    run_git_command(f'git commit -am "{commit_msg}"')
    run_git_command(f'git tag -a {new_tag} -m "{tag_msg}"')
    run_git_command(f"git push origin {branch}")
    run_git_command("git push origin --tags")

    # Move 'latest' to the new release tag so the newest SemVer is always
    # marked as latest. This is best-effort and must not break the release.
    try:
        update_latest_tag(new_tag, preview=False)
    except GitError as exc:  # pragma: no cover
        print(
            f"[WARN] Failed to update floating 'latest' tag for {new_tag}: {exc}\n"
            "[WARN] The release itself completed successfully; only the "
            "'latest' tag was not updated."
        )

    print(f"Release {new_ver_str} completed.")

    if close:
        if branch in ("main", "master"):
            print(
                f"[INFO] close=True but current branch is {branch}; "
                "nothing to close."
            )
            return

        print(
            f"[INFO] Closing branch {branch} after successful release "
            "(close=True and branch is not main/master)..."
        )
        # Best-effort: a failed branch close must not fail the release.
        try:
            close_branch(name=branch, base_branch="main", cwd=".")
        except Exception as exc:  # pragma: no cover
            print(f"[WARN] Failed to close branch {branch} automatically: {exc}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public release entry point
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def release(
    pyproject_path: str = "pyproject.toml",
    changelog_path: str = "CHANGELOG.md",
    release_type: str = "patch",
    message: Optional[str] = None,
    preview: bool = False,
    force: bool = False,
    close: bool = False,
) -> None:
    """
    High-level release entry point.

    Modes:

    - preview=True:
        * Single-phase PREVIEW only.

    - preview=False, force=True:
        * Single-phase REAL release, no interactive preview.

    - preview=False, force=False:
        * Two-phase flow (intended default for interactive CLI use):
          dry-run first, then confirmation, then the real release.
    """

    def _phase(dry_run: bool) -> None:
        # Every phase forwards the same arguments; only the preview
        # flag distinguishes dry-run from the real release.
        _release_impl(
            pyproject_path=pyproject_path,
            changelog_path=changelog_path,
            release_type=release_type,
            message=message,
            preview=dry_run,
            close=close,
        )

    if preview:
        _phase(True)
        return

    # force, or a non-interactive stdin, skips the confirmation dialogue
    # and runs the real release directly.
    if force or not sys.stdin.isatty():
        _phase(False)
        return

    print("[INFO] Running preview before actual release...\n")
    _phase(True)

    try:
        answer = input("Proceed with the actual release? [y/N]: ").strip().lower()
    except (EOFError, KeyboardInterrupt):
        print("\n[INFO] Release aborted (no confirmation).")
        return

    if answer not in ("y", "yes"):
        print("Release aborted by user. No changes were made.")
        return

    print("\n[INFO] Running REAL release...\n")
    _phase(False)
|
||||
|
||||
|
||||
__all__ = ["release"]
|
||||
@@ -1,32 +1,17 @@
|
||||
# pkgmgr/release.py
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
pkgmgr/release.py
|
||||
File and metadata update helpers for the release workflow.
|
||||
|
||||
Release helper for pkgmgr.
|
||||
|
||||
Responsibilities (Milestone 7):
|
||||
- Determine the next semantic version based on existing Git tags.
|
||||
Responsibilities:
|
||||
- Update pyproject.toml with the new version.
|
||||
- Update additional packaging files (flake.nix, PKGBUILD,
|
||||
debian/changelog, RPM spec) where present.
|
||||
- Prepend a basic entry to CHANGELOG.md.
|
||||
- Commit, tag, and push the release on the current branch.
|
||||
|
||||
Additional behaviour:
|
||||
- If `preview=True` (from --preview), no files are written and no
|
||||
Git commands are executed. Instead, a detailed summary of the
|
||||
planned changes and commands is printed.
|
||||
- If `preview=False` and not forced, the release is executed in two
|
||||
phases:
|
||||
1) Preview-only run (dry-run).
|
||||
2) Interactive confirmation, then real release if confirmed.
|
||||
This confirmation can be skipped with the `force=True` flag.
|
||||
- If `close=True` is used and the current branch is not main/master,
|
||||
the branch will be closed via branch_commands.close_branch() after
|
||||
a successful release.
|
||||
- Update flake.nix, PKGBUILD, RPM spec files where present.
|
||||
- Prepend release entries to CHANGELOG.md.
|
||||
- Maintain distribution-specific changelog files:
|
||||
* debian/changelog
|
||||
* RPM spec %changelog section
|
||||
including maintainer metadata where applicable.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
@@ -39,84 +24,6 @@ import tempfile
|
||||
from datetime import date, datetime
|
||||
from typing import Optional, Tuple
|
||||
|
||||
from pkgmgr.git_utils import get_tags, get_current_branch, GitError
|
||||
from pkgmgr.branch_commands import close_branch
|
||||
from pkgmgr.versioning import (
|
||||
SemVer,
|
||||
find_latest_version,
|
||||
bump_major,
|
||||
bump_minor,
|
||||
bump_patch,
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers for Git + version discovery
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _determine_current_version() -> SemVer:
|
||||
"""
|
||||
Determine the current semantic version from Git tags.
|
||||
|
||||
Behaviour:
|
||||
- If there are no tags or no SemVer-compatible tags, return 0.0.0.
|
||||
- Otherwise, use the latest SemVer tag as current version.
|
||||
"""
|
||||
tags = get_tags()
|
||||
if not tags:
|
||||
return SemVer(0, 0, 0)
|
||||
|
||||
latest = find_latest_version(tags)
|
||||
if latest is None:
|
||||
return SemVer(0, 0, 0)
|
||||
|
||||
_tag, ver = latest
|
||||
return ver
|
||||
|
||||
|
||||
def _bump_semver(current: SemVer, release_type: str) -> SemVer:
|
||||
"""
|
||||
Bump the given SemVer according to the release type.
|
||||
|
||||
release_type must be one of: "major", "minor", "patch".
|
||||
"""
|
||||
if release_type == "major":
|
||||
return bump_major(current)
|
||||
if release_type == "minor":
|
||||
return bump_minor(current)
|
||||
if release_type == "patch":
|
||||
return bump_patch(current)
|
||||
|
||||
raise ValueError(f"Unknown release type: {release_type!r}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Low-level Git command helper
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _run_git_command(cmd: str) -> None:
|
||||
"""
|
||||
Run a Git (or shell) command with basic error reporting.
|
||||
|
||||
The command is executed via the shell, primarily for readability
|
||||
when printed (as in 'git commit -am "msg"').
|
||||
"""
|
||||
print(f"[GIT] {cmd}")
|
||||
try:
|
||||
subprocess.run(cmd, shell=True, check=True)
|
||||
except subprocess.CalledProcessError as exc:
|
||||
print(f"[ERROR] Git command failed: {cmd}")
|
||||
print(f" Exit code: {exc.returncode}")
|
||||
if exc.stdout:
|
||||
print("--- stdout ---")
|
||||
print(exc.stdout)
|
||||
if exc.stderr:
|
||||
print("--- stderr ---")
|
||||
print(exc.stderr)
|
||||
raise GitError(f"Git command failed: {cmd}") from exc
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Editor helper for interactive changelog messages
|
||||
@@ -178,7 +85,6 @@ def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
|
||||
# File update helpers (pyproject + extra packaging + changelog)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def update_pyproject_version(
|
||||
pyproject_path: str,
|
||||
new_version: str,
|
||||
@@ -192,13 +98,25 @@ def update_pyproject_version(
|
||||
version = "X.Y.Z"
|
||||
|
||||
and replaces the version part with the given new_version string.
|
||||
|
||||
If the file does not exist, it is skipped without failing the release.
|
||||
"""
|
||||
if not os.path.exists(pyproject_path):
|
||||
print(
|
||||
f"[INFO] pyproject.toml not found at: {pyproject_path}, "
|
||||
"skipping version update."
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
with open(pyproject_path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
except FileNotFoundError:
|
||||
print(f"[ERROR] pyproject.toml not found at: {pyproject_path}")
|
||||
sys.exit(1)
|
||||
except OSError as exc:
|
||||
print(
|
||||
f"[WARN] Could not read pyproject.toml at {pyproject_path}: {exc}. "
|
||||
"Skipping version update."
|
||||
)
|
||||
return
|
||||
|
||||
pattern = r'^(version\s*=\s*")([^"]+)(")'
|
||||
new_content, count = re.subn(
|
||||
@@ -541,221 +459,79 @@ def update_debian_changelog(
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Internal implementation (single-phase, preview or real)
|
||||
# Fedora / RPM spec %changelog helper
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _release_impl(
|
||||
pyproject_path: str = "pyproject.toml",
|
||||
changelog_path: str = "CHANGELOG.md",
|
||||
release_type: str = "patch",
|
||||
def update_spec_changelog(
|
||||
spec_path: str,
|
||||
package_name: str,
|
||||
new_version: str,
|
||||
message: Optional[str] = None,
|
||||
preview: bool = False,
|
||||
close: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Internal implementation that performs a single-phase release.
|
||||
Prepend a new entry to the %changelog section of an RPM spec file,
|
||||
if present.
|
||||
|
||||
Typical RPM-style entry:
|
||||
|
||||
* Tue Dec 09 2025 John Doe <john@example.com> - 0.5.1-1
|
||||
- Your changelog message
|
||||
"""
|
||||
current_ver = _determine_current_version()
|
||||
new_ver = _bump_semver(current_ver, release_type)
|
||||
new_ver_str = str(new_ver)
|
||||
new_tag = new_ver.to_tag(with_prefix=True)
|
||||
|
||||
mode = "PREVIEW" if preview else "REAL"
|
||||
print(f"Release mode: {mode}")
|
||||
print(f"Current version: {current_ver}")
|
||||
print(f"New version: {new_ver_str} ({release_type})")
|
||||
|
||||
repo_root = os.path.dirname(os.path.abspath(pyproject_path))
|
||||
|
||||
update_pyproject_version(pyproject_path, new_ver_str, preview=preview)
|
||||
changelog_message = update_changelog(
|
||||
changelog_path,
|
||||
new_ver_str,
|
||||
message=message,
|
||||
preview=preview,
|
||||
)
|
||||
|
||||
flake_path = os.path.join(repo_root, "flake.nix")
|
||||
update_flake_version(flake_path, new_ver_str, preview=preview)
|
||||
|
||||
pkgbuild_path = os.path.join(repo_root, "PKGBUILD")
|
||||
update_pkgbuild_version(pkgbuild_path, new_ver_str, preview=preview)
|
||||
|
||||
spec_path = os.path.join(repo_root, "package-manager.spec")
|
||||
update_spec_version(spec_path, new_ver_str, preview=preview)
|
||||
|
||||
effective_message: Optional[str] = message
|
||||
if effective_message is None and isinstance(changelog_message, str):
|
||||
if changelog_message.strip():
|
||||
effective_message = changelog_message.strip()
|
||||
|
||||
debian_changelog_path = os.path.join(repo_root, "debian", "changelog")
|
||||
package_name = os.path.basename(repo_root) or "package-manager"
|
||||
update_debian_changelog(
|
||||
debian_changelog_path,
|
||||
package_name=package_name,
|
||||
new_version=new_ver_str,
|
||||
message=effective_message,
|
||||
preview=preview,
|
||||
)
|
||||
|
||||
commit_msg = f"Release version {new_ver_str}"
|
||||
tag_msg = effective_message or commit_msg
|
||||
|
||||
try:
|
||||
branch = get_current_branch() or "main"
|
||||
except GitError:
|
||||
branch = "main"
|
||||
print(f"Releasing on branch: {branch}")
|
||||
|
||||
files_to_add = [
|
||||
pyproject_path,
|
||||
changelog_path,
|
||||
flake_path,
|
||||
pkgbuild_path,
|
||||
spec_path,
|
||||
debian_changelog_path,
|
||||
]
|
||||
existing_files = [p for p in files_to_add if p and os.path.exists(p)]
|
||||
|
||||
if preview:
|
||||
for path in existing_files:
|
||||
print(f"[PREVIEW] Would run: git add {path}")
|
||||
print(f'[PREVIEW] Would run: git commit -am "{commit_msg}"')
|
||||
print(f'[PREVIEW] Would run: git tag -a {new_tag} -m "{tag_msg}"')
|
||||
print(f"[PREVIEW] Would run: git push origin {branch}")
|
||||
print("[PREVIEW] Would run: git push origin --tags")
|
||||
|
||||
if close and branch not in ("main", "master"):
|
||||
print(
|
||||
f"[PREVIEW] Would also close branch {branch} after the release "
|
||||
"(close=True and branch is not main/master)."
|
||||
)
|
||||
elif close:
|
||||
print(
|
||||
f"[PREVIEW] close=True but current branch is {branch}; "
|
||||
"no branch would be closed."
|
||||
)
|
||||
|
||||
print("Preview completed. No changes were made.")
|
||||
if not os.path.exists(spec_path):
|
||||
print("[INFO] RPM spec file not found, skipping spec changelog update.")
|
||||
return
|
||||
|
||||
for path in existing_files:
|
||||
_run_git_command(f"git add {path}")
|
||||
try:
|
||||
with open(spec_path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Could not read spec file for changelog update: {exc}")
|
||||
return
|
||||
|
||||
_run_git_command(f'git commit -am "{commit_msg}"')
|
||||
_run_git_command(f'git tag -a {new_tag} -m "{tag_msg}"')
|
||||
_run_git_command(f"git push origin {branch}")
|
||||
_run_git_command("git push origin --tags")
|
||||
debian_version = f"{new_version}-1"
|
||||
now = datetime.now().astimezone()
|
||||
date_str = now.strftime("%a %b %d %Y")
|
||||
|
||||
print(f"Release {new_ver_str} completed.")
|
||||
# Reuse Debian maintainer discovery for author name/email.
|
||||
author_name, author_email = _get_debian_author()
|
||||
|
||||
if close:
|
||||
if branch in ("main", "master"):
|
||||
print(
|
||||
f"[INFO] close=True but current branch is {branch}; "
|
||||
"nothing to close."
|
||||
)
|
||||
return
|
||||
body_line = message.strip() if message else f"Automated release {new_version}."
|
||||
|
||||
stanza = (
|
||||
f"* {date_str} {author_name} <{author_email}> - {debian_version}\n"
|
||||
f"- {body_line}\n\n"
|
||||
)
|
||||
|
||||
marker = "%changelog"
|
||||
idx = content.find(marker)
|
||||
|
||||
if idx == -1:
|
||||
# No %changelog section yet: append one at the end.
|
||||
new_content = content.rstrip() + "\n\n%changelog\n" + stanza
|
||||
else:
|
||||
# Insert stanza right after the %changelog line.
|
||||
before = content[: idx + len(marker)]
|
||||
after = content[idx + len(marker) :]
|
||||
new_content = before + "\n" + stanza + after.lstrip("\n")
|
||||
|
||||
if preview:
|
||||
print(
|
||||
f"[INFO] Closing branch {branch} after successful release "
|
||||
"(close=True and branch is not main/master)..."
|
||||
)
|
||||
try:
|
||||
close_branch(name=branch, base_branch="main", cwd=".")
|
||||
except Exception as exc: # pragma: no cover
|
||||
print(f"[WARN] Failed to close branch {branch} automatically: {exc}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public release entry point
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def release(
|
||||
pyproject_path: str = "pyproject.toml",
|
||||
changelog_path: str = "CHANGELOG.md",
|
||||
release_type: str = "patch",
|
||||
message: Optional[str] = None,
|
||||
preview: bool = False,
|
||||
force: bool = False,
|
||||
close: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
High-level release entry point.
|
||||
|
||||
Modes:
|
||||
|
||||
- preview=True:
|
||||
* Single-phase PREVIEW only.
|
||||
|
||||
- preview=False, force=True:
|
||||
* Single-phase REAL release, no interactive preview.
|
||||
|
||||
- preview=False, force=False:
|
||||
* Two-phase flow (intended default for interactive CLI use).
|
||||
"""
|
||||
if preview:
|
||||
_release_impl(
|
||||
pyproject_path=pyproject_path,
|
||||
changelog_path=changelog_path,
|
||||
release_type=release_type,
|
||||
message=message,
|
||||
preview=True,
|
||||
close=close,
|
||||
"[PREVIEW] Would update RPM %changelog section with the following "
|
||||
"stanza:\n"
|
||||
f"{stanza}"
|
||||
)
|
||||
return
|
||||
|
||||
if force:
|
||||
_release_impl(
|
||||
pyproject_path=pyproject_path,
|
||||
changelog_path=changelog_path,
|
||||
release_type=release_type,
|
||||
message=message,
|
||||
preview=False,
|
||||
close=close,
|
||||
)
|
||||
return
|
||||
|
||||
if not sys.stdin.isatty():
|
||||
_release_impl(
|
||||
pyproject_path=pyproject_path,
|
||||
changelog_path=changelog_path,
|
||||
release_type=release_type,
|
||||
message=message,
|
||||
preview=False,
|
||||
close=close,
|
||||
)
|
||||
return
|
||||
|
||||
print("[INFO] Running preview before actual release...\n")
|
||||
_release_impl(
|
||||
pyproject_path=pyproject_path,
|
||||
changelog_path=changelog_path,
|
||||
release_type=release_type,
|
||||
message=message,
|
||||
preview=True,
|
||||
close=close,
|
||||
)
|
||||
|
||||
try:
|
||||
answer = input("Proceed with the actual release? [y/N]: ").strip().lower()
|
||||
except (EOFError, KeyboardInterrupt):
|
||||
print("\n[INFO] Release aborted (no confirmation).")
|
||||
with open(spec_path, "w", encoding="utf-8") as f:
|
||||
f.write(new_content)
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Failed to write updated spec changelog section: {exc}")
|
||||
return
|
||||
|
||||
if answer not in ("y", "yes"):
|
||||
print("Release aborted by user. No changes were made.")
|
||||
return
|
||||
|
||||
print("\n[INFO] Running REAL release...\n")
|
||||
_release_impl(
|
||||
pyproject_path=pyproject_path,
|
||||
changelog_path=changelog_path,
|
||||
release_type=release_type,
|
||||
message=message,
|
||||
preview=False,
|
||||
close=close,
|
||||
print(
|
||||
f"Updated RPM %changelog section in {os.path.basename(spec_path)} "
|
||||
f"for {package_name} {debian_version}"
|
||||
)
|
||||
95
pkgmgr/actions/release/git_ops.py
Normal file
95
pkgmgr/actions/release/git_ops.py
Normal file
@@ -0,0 +1,95 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Git-related helpers for the release workflow.
|
||||
|
||||
Responsibilities:
|
||||
- Run Git (or shell) commands with basic error reporting.
|
||||
- Ensure main/master are synchronized with origin before tagging.
|
||||
- Maintain the floating 'latest' tag that always points to the newest
|
||||
release tag.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import subprocess
|
||||
|
||||
from pkgmgr.core.git import GitError
|
||||
|
||||
|
||||
def run_git_command(cmd: str) -> None:
|
||||
"""
|
||||
Run a Git (or shell) command with basic error reporting.
|
||||
|
||||
The command is executed via the shell, primarily for readability
|
||||
when printed (as in 'git commit -am "msg"').
|
||||
"""
|
||||
print(f"[GIT] {cmd}")
|
||||
try:
|
||||
subprocess.run(cmd, shell=True, check=True)
|
||||
except subprocess.CalledProcessError as exc:
|
||||
print(f"[ERROR] Git command failed: {cmd}")
|
||||
print(f" Exit code: {exc.returncode}")
|
||||
if exc.stdout:
|
||||
print("--- stdout ---")
|
||||
print(exc.stdout)
|
||||
if exc.stderr:
|
||||
print("--- stderr ---")
|
||||
print(exc.stderr)
|
||||
raise GitError(f"Git command failed: {cmd}") from exc
|
||||
|
||||
|
||||
def sync_branch_with_remote(branch: str, preview: bool = False) -> None:
|
||||
"""
|
||||
Ensure the local main/master branch is up-to-date before tagging.
|
||||
|
||||
Behaviour:
|
||||
- For main/master: run 'git fetch origin' and 'git pull origin <branch>'.
|
||||
- For all other branches: only log that no automatic sync is performed.
|
||||
"""
|
||||
if branch not in ("main", "master"):
|
||||
print(
|
||||
f"[INFO] Skipping automatic git pull for non-main/master branch "
|
||||
f"{branch}."
|
||||
)
|
||||
return
|
||||
|
||||
print(
|
||||
f"[INFO] Updating branch {branch} from origin before creating tags..."
|
||||
)
|
||||
|
||||
if preview:
|
||||
print("[PREVIEW] Would run: git fetch origin")
|
||||
print(f"[PREVIEW] Would run: git pull origin {branch}")
|
||||
return
|
||||
|
||||
run_git_command("git fetch origin")
|
||||
run_git_command(f"git pull origin {branch}")
|
||||
|
||||
|
||||
def update_latest_tag(new_tag: str, preview: bool = False) -> None:
|
||||
"""
|
||||
Move the floating 'latest' tag to the newly created release tag.
|
||||
|
||||
Implementation details:
|
||||
- We explicitly dereference the tag object via `<tag>^{}` so that
|
||||
'latest' always points at the underlying commit, not at another tag.
|
||||
- We create/update 'latest' as an annotated tag with a short message so
|
||||
Git configurations that enforce annotated/signed tags do not fail
|
||||
with "no tag message".
|
||||
"""
|
||||
target_ref = f"{new_tag}^{{}}"
|
||||
print(f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})...")
|
||||
|
||||
if preview:
|
||||
print(f"[PREVIEW] Would run: git tag -f -a latest {target_ref} "
|
||||
f'-m "Floating latest tag for {new_tag}"')
|
||||
print("[PREVIEW] Would run: git push origin latest --force")
|
||||
return
|
||||
|
||||
run_git_command(
|
||||
f'git tag -f -a latest {target_ref} '
|
||||
f'-m "Floating latest tag for {new_tag}"'
|
||||
)
|
||||
run_git_command("git push origin latest --force")
|
||||
53
pkgmgr/actions/release/versioning.py
Normal file
53
pkgmgr/actions/release/versioning.py
Normal file
@@ -0,0 +1,53 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Version discovery and bumping helpers for the release workflow.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pkgmgr.core.git import get_tags
|
||||
from pkgmgr.core.version.semver import (
|
||||
SemVer,
|
||||
find_latest_version,
|
||||
bump_major,
|
||||
bump_minor,
|
||||
bump_patch,
|
||||
)
|
||||
|
||||
|
||||
def determine_current_version() -> SemVer:
|
||||
"""
|
||||
Determine the current semantic version from Git tags.
|
||||
|
||||
Behaviour:
|
||||
- If there are no tags or no SemVer-compatible tags, return 0.0.0.
|
||||
- Otherwise, use the latest SemVer tag as current version.
|
||||
"""
|
||||
tags = get_tags()
|
||||
if not tags:
|
||||
return SemVer(0, 0, 0)
|
||||
|
||||
latest = find_latest_version(tags)
|
||||
if latest is None:
|
||||
return SemVer(0, 0, 0)
|
||||
|
||||
_tag, ver = latest
|
||||
return ver
|
||||
|
||||
|
||||
def bump_semver(current: SemVer, release_type: str) -> SemVer:
|
||||
"""
|
||||
Bump the given SemVer according to the release type.
|
||||
|
||||
release_type must be one of: "major", "minor", "patch".
|
||||
"""
|
||||
if release_type == "major":
|
||||
return bump_major(current)
|
||||
if release_type == "minor":
|
||||
return bump_minor(current)
|
||||
if release_type == "patch":
|
||||
return bump_patch(current)
|
||||
|
||||
raise ValueError(f"Unknown release type: {release_type!r}")
|
||||
@@ -1,8 +1,8 @@
|
||||
import subprocess
|
||||
import os
|
||||
from pkgmgr.get_repo_dir import get_repo_dir
|
||||
from pkgmgr.get_repo_identifier import get_repo_identifier
|
||||
from pkgmgr.verify import verify_repository
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.verify import verify_repository
|
||||
|
||||
def clone_repos(
|
||||
selected_repos,
|
||||
@@ -2,8 +2,8 @@ import os
|
||||
import subprocess
|
||||
import sys
|
||||
import yaml
|
||||
from pkgmgr.generate_alias import generate_alias
|
||||
from pkgmgr.save_user_config import save_user_config
|
||||
from pkgmgr.core.command.alias import generate_alias
|
||||
from pkgmgr.core.config.save import save_user_config
|
||||
|
||||
def create_repo(identifier, config_merged, user_config_path, bin_dir, remote=False, preview=False):
|
||||
"""
|
||||
@@ -1,7 +1,7 @@
|
||||
import os
|
||||
import sys
|
||||
from pkgmgr.get_repo_identifier import get_repo_identifier
|
||||
from pkgmgr.get_repo_dir import get_repo_dir
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
|
||||
def deinstall_repos(selected_repos, repositories_base_dir, bin_dir, all_repos, preview=False):
|
||||
for repo in selected_repos:
|
||||
@@ -1,7 +1,7 @@
|
||||
import shutil
|
||||
import os
|
||||
from pkgmgr.get_repo_identifier import get_repo_identifier
|
||||
from pkgmgr.get_repo_dir import get_repo_dir
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
|
||||
def delete_repos(selected_repos, repositories_base_dir, all_repos, preview=False):
|
||||
for repo in selected_repos:
|
||||
77
pkgmgr/actions/repository/pull.py
Normal file
77
pkgmgr/actions/repository/pull.py
Normal file
@@ -0,0 +1,77 @@
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.repository.verify import verify_repository
|
||||
|
||||
|
||||
def pull_with_verification(
|
||||
selected_repos,
|
||||
repositories_base_dir,
|
||||
all_repos,
|
||||
extra_args,
|
||||
no_verification,
|
||||
preview: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Execute `git pull` for each repository with verification.
|
||||
|
||||
- Uses verify_repository() in "pull" mode.
|
||||
- If verification fails (and verification info is set) and
|
||||
--no-verification is not enabled, the user is prompted to confirm
|
||||
the pull.
|
||||
- In preview mode, no interactive prompts are performed and no
|
||||
Git commands are executed; only the would-be command is printed.
|
||||
"""
|
||||
for repo in selected_repos:
|
||||
repo_identifier = get_repo_identifier(repo, all_repos)
|
||||
repo_dir = get_repo_dir(repositories_base_dir, repo)
|
||||
|
||||
if not os.path.exists(repo_dir):
|
||||
print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")
|
||||
continue
|
||||
|
||||
verified_info = repo.get("verified")
|
||||
verified_ok, errors, commit_hash, signing_key = verify_repository(
|
||||
repo,
|
||||
repo_dir,
|
||||
mode="pull",
|
||||
no_verification=no_verification,
|
||||
)
|
||||
|
||||
# Only prompt the user if:
|
||||
# - we are NOT in preview mode
|
||||
# - verification is enabled
|
||||
# - the repo has verification info configured
|
||||
# - verification failed
|
||||
if (
|
||||
not preview
|
||||
and not no_verification
|
||||
and verified_info
|
||||
and not verified_ok
|
||||
):
|
||||
print(f"Warning: Verification failed for {repo_identifier}:")
|
||||
for err in errors:
|
||||
print(f" - {err}")
|
||||
choice = input("Proceed with 'git pull'? (y/N): ").strip().lower()
|
||||
if choice != "y":
|
||||
continue
|
||||
|
||||
# Build the git pull command (include extra args if present)
|
||||
args_part = " ".join(extra_args) if extra_args else ""
|
||||
full_cmd = f"git pull{(' ' + args_part) if args_part else ''}"
|
||||
|
||||
if preview:
|
||||
# Preview mode: only show the command, do not execute or prompt.
|
||||
print(f"[Preview] In '{repo_dir}': {full_cmd}")
|
||||
else:
|
||||
print(f"Running in '{repo_dir}': {full_cmd}")
|
||||
result = subprocess.run(full_cmd, cwd=repo_dir, shell=True)
|
||||
if result.returncode != 0:
|
||||
print(
|
||||
f"'git pull' for {repo_identifier} failed "
|
||||
f"with exit code {result.returncode}."
|
||||
)
|
||||
sys.exit(result.returncode)
|
||||
@@ -1,9 +1,9 @@
|
||||
import sys
|
||||
import shutil
|
||||
|
||||
from .exec_proxy_command import exec_proxy_command
|
||||
from .run_command import run_command
|
||||
from .get_repo_identifier import get_repo_identifier
|
||||
from pkgmgr.actions.proxy import exec_proxy_command
|
||||
from pkgmgr.core.command.run import run_command
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
|
||||
|
||||
def status_repos(
|
||||
@@ -1,8 +1,8 @@
|
||||
import sys
|
||||
import shutil
|
||||
|
||||
from pkgmgr.pull_with_verification import pull_with_verification
|
||||
from pkgmgr.install_repos import install_repos
|
||||
from pkgmgr.actions.repository.pull import pull_with_verification
|
||||
from pkgmgr.actions.install import install_repos
|
||||
|
||||
|
||||
def update_repos(
|
||||
@@ -54,7 +54,7 @@ def update_repos(
|
||||
)
|
||||
|
||||
if system_update:
|
||||
from pkgmgr.run_command import run_command
|
||||
from pkgmgr.core.command.run import run_command
|
||||
|
||||
# Nix: upgrade all profile entries (if Nix is available)
|
||||
if shutil.which("nix") is not None:
|
||||
14
pkgmgr/cli.py → pkgmgr/cli/__init__.py
Executable file → Normal file
14
pkgmgr/cli.py → pkgmgr/cli/__init__.py
Executable file → Normal file
@@ -1,13 +1,16 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pkgmgr.load_config import load_config
|
||||
from pkgmgr.cli_core import CLIContext, create_parser, dispatch_command
|
||||
from pkgmgr.core.config.load import load_config
|
||||
|
||||
from .context import CLIContext
|
||||
from .parser import create_parser
|
||||
from .dispatch import dispatch_command
|
||||
|
||||
__all__ = ["CLIContext", "create_parser", "dispatch_command", "main"]
|
||||
|
||||
|
||||
# User config lives in the home directory:
|
||||
# ~/.config/pkgmgr/config.yaml
|
||||
@@ -31,7 +34,6 @@ dependency formats, including:
|
||||
• \033[1;33mNix:\033[0m flake.nix
|
||||
• \033[1;33mArch Linux:\033[0m PKGBUILD
|
||||
• \033[1;33mAnsible:\033[0m requirements.yml
|
||||
• \033[1;33mpkgmgr-native:\033[0m pkgmgr.yml
|
||||
|
||||
This allows pkgmgr to perform installation, updates, verification, dependency
|
||||
resolution, and synchronization across complex multi-repo environments — with a
|
||||
@@ -1,10 +1,9 @@
|
||||
# pkgmgr/cli_core/commands/branch.py
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
|
||||
from pkgmgr.cli_core.context import CLIContext
|
||||
from pkgmgr.branch_commands import open_branch, close_branch
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.actions.branch import open_branch, close_branch
|
||||
|
||||
|
||||
def handle_branch(args, ctx: CLIContext) -> None:
|
||||
@@ -4,12 +4,12 @@ import os
|
||||
import sys
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
from pkgmgr.cli_core.context import CLIContext
|
||||
from pkgmgr.get_repo_dir import get_repo_dir
|
||||
from pkgmgr.get_repo_identifier import get_repo_identifier
|
||||
from pkgmgr.git_utils import get_tags
|
||||
from pkgmgr.versioning import SemVer, extract_semver_from_tags
|
||||
from pkgmgr.changelog import generate_changelog
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.git import get_tags
|
||||
from pkgmgr.core.version.semver import SemVer, extract_semver_from_tags
|
||||
from pkgmgr.actions.changelog import generate_changelog
|
||||
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
@@ -11,13 +11,13 @@ from typing import Any, Dict
|
||||
|
||||
import yaml
|
||||
|
||||
from pkgmgr.cli_core.context import CLIContext
|
||||
from pkgmgr.config_init import config_init
|
||||
from pkgmgr.interactive_add import interactive_add
|
||||
from pkgmgr.resolve_repos import resolve_repos
|
||||
from pkgmgr.save_user_config import save_user_config
|
||||
from pkgmgr.show_config import show_config
|
||||
from pkgmgr.run_command import run_command
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.actions.config.init import config_init
|
||||
from pkgmgr.actions.config.add import interactive_add
|
||||
from pkgmgr.core.repository.resolve import resolve_repos
|
||||
from pkgmgr.core.config.save import save_user_config
|
||||
from pkgmgr.actions.config.show import show_config
|
||||
from pkgmgr.core.command.run import run_command
|
||||
|
||||
|
||||
def _load_user_config(user_config_path: str) -> Dict[str, Any]:
|
||||
@@ -3,8 +3,8 @@ from __future__ import annotations
|
||||
import sys
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pkgmgr.cli_core.context import CLIContext
|
||||
from pkgmgr.exec_proxy_command import exec_proxy_command
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.actions.proxy import exec_proxy_command
|
||||
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
@@ -1,4 +1,3 @@
|
||||
# pkgmgr/cli_core/commands/release.py
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
@@ -6,14 +5,14 @@
|
||||
Release command wiring for the pkgmgr CLI.
|
||||
|
||||
This module implements the `pkgmgr release` subcommand on top of the
|
||||
generic selection logic from cli_core.dispatch. It does not define its
|
||||
own subparser; the CLI surface is configured in cli_core.parser.
|
||||
generic selection logic from cli.dispatch. It does not define its
|
||||
own subparser; the CLI surface is configured in cli.parser.
|
||||
|
||||
Responsibilities:
|
||||
- Take the parsed argparse.Namespace for the `release` command.
|
||||
- Use the list of selected repositories provided by dispatch_command().
|
||||
- Optionally list affected repositories when --list is set.
|
||||
- For each selected repository, run pkgmgr.release.release(...) in
|
||||
- For each selected repository, run pkgmgr.actions.release.release(...) in
|
||||
the context of that repository directory.
|
||||
"""
|
||||
|
||||
@@ -22,10 +21,10 @@ from __future__ import annotations
|
||||
import os
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pkgmgr.cli_core.context import CLIContext
|
||||
from pkgmgr.get_repo_dir import get_repo_dir
|
||||
from pkgmgr.get_repo_identifier import get_repo_identifier
|
||||
from pkgmgr.release import release as run_release
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.actions.release import release as run_release
|
||||
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
@@ -46,7 +45,7 @@ def handle_release(
|
||||
3) For each selected repository:
|
||||
- Resolve its identifier and local directory.
|
||||
- Change into that directory.
|
||||
- Call pkgmgr.release.release(...) with the parsed options.
|
||||
- Call pkgmgr.actions.release.release(...) with the parsed options.
|
||||
"""
|
||||
if not selected:
|
||||
print("[pkgmgr] No repositories selected for release.")
|
||||
@@ -6,20 +6,46 @@ from __future__ import annotations
|
||||
import sys
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pkgmgr.cli_core.context import CLIContext
|
||||
from pkgmgr.install_repos import install_repos
|
||||
from pkgmgr.deinstall_repos import deinstall_repos
|
||||
from pkgmgr.delete_repos import delete_repos
|
||||
from pkgmgr.update_repos import update_repos
|
||||
from pkgmgr.status_repos import status_repos
|
||||
from pkgmgr.list_repositories import list_repositories
|
||||
from pkgmgr.run_command import run_command
|
||||
from pkgmgr.create_repo import create_repo
|
||||
from pkgmgr.get_selected_repos import get_selected_repos
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.actions.install import install_repos
|
||||
from pkgmgr.actions.repository.deinstall import deinstall_repos
|
||||
from pkgmgr.actions.repository.delete import delete_repos
|
||||
from pkgmgr.actions.repository.update import update_repos
|
||||
from pkgmgr.actions.repository.status import status_repos
|
||||
from pkgmgr.actions.repository.list import list_repositories
|
||||
from pkgmgr.core.command.run import run_command
|
||||
from pkgmgr.actions.repository.create import create_repo
|
||||
from pkgmgr.core.repository.selected import get_selected_repos
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
|
||||
def _resolve_repository_directory(repository: Repository, ctx: CLIContext) -> str:
|
||||
"""
|
||||
Resolve the local filesystem directory for a repository.
|
||||
|
||||
Priority:
|
||||
1. Use repository["directory"] if present.
|
||||
2. Fallback to get_repo_dir(...) using the repositories base directory
|
||||
from the CLI context.
|
||||
"""
|
||||
repo_dir = repository.get("directory")
|
||||
if repo_dir:
|
||||
return repo_dir
|
||||
|
||||
base_dir = (
|
||||
getattr(ctx, "repositories_base_dir", None)
|
||||
or getattr(ctx, "repositories_dir", None)
|
||||
)
|
||||
if not base_dir:
|
||||
raise RuntimeError(
|
||||
"Cannot resolve repositories base directory from context; "
|
||||
"expected ctx.repositories_base_dir or ctx.repositories_dir."
|
||||
)
|
||||
return get_repo_dir(base_dir, repository)
|
||||
|
||||
|
||||
def handle_repos_command(
|
||||
args,
|
||||
ctx: CLIContext,
|
||||
@@ -108,8 +134,25 @@ def handle_repos_command(
|
||||
# path
|
||||
# ------------------------------------------------------------
|
||||
if args.command == "path":
|
||||
if not selected:
|
||||
print("[pkgmgr] No repositories selected for path.")
|
||||
return
|
||||
|
||||
for repository in selected:
|
||||
print(repository["directory"])
|
||||
try:
|
||||
repo_dir = _resolve_repository_directory(repository, ctx)
|
||||
except Exception as exc:
|
||||
ident = (
|
||||
f"{repository.get('provider', '?')}/"
|
||||
f"{repository.get('account', '?')}/"
|
||||
f"{repository.get('repository', '?')}"
|
||||
)
|
||||
print(
|
||||
f"[WARN] Could not resolve directory for {ident}: {exc}"
|
||||
)
|
||||
continue
|
||||
|
||||
print(repo_dir)
|
||||
return
|
||||
|
||||
# ------------------------------------------------------------
|
||||
@@ -119,14 +162,14 @@ def handle_repos_command(
|
||||
if not args.shell_command:
|
||||
print("[ERROR] 'shell' requires a command via -c/--command.")
|
||||
sys.exit(2)
|
||||
|
||||
command_to_run = " ".join(args.shell_command)
|
||||
for repository in selected:
|
||||
print(
|
||||
f"Executing in '{repository['directory']}': {command_to_run}"
|
||||
)
|
||||
repo_dir = _resolve_repository_directory(repository, ctx)
|
||||
print(f"Executing in '{repo_dir}': {command_to_run}")
|
||||
run_command(
|
||||
command_to_run,
|
||||
cwd=repository["directory"],
|
||||
cwd=repo_dir,
|
||||
preview=args.preview,
|
||||
)
|
||||
return
|
||||
115
pkgmgr/cli/commands/tools.py
Normal file
115
pkgmgr/cli/commands/tools.py
Normal file
@@ -0,0 +1,115 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pkgmgr .cli .context import CLIContext
|
||||
from pkgmgr .core .command .run import run_command
|
||||
from pkgmgr .core .repository .identifier import get_repo_identifier
|
||||
from pkgmgr .core .repository .dir import get_repo_dir
|
||||
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
|
||||
def _resolve_repository_path(repository: Repository, ctx: CLIContext) -> str:
|
||||
"""
|
||||
Resolve the filesystem path for a repository.
|
||||
|
||||
Priority:
|
||||
1. Use explicit keys if present (directory / path / workspace / workspace_dir).
|
||||
2. Fallback to get_repo_dir(...) using the repositories base directory
|
||||
from the CLI context.
|
||||
"""
|
||||
|
||||
# 1) Explicit path-like keys on the repository object
|
||||
for key in ("directory", "path", "workspace", "workspace_dir"):
|
||||
value = repository.get(key)
|
||||
if value:
|
||||
return value
|
||||
|
||||
# 2) Fallback: compute from base dir + repository metadata
|
||||
base_dir = (
|
||||
getattr(ctx, "repositories_base_dir", None)
|
||||
or getattr(ctx, "repositories_dir", None)
|
||||
)
|
||||
if not base_dir:
|
||||
raise RuntimeError(
|
||||
"Cannot resolve repositories base directory from context; "
|
||||
"expected ctx.repositories_base_dir or ctx.repositories_dir."
|
||||
)
|
||||
|
||||
return get_repo_dir(base_dir, repository)
|
||||
|
||||
|
||||
def handle_tools_command(
|
||||
args,
|
||||
ctx: CLIContext,
|
||||
selected: List[Repository],
|
||||
) -> None:
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# nautilus "explore" command
|
||||
# ------------------------------------------------------------------
|
||||
if args.command == "explore":
|
||||
for repository in selected:
|
||||
repo_path = _resolve_repository_path(repository, ctx)
|
||||
run_command(
|
||||
f'nautilus "{repo_path}" & disown'
|
||||
)
|
||||
return
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# GNOME terminal command
|
||||
# ------------------------------------------------------------------
|
||||
if args.command == "terminal":
|
||||
for repository in selected:
|
||||
repo_path = _resolve_repository_path(repository, ctx)
|
||||
run_command(
|
||||
f'gnome-terminal --tab --working-directory="{repo_path}"'
|
||||
)
|
||||
return
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# VS Code workspace command
|
||||
# ------------------------------------------------------------------
|
||||
if args.command == "code":
|
||||
if not selected:
|
||||
print("No repositories selected.")
|
||||
return
|
||||
|
||||
identifiers = [
|
||||
get_repo_identifier(repo, ctx.all_repositories)
|
||||
for repo in selected
|
||||
]
|
||||
sorted_identifiers = sorted(identifiers)
|
||||
workspace_name = "_".join(sorted_identifiers) + ".code-workspace"
|
||||
|
||||
directories_cfg = ctx.config_merged.get("directories") or {}
|
||||
workspaces_dir = os.path.expanduser(
|
||||
directories_cfg.get("workspaces", "~/Workspaces")
|
||||
)
|
||||
os.makedirs(workspaces_dir, exist_ok=True)
|
||||
workspace_file = os.path.join(workspaces_dir, workspace_name)
|
||||
|
||||
folders = [
|
||||
{"path": _resolve_repository_path(repository, ctx)}
|
||||
for repository in selected
|
||||
]
|
||||
|
||||
workspace_data = {
|
||||
"folders": folders,
|
||||
"settings": {},
|
||||
}
|
||||
|
||||
if not os.path.exists(workspace_file):
|
||||
with open(workspace_file, "w", encoding="utf-8") as f:
|
||||
json.dump(workspace_data, f, indent=4)
|
||||
print(f"Created workspace file: {workspace_file}")
|
||||
else:
|
||||
print(f"Using existing workspace file: {workspace_file}")
|
||||
|
||||
run_command(f'code "{workspace_file}"')
|
||||
return
|
||||
@@ -4,12 +4,12 @@ import os
|
||||
import sys
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
from pkgmgr.cli_core.context import CLIContext
|
||||
from pkgmgr.get_repo_dir import get_repo_dir
|
||||
from pkgmgr.get_repo_identifier import get_repo_identifier
|
||||
from pkgmgr.git_utils import get_tags
|
||||
from pkgmgr.versioning import SemVer, find_latest_version
|
||||
from pkgmgr.version_sources import (
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.git import get_tags
|
||||
from pkgmgr.core.version.semver import SemVer, find_latest_version
|
||||
from pkgmgr.core.version.source import (
|
||||
read_pyproject_version,
|
||||
read_flake_version,
|
||||
read_pkgbuild_version,
|
||||
@@ -7,12 +7,12 @@ import os
|
||||
import sys
|
||||
from typing import List, Dict, Any
|
||||
|
||||
from pkgmgr.cli_core.context import CLIContext
|
||||
from pkgmgr.cli_core.proxy import maybe_handle_proxy
|
||||
from pkgmgr.get_selected_repos import get_selected_repos
|
||||
from pkgmgr.get_repo_dir import get_repo_dir
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.cli.proxy import maybe_handle_proxy
|
||||
from pkgmgr.core.repository.selected import get_selected_repos
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
|
||||
from pkgmgr.cli_core.commands import (
|
||||
from pkgmgr.cli.commands import (
|
||||
handle_repos_command,
|
||||
handle_tools_command,
|
||||
handle_release,
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
|
||||
from pkgmgr.cli_core.proxy import register_proxy_commands
|
||||
from pkgmgr.cli.proxy import register_proxy_commands
|
||||
|
||||
|
||||
class SortedSubParsersAction(argparse._SubParsersAction):
|
||||
@@ -8,12 +8,12 @@ import os
|
||||
import sys
|
||||
from typing import Dict, List, Any
|
||||
|
||||
from pkgmgr.cli_core.context import CLIContext
|
||||
from pkgmgr.clone_repos import clone_repos
|
||||
from pkgmgr.exec_proxy_command import exec_proxy_command
|
||||
from pkgmgr.pull_with_verification import pull_with_verification
|
||||
from pkgmgr.get_selected_repos import get_selected_repos
|
||||
from pkgmgr.get_repo_dir import get_repo_dir
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.actions.repository.clone import clone_repos
|
||||
from pkgmgr.actions.proxy import exec_proxy_command
|
||||
from pkgmgr.actions.repository.pull import pull_with_verification
|
||||
from pkgmgr.core.repository.selected import get_selected_repos
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
|
||||
|
||||
PROXY_COMMANDS: Dict[str, List[str]] = {
|
||||
@@ -1,5 +0,0 @@
|
||||
from .context import CLIContext
|
||||
from .parser import create_parser
|
||||
from .dispatch import dispatch_command
|
||||
|
||||
__all__ = ["CLIContext", "create_parser", "dispatch_command"]
|
||||
@@ -1,83 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pkgmgr.cli_core.context import CLIContext
|
||||
from pkgmgr.run_command import run_command
|
||||
from pkgmgr.get_repo_identifier import get_repo_identifier
|
||||
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
|
||||
def handle_tools_command(
|
||||
args,
|
||||
ctx: CLIContext,
|
||||
selected: List[Repository],
|
||||
) -> None:
|
||||
"""
|
||||
Handle integration commands:
|
||||
- explore (file manager)
|
||||
- terminal (GNOME Terminal)
|
||||
- code (VS Code workspace)
|
||||
"""
|
||||
|
||||
# --------------------------------------------------------
|
||||
# explore
|
||||
# --------------------------------------------------------
|
||||
if args.command == "explore":
|
||||
for repository in selected:
|
||||
run_command(
|
||||
f"nautilus {repository['directory']} & disown"
|
||||
)
|
||||
return
|
||||
|
||||
# --------------------------------------------------------
|
||||
# terminal
|
||||
# --------------------------------------------------------
|
||||
if args.command == "terminal":
|
||||
for repository in selected:
|
||||
run_command(
|
||||
f'gnome-terminal --tab --working-directory="{repository["directory"]}"'
|
||||
)
|
||||
return
|
||||
|
||||
# --------------------------------------------------------
|
||||
# code
|
||||
# --------------------------------------------------------
|
||||
if args.command == "code":
|
||||
if not selected:
|
||||
print("No repositories selected.")
|
||||
return
|
||||
|
||||
identifiers = [
|
||||
get_repo_identifier(repo, ctx.all_repositories)
|
||||
for repo in selected
|
||||
]
|
||||
sorted_identifiers = sorted(identifiers)
|
||||
workspace_name = "_".join(sorted_identifiers) + ".code-workspace"
|
||||
|
||||
workspaces_dir = os.path.expanduser(
|
||||
ctx.config_merged.get("directories").get("workspaces")
|
||||
)
|
||||
os.makedirs(workspaces_dir, exist_ok=True)
|
||||
workspace_file = os.path.join(workspaces_dir, workspace_name)
|
||||
|
||||
folders = [{"path": repository["directory"]} for repository in selected]
|
||||
|
||||
workspace_data = {
|
||||
"folders": folders,
|
||||
"settings": {},
|
||||
}
|
||||
if not os.path.exists(workspace_file):
|
||||
with open(workspace_file, "w") as f:
|
||||
json.dump(workspace_data, f, indent=4)
|
||||
print(f"Created workspace file: {workspace_file}")
|
||||
else:
|
||||
print(f"Using existing workspace file: {workspace_file}")
|
||||
|
||||
run_command(f'code "{workspace_file}"')
|
||||
return
|
||||
@@ -2,12 +2,18 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
from pkgmgr.get_repo_identifier import get_repo_identifier
|
||||
from pkgmgr.get_repo_dir import get_repo_dir
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
|
||||
|
||||
def create_ink(repo, repositories_base_dir, bin_dir, all_repos,
|
||||
quiet=False, preview=False):
|
||||
def create_ink(
|
||||
repo,
|
||||
repositories_base_dir,
|
||||
bin_dir,
|
||||
all_repos,
|
||||
quiet: bool = False,
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Create a symlink for the repository's command.
|
||||
|
||||
@@ -18,6 +24,11 @@ def create_ink(repo, repositories_base_dir, bin_dir, all_repos,
|
||||
Behavior:
|
||||
- If repo["command"] is defined → create a symlink to it.
|
||||
- If repo["command"] is missing or None → do NOT create a link.
|
||||
|
||||
Safety:
|
||||
- If the resolved command path is identical to the final link target,
|
||||
we skip symlink creation to avoid self-referential symlinks that
|
||||
would break shell resolution ("too many levels of symbolic links").
|
||||
"""
|
||||
|
||||
repo_identifier = get_repo_identifier(repo, all_repos)
|
||||
@@ -31,6 +42,27 @@ def create_ink(repo, repositories_base_dir, bin_dir, all_repos,
|
||||
|
||||
link_path = os.path.join(bin_dir, repo_identifier)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Safety guard: avoid self-referential symlinks
|
||||
#
|
||||
# Example of a broken situation we must avoid:
|
||||
# - command = ~/.local/bin/package-manager
|
||||
# - link_path = ~/.local/bin/package-manager
|
||||
# - create_ink() removes the real binary and creates a symlink
|
||||
# pointing to itself → zsh: too many levels of symbolic links
|
||||
#
|
||||
# If the resolved command already lives exactly at the target path,
|
||||
# we treat it as "already installed" and skip any modification.
|
||||
# ------------------------------------------------------------------
|
||||
if os.path.abspath(command) == os.path.abspath(link_path):
|
||||
if not quiet:
|
||||
print(
|
||||
f"[pkgmgr] Command for '{repo_identifier}' already lives at "
|
||||
f"'{link_path}'. Skipping symlink creation to avoid a "
|
||||
"self-referential link."
|
||||
)
|
||||
return
|
||||
|
||||
if preview:
|
||||
print(f"[Preview] Would link {link_path} → {command}")
|
||||
return
|
||||
@@ -65,7 +97,10 @@ def create_ink(repo, repositories_base_dir, bin_dir, all_repos,
|
||||
|
||||
if alias_name == repo_identifier:
|
||||
if not quiet:
|
||||
print(f"Alias '{alias_name}' equals identifier. Skipping alias creation.")
|
||||
print(
|
||||
f"Alias '{alias_name}' equals identifier. "
|
||||
"Skipping alias creation."
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
207
pkgmgr/core/command/resolve.py
Normal file
207
pkgmgr/core/command/resolve.py
Normal file
@@ -0,0 +1,207 @@
|
||||
import os
|
||||
import shutil
|
||||
from typing import Optional, List, Dict, Any
|
||||
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
|
||||
def _is_executable(path: str) -> bool:
|
||||
return os.path.exists(path) and os.access(path, os.X_OK)
|
||||
|
||||
|
||||
def _find_python_package_root(repo_dir: str) -> Optional[str]:
|
||||
"""
|
||||
Detect a Python src-layout package:
|
||||
|
||||
repo_dir/src/<package>/__main__.py
|
||||
|
||||
Returns the directory containing __main__.py (e.g. ".../src/arc")
|
||||
or None if no such structure exists.
|
||||
"""
|
||||
src_dir = os.path.join(repo_dir, "src")
|
||||
if not os.path.isdir(src_dir):
|
||||
return None
|
||||
|
||||
for root, _dirs, files in os.walk(src_dir):
|
||||
if "__main__.py" in files:
|
||||
return root
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _nix_binary_candidates(home: str, names: List[str]) -> List[str]:
|
||||
"""
|
||||
Build possible Nix profile binary paths for a list of candidate names.
|
||||
"""
|
||||
return [
|
||||
os.path.join(home, ".nix-profile", "bin", name)
|
||||
for name in names
|
||||
if name
|
||||
]
|
||||
|
||||
|
||||
def _path_binary_candidates(names: List[str]) -> List[str]:
|
||||
"""
|
||||
Resolve candidate names via PATH using shutil.which.
|
||||
Returns only existing, executable paths.
|
||||
"""
|
||||
binaries: List[str] = []
|
||||
for name in names:
|
||||
if not name:
|
||||
continue
|
||||
candidate = shutil.which(name)
|
||||
if candidate and _is_executable(candidate):
|
||||
binaries.append(candidate)
|
||||
return binaries
|
||||
|
||||
|
||||
def resolve_command_for_repo(
|
||||
repo: Repository,
|
||||
repo_identifier: str,
|
||||
repo_dir: str,
|
||||
) -> Optional[str]:
|
||||
"""
|
||||
Resolve the executable command for a repository.
|
||||
|
||||
Semantics:
|
||||
----------
|
||||
- If the repository explicitly defines the key "command" (even if None),
|
||||
that is treated as authoritative and returned immediately.
|
||||
This allows e.g.:
|
||||
|
||||
command: null
|
||||
|
||||
for pure library repositories with no CLI.
|
||||
|
||||
- If "command" is not defined, we try to discover a suitable CLI command:
|
||||
1. Prefer already installed binaries (PATH, Nix profile).
|
||||
2. For Python src-layout packages (src/*/__main__.py), try to infer
|
||||
a sensible command name (alias, repo identifier, repository name,
|
||||
package directory name) and resolve those via PATH / Nix.
|
||||
3. For script-style repos, fall back to main.sh / main.py.
|
||||
4. If nothing matches, return None (no CLI) instead of raising.
|
||||
|
||||
The caller can interpret:
|
||||
- str → path to the command (symlink target)
|
||||
- None → no CLI command for this repository
|
||||
"""
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 1) Explicit command declaration (including explicit "no command")
|
||||
# ------------------------------------------------------------------
|
||||
if "command" in repo:
|
||||
# May be a string path or None. None means: this repo intentionally
|
||||
# has no CLI command and should not be resolved.
|
||||
return repo.get("command")
|
||||
|
||||
home = os.path.expanduser("~")
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 2) Collect candidate names for CLI binaries
|
||||
#
|
||||
# Order of preference:
|
||||
# - repo_identifier (usually alias or configured id)
|
||||
# - alias (if defined)
|
||||
# - repository name (e.g. "analysis-ready-code")
|
||||
# - python package name (e.g. "arc" from src/arc/__main__.py)
|
||||
# ------------------------------------------------------------------
|
||||
alias = repo.get("alias")
|
||||
repository_name = repo.get("repository")
|
||||
|
||||
python_package_root = _find_python_package_root(repo_dir)
|
||||
if python_package_root:
|
||||
python_package_name = os.path.basename(python_package_root)
|
||||
else:
|
||||
python_package_name = None
|
||||
|
||||
candidate_names: List[str] = []
|
||||
seen: set[str] = set()
|
||||
|
||||
for name in (
|
||||
repo_identifier,
|
||||
alias,
|
||||
repository_name,
|
||||
python_package_name,
|
||||
):
|
||||
if name and name not in seen:
|
||||
seen.add(name)
|
||||
candidate_names.append(name)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 3) Try resolve via PATH (non-system and system) and Nix profile
|
||||
# ------------------------------------------------------------------
|
||||
# a) PATH binaries
|
||||
path_binaries = _path_binary_candidates(candidate_names)
|
||||
|
||||
# b) Classify system (/usr/...) vs non-system
|
||||
system_binary: Optional[str] = None
|
||||
non_system_binary: Optional[str] = None
|
||||
|
||||
for bin_path in path_binaries:
|
||||
if bin_path.startswith("/usr"):
|
||||
# Last system binary wins, but usually there is only one anyway
|
||||
system_binary = bin_path
|
||||
else:
|
||||
non_system_binary = bin_path
|
||||
break # prefer the first non-system binary
|
||||
|
||||
# c) Nix profile binaries
|
||||
nix_binaries = [
|
||||
path for path in _nix_binary_candidates(home, candidate_names)
|
||||
if _is_executable(path)
|
||||
]
|
||||
nix_binary = nix_binaries[0] if nix_binaries else None
|
||||
|
||||
# Decide priority:
|
||||
# 1) non-system PATH binary (user/venv)
|
||||
# 2) Nix profile binary
|
||||
# 3) system binary (/usr/...) → only if we want to expose it
|
||||
if non_system_binary:
|
||||
return non_system_binary
|
||||
|
||||
if nix_binary:
|
||||
return nix_binary
|
||||
|
||||
if system_binary:
|
||||
# Respect system packages. Depending on your policy you can decide
|
||||
# to return None (no symlink, OS owns the command) or to expose it.
|
||||
# Here we choose: no symlink for pure system binaries.
|
||||
if repo.get("ignore_system_binary", False):
|
||||
print(
|
||||
f"[pkgmgr] System binary for '{repo_identifier}' found at "
|
||||
f"{system_binary}; no symlink will be created."
|
||||
)
|
||||
return None
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 4) Script-style repository: fallback to main.sh / main.py
|
||||
# ------------------------------------------------------------------
|
||||
main_sh = os.path.join(repo_dir, "main.sh")
|
||||
main_py = os.path.join(repo_dir, "main.py")
|
||||
|
||||
if _is_executable(main_sh):
|
||||
return main_sh
|
||||
|
||||
if os.path.exists(main_py):
|
||||
return main_py
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 5) No CLI discovered
|
||||
#
|
||||
# At this point we may still have a Python package structure, but
|
||||
# without any installed CLI entry point and without main.sh/main.py.
|
||||
#
|
||||
# This is perfectly valid for library-only repositories, so we do
|
||||
# NOT treat this as an error. The caller can then decide to simply
|
||||
# skip symlink creation.
|
||||
# ------------------------------------------------------------------
|
||||
if python_package_root:
|
||||
print(
|
||||
f"[INFO] Repository '{repo_identifier}' appears to be a Python "
|
||||
f"package at '{python_package_root}' but no CLI entry point was "
|
||||
f"found (PATH, Nix, main.sh/main.py). Treating it as a "
|
||||
f"library-only repository with no command."
|
||||
)
|
||||
|
||||
return None
|
||||
0
pkgmgr/core/config/__init__.py
Normal file
0
pkgmgr/core/config/__init__.py
Normal file
0
pkgmgr/core/repository/__init__.py
Normal file
0
pkgmgr/core/repository/__init__.py
Normal file
@@ -7,7 +7,8 @@ import os
|
||||
import re
|
||||
from typing import Any, Dict, List, Sequence
|
||||
|
||||
from pkgmgr.resolve_repos import resolve_repos
|
||||
from pkgmgr.core.repository.resolve import resolve_repos
|
||||
from pkgmgr.core.repository.ignored import filter_ignored
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
@@ -88,7 +89,7 @@ def _apply_filters(
|
||||
if not _match_pattern(ident_str, string_pattern):
|
||||
continue
|
||||
|
||||
# Category filter: nur echte Kategorien, KEINE Tags
|
||||
# Category filter: only real categories, NOT tags
|
||||
if category_patterns:
|
||||
cats: List[str] = []
|
||||
cats.extend(map(str, repo.get("category_files", [])))
|
||||
@@ -106,7 +107,7 @@ def _apply_filters(
|
||||
if not ok:
|
||||
continue
|
||||
|
||||
# Tag filter: ausschließlich YAML-Tags
|
||||
# Tag filter: YAML tags only
|
||||
if tag_patterns:
|
||||
tags: List[str] = list(map(str, repo.get("tags", [])))
|
||||
if not tags:
|
||||
@@ -124,16 +125,38 @@ def _apply_filters(
|
||||
|
||||
return filtered
|
||||
|
||||
|
||||
def _maybe_filter_ignored(args, repos: List[Repository]) -> List[Repository]:
|
||||
"""
|
||||
Apply ignore filtering unless the caller explicitly opted to include ignored
|
||||
repositories (via args.include_ignored).
|
||||
|
||||
Note: this helper is used only for *implicit* selections (all / filters /
|
||||
by-directory). For *explicit* identifiers we do NOT filter ignored repos,
|
||||
so the user can still target them directly if desired.
|
||||
"""
|
||||
include_ignored: bool = bool(getattr(args, "include_ignored", False))
|
||||
if include_ignored:
|
||||
return repos
|
||||
return filter_ignored(repos)
|
||||
|
||||
|
||||
def get_selected_repos(args, all_repositories: List[Repository]) -> List[Repository]:
|
||||
"""
|
||||
Compute the list of repositories selected by CLI arguments.
|
||||
|
||||
Modes:
|
||||
- If identifiers are given: select via resolve_repos() from all_repositories.
|
||||
- Else if any of --category/--string/--tag is used: start from all_repositories
|
||||
and apply filters.
|
||||
- Else if --all is set: select all_repositories.
|
||||
- Else: try to select the repository of the current working directory.
|
||||
Ignored repositories are *not* filtered here, so explicit identifiers
|
||||
always win.
|
||||
- Else if any of --category/--string/--tag is used: start from
|
||||
all_repositories, apply filters and then drop ignored repos.
|
||||
- Else if --all is set: select all_repositories and then drop ignored repos.
|
||||
- Else: try to select the repository of the current working directory
|
||||
and then drop it if it is ignored.
|
||||
|
||||
The ignore filter can be bypassed by setting args.include_ignored = True
|
||||
(e.g. via a CLI flag --include-ignored).
|
||||
"""
|
||||
identifiers: List[str] = getattr(args, "identifiers", []) or []
|
||||
use_all: bool = bool(getattr(args, "all", False))
|
||||
@@ -143,18 +166,25 @@ def get_selected_repos(args, all_repositories: List[Repository]) -> List[Reposit
|
||||
|
||||
has_filters = bool(category_patterns or string_pattern or tag_patterns)
|
||||
|
||||
# 1) Explicit identifiers win
|
||||
# 1) Explicit identifiers win and bypass ignore filtering
|
||||
if identifiers:
|
||||
base = resolve_repos(identifiers, all_repositories)
|
||||
return _apply_filters(base, string_pattern, category_patterns, tag_patterns)
|
||||
|
||||
# 2) Filter-only mode: start from all repositories
|
||||
if has_filters:
|
||||
return _apply_filters(list(all_repositories), string_pattern, category_patterns, tag_patterns)
|
||||
base = _apply_filters(
|
||||
list(all_repositories),
|
||||
string_pattern,
|
||||
category_patterns,
|
||||
tag_patterns,
|
||||
)
|
||||
return _maybe_filter_ignored(args, base)
|
||||
|
||||
# 3) --all (no filters): all repos
|
||||
if use_all:
|
||||
return list(all_repositories)
|
||||
base = list(all_repositories)
|
||||
return _maybe_filter_ignored(args, base)
|
||||
|
||||
# 4) Fallback: try to select repository of current working directory
|
||||
cwd = os.path.abspath(os.getcwd())
|
||||
@@ -164,7 +194,7 @@ def get_selected_repos(args, all_repositories: List[Repository]) -> List[Reposit
|
||||
if os.path.abspath(str(repo.get("directory", ""))) == cwd
|
||||
]
|
||||
if by_dir:
|
||||
return by_dir
|
||||
return _maybe_filter_ignored(args, by_dir)
|
||||
|
||||
# No specific match -> empty list
|
||||
return []
|
||||
0
pkgmgr/core/version/__init__.py
Normal file
0
pkgmgr/core/version/__init__.py
Normal file
@@ -1,294 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Repository installation pipeline for pkgmgr.
|
||||
|
||||
This module orchestrates the installation of repositories by:
|
||||
|
||||
1. Ensuring the repository directory exists (cloning if necessary).
|
||||
2. Verifying the repository according to the configured policies.
|
||||
3. Creating executable links using create_ink(), after resolving the
|
||||
appropriate command via resolve_command_for_repo().
|
||||
4. Running a sequence of modular installer components that handle
|
||||
specific technologies or manifests (PKGBUILD, Nix flakes, Python
|
||||
via pyproject.toml, Makefile, OS-specific package metadata).
|
||||
|
||||
The goal is to keep this file thin and delegate most logic to small,
|
||||
focused installer classes.
|
||||
"""
|
||||
|
||||
import os
|
||||
from typing import List, Dict, Any
|
||||
|
||||
from pkgmgr.get_repo_identifier import get_repo_identifier
|
||||
from pkgmgr.get_repo_dir import get_repo_dir
|
||||
from pkgmgr.create_ink import create_ink
|
||||
from pkgmgr.verify import verify_repository
|
||||
from pkgmgr.clone_repos import clone_repos
|
||||
from pkgmgr.context import RepoContext
|
||||
from pkgmgr.resolve_command import resolve_command_for_repo
|
||||
|
||||
# Installer implementations
|
||||
from pkgmgr.installers.os_packages import (
|
||||
ArchPkgbuildInstaller,
|
||||
DebianControlInstaller,
|
||||
RpmSpecInstaller,
|
||||
)
|
||||
from pkgmgr.installers.nix_flake import NixFlakeInstaller
|
||||
from pkgmgr.installers.python import PythonInstaller
|
||||
from pkgmgr.installers.makefile import MakefileInstaller
|
||||
|
||||
|
||||
# Layering:
|
||||
# 1) OS packages: PKGBUILD / debian/control / RPM spec → os-deps.*
|
||||
# 2) Nix flakes (flake.nix) → e.g. python-runtime, make-install
|
||||
# 3) Python (pyproject.toml) → e.g. python-runtime, make-install
|
||||
# 4) Makefile fallback → e.g. make-install
|
||||
INSTALLERS = [
|
||||
ArchPkgbuildInstaller(), # Arch
|
||||
DebianControlInstaller(), # Debian/Ubuntu
|
||||
RpmSpecInstaller(), # Fedora/RHEL/CentOS
|
||||
NixFlakeInstaller(), # flake.nix (Nix layer)
|
||||
PythonInstaller(), # pyproject.toml
|
||||
MakefileInstaller(), # generic 'make install'
|
||||
]
|
||||
|
||||
|
||||
def _ensure_repo_dir(
|
||||
repo: Dict[str, Any],
|
||||
repositories_base_dir: str,
|
||||
all_repos: List[Dict[str, Any]],
|
||||
preview: bool,
|
||||
no_verification: bool,
|
||||
clone_mode: str,
|
||||
identifier: str,
|
||||
) -> str:
|
||||
"""
|
||||
Ensure the repository directory exists. If not, attempt to clone it.
|
||||
|
||||
Returns the repository directory path or an empty string if cloning failed.
|
||||
"""
|
||||
repo_dir = get_repo_dir(repositories_base_dir, repo)
|
||||
|
||||
if not os.path.exists(repo_dir):
|
||||
print(f"Repository directory '{repo_dir}' does not exist. Cloning it now...")
|
||||
clone_repos(
|
||||
[repo],
|
||||
repositories_base_dir,
|
||||
all_repos,
|
||||
preview,
|
||||
no_verification,
|
||||
clone_mode,
|
||||
)
|
||||
if not os.path.exists(repo_dir):
|
||||
print(f"Cloning failed for repository {identifier}. Skipping installation.")
|
||||
return ""
|
||||
|
||||
return repo_dir
|
||||
|
||||
|
||||
def _verify_repo(
    repo: Dict[str, Any],
    repo_dir: str,
    no_verification: bool,
    identifier: str,
) -> bool:
    """
    Verify the repository using verify_repository().

    Returns True when installation should proceed; False when verification
    failed and the user declined to continue.
    """
    has_verified_info = repo.get("verified")
    ok, problems, _commit_hash, _signing_key = verify_repository(
        repo,
        repo_dir,
        mode="local",
        no_verification=no_verification,
    )

    # Only prompt when verification info exists, verification is enabled,
    # and the check actually failed.
    needs_prompt = not no_verification and has_verified_info and not ok
    if not needs_prompt:
        return True

    print(f"Warning: Verification failed for {identifier}:")
    for problem in problems:
        print(f" - {problem}")

    answer = input("Proceed with installation? (y/N): ").strip().lower()
    if answer == "y":
        return True

    print(f"Skipping installation for {identifier}.")
    return False
|
||||
|
||||
|
||||
def _create_context(
    repo: Dict[str, Any],
    identifier: str,
    repo_dir: str,
    repositories_base_dir: str,
    bin_dir: str,
    all_repos: List[Dict[str, Any]],
    no_verification: bool,
    preview: bool,
    quiet: bool,
    clone_mode: str,
    update_dependencies: bool,
) -> RepoContext:
    """
    Assemble the RepoContext handed to installers for a single repository.

    Every parameter is forwarded verbatim; this helper only exists to keep
    install_repos() readable.
    """
    context_fields = dict(
        repo=repo,
        identifier=identifier,
        repo_dir=repo_dir,
        repositories_base_dir=repositories_base_dir,
        bin_dir=bin_dir,
        all_repos=all_repos,
        no_verification=no_verification,
        preview=preview,
        quiet=quiet,
        clone_mode=clone_mode,
        update_dependencies=update_dependencies,
    )
    return RepoContext(**context_fields)
|
||||
|
||||
|
||||
def install_repos(
    selected_repos: List[Dict[str, Any]],
    repositories_base_dir: str,
    bin_dir: str,
    all_repos: List[Dict[str, Any]],
    no_verification: bool,
    preview: bool,
    quiet: bool,
    clone_mode: str,
    update_dependencies: bool,
) -> None:
    """
    Install repositories by creating symbolic links and processing standard
    manifest files (PKGBUILD, flake.nix, Python manifests, Makefile, etc.)
    via dedicated installer components.

    Parameters:
        selected_repos: Repositories chosen for installation.
        repositories_base_dir: Base directory holding the checkouts.
        bin_dir: Directory in which command symlinks are created.
        all_repos: Full repository list (used for identifier resolution).
        no_verification: Skip signature/commit verification when True.
        preview: Print commands instead of executing them.
        quiet: Suppress informational output.
        clone_mode: Clone strategy forwarded to clone_repos().
        update_dependencies: Forwarded to installers via RepoContext.

    Any installer failure (SystemExit) is treated as fatal and will abort
    the current installation.
    """
    for repo in selected_repos:
        identifier = get_repo_identifier(repo, all_repos)

        # Ensure the checkout exists (cloning on demand); skip on failure.
        repo_dir = _ensure_repo_dir(
            repo=repo,
            repositories_base_dir=repositories_base_dir,
            all_repos=all_repos,
            preview=preview,
            no_verification=no_verification,
            clone_mode=clone_mode,
            identifier=identifier,
        )
        if not repo_dir:
            continue

        # Skip the repo when verification fails and the user declines.
        if not _verify_repo(
            repo=repo,
            repo_dir=repo_dir,
            no_verification=no_verification,
            identifier=identifier,
        ):
            continue

        ctx = _create_context(
            repo=repo,
            identifier=identifier,
            repo_dir=repo_dir,
            repositories_base_dir=repositories_base_dir,
            bin_dir=bin_dir,
            all_repos=all_repos,
            no_verification=no_verification,
            preview=preview,
            quiet=quiet,
            clone_mode=clone_mode,
            update_dependencies=update_dependencies,
        )

        # ------------------------------------------------------------
        # Resolve the command for this repository before creating the link.
        # If no command is resolved, no link will be created.
        # ------------------------------------------------------------
        resolved_command = resolve_command_for_repo(
            repo=repo,
            repo_identifier=identifier,
            repo_dir=repo_dir,
        )

        if resolved_command:
            repo["command"] = resolved_command
        else:
            repo.pop("command", None)

        # ------------------------------------------------------------
        # Create the symlink using create_ink (if a command is set).
        # ------------------------------------------------------------
        create_ink(
            repo,
            repositories_base_dir,
            bin_dir,
            all_repos,
            quiet=quiet,
            preview=preview,
        )

        # Track which logical capabilities have already been provided by
        # earlier installers for this repository. This allows us to skip
        # installers that would only duplicate work (e.g. Python runtime
        # already provided by Nix flake → skip pyproject/Makefile).
        provided_capabilities: set[str] = set()

        # Run all installers that support this repository, but only if they
        # provide at least one capability that is not yet satisfied.
        for installer in INSTALLERS:
            if not installer.supports(ctx):
                continue

            caps = installer.discover_capabilities(ctx)

            # If the installer declares capabilities and *all* of them are
            # already provided, we can safely skip it.
            if caps and caps.issubset(provided_capabilities):
                if not quiet:
                    print(
                        f"Skipping installer {installer.__class__.__name__} "
                        f"for {identifier} – capabilities {caps} already provided."
                    )
                continue

            # ------------------------------------------------------------
            # Debug output + clear error if an installer fails
            # ------------------------------------------------------------
            if not quiet:
                print(
                    f"[pkgmgr] Running installer {installer.__class__.__name__} "
                    f"for {identifier} in '{repo_dir}' "
                    f"(new capabilities: {caps or '∅'})..."
                )

            try:
                installer.run(ctx)
            except SystemExit as exc:
                exit_code = exc.code if isinstance(exc.code, int) else str(exc.code)

                print(
                    f"[ERROR] Installer {installer.__class__.__name__} failed "
                    f"for repository {identifier} (dir: {repo_dir}) "
                    f"with exit code {exit_code}."
                )
                print(
                    "[ERROR] This usually means an underlying command failed "
                    "(e.g. 'make install', 'nix build', 'pip install', ...)."
                )
                print(
                    "[ERROR] Check the log above for the exact command output. "
                    "You can also run this repository in isolation via:\n"
                    f"  pkgmgr install {identifier} --clone-mode shallow --no-verification"
                )

                # Re-raise so that CLI/tests fail clearly,
                # but now with much more context.
                raise

            # Only merge capabilities if the installer succeeded.
            # FIX: guard the merge — the '{caps or "∅"}' log above already
            # treats caps as possibly falsy; if discover_capabilities()
            # returns None, set.update(None) would raise TypeError.
            if caps:
                provided_capabilities.update(caps)
|
||||
@@ -1,19 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Installer package for pkgmgr.
|
||||
|
||||
This exposes all installer classes so users can import them directly from
|
||||
pkgmgr.installers.
|
||||
"""
|
||||
|
||||
from pkgmgr.installers.base import BaseInstaller # noqa: F401
|
||||
from pkgmgr.installers.nix_flake import NixFlakeInstaller # noqa: F401
|
||||
from pkgmgr.installers.python import PythonInstaller # noqa: F401
|
||||
from pkgmgr.installers.makefile import MakefileInstaller # noqa: F401
|
||||
|
||||
# OS-specific installers
|
||||
from pkgmgr.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller # noqa: F401
|
||||
from pkgmgr.installers.os_packages.debian_control import DebianControlInstaller # noqa: F401
|
||||
from pkgmgr.installers.os_packages.rpm_spec import RpmSpecInstaller # noqa: F401
|
||||
@@ -1,93 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Installer that triggers `make install` if a Makefile is present and
|
||||
the Makefile actually defines an 'install' target.
|
||||
|
||||
This is useful for repositories that expose a standard Makefile-based
|
||||
installation step.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from pkgmgr.context import RepoContext
|
||||
from pkgmgr.installers.base import BaseInstaller
|
||||
from pkgmgr.run_command import run_command
|
||||
|
||||
|
||||
class MakefileInstaller(BaseInstaller):
    """Run `make install` if a Makefile with an 'install' target exists."""

    # Logical layer name, used by capability matchers.
    layer = "makefile"

    MAKEFILE_NAME = "Makefile"

    def supports(self, ctx: RepoContext) -> bool:
        """Return True if a Makefile exists in the repository directory."""
        makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
        return os.path.exists(makefile_path)

    def _has_install_target(self, makefile_path: str) -> bool:
        """
        Check whether the Makefile defines an 'install' target.

        We treat the presence of a real install target as either:
        - a line starting with 'install:' (optionally preceded by whitespace), or
        - a .PHONY line that lists 'install' as one of the targets.

        Returns False when the Makefile cannot be read.
        """
        try:
            with open(makefile_path, "r", encoding="utf-8", errors="ignore") as f:
                content = f.read()
        except OSError:
            # If we cannot read the Makefile for some reason, assume no target.
            return False

        # install: ...
        # FIX: the negative lookahead (?!=) excludes simply-expanded
        # variable assignments such as 'install := value', which also
        # begin with 'install' followed by ':' and previously produced
        # a false positive.
        if re.search(r"^\s*install\s*:(?!=)", content, flags=re.MULTILINE):
            return True

        # .PHONY: ... install ...
        if re.search(r"^\s*\.PHONY\s*:\s*.*\binstall\b", content, flags=re.MULTILINE):
            return True

        return False

    def run(self, ctx: RepoContext) -> None:
        """
        Execute `make install` in the repository directory, but only if an
        'install' target is actually defined in the Makefile.

        Any failure in `make install` is treated as a fatal error and will
        propagate as SystemExit from run_command().
        """
        makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)

        if not os.path.exists(makefile_path):
            # Should normally not happen if supports() was checked before,
            # but keep this guard for robustness.
            if not ctx.quiet:
                print(
                    f"[pkgmgr] Makefile '{makefile_path}' not found, "
                    "skipping make install."
                )
            return

        if not self._has_install_target(makefile_path):
            if not ctx.quiet:
                print(
                    "[pkgmgr] Skipping Makefile install: no 'install' target "
                    f"found in {makefile_path}."
                )
            return

        if not ctx.quiet:
            print(
                f"[pkgmgr] Running 'make install' in {ctx.repo_dir} "
                "(install target detected in Makefile)."
            )

        cmd = "make install"
        run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
@@ -1,106 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Installer for Nix flakes.
|
||||
|
||||
If a repository contains flake.nix and the 'nix' command is available, this
|
||||
installer will try to install profile outputs from the flake.
|
||||
|
||||
Behavior:
|
||||
- If flake.nix is present and `nix` exists on PATH:
|
||||
* First remove any existing `package-manager` profile entry (best-effort).
|
||||
* Then install the flake outputs (`pkgmgr`, `default`) via `nix profile install`.
|
||||
- Failure installing `pkgmgr` is treated as fatal.
|
||||
- Failure installing `default` is logged as an error/warning but does not abort.
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pkgmgr.installers.base import BaseInstaller
|
||||
from pkgmgr.run_command import run_command
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pkgmgr.context import RepoContext
|
||||
from pkgmgr.install_repos import InstallContext
|
||||
|
||||
|
||||
class NixFlakeInstaller(BaseInstaller):
    """Install Nix flake profiles for repositories that define flake.nix."""

    # Logical layer name, used by capability matchers.
    layer = "nix"

    FLAKE_FILE = "flake.nix"
    PROFILE_NAME = "package-manager"

    def supports(self, ctx: "RepoContext") -> bool:
        """
        A repository is supported only when the `nix` binary is on PATH
        AND the repository ships a flake.nix.
        """
        nix_available = shutil.which("nix") is not None
        if not nix_available:
            return False
        return os.path.exists(os.path.join(ctx.repo_dir, self.FLAKE_FILE))

    def _ensure_old_profile_removed(self, ctx: "RepoContext") -> None:
        """
        Best-effort removal of an existing `package-manager` profile entry.

        This handles the "already provides the following file" conflict by
        clearing previous installations before installing the new one. Any
        SystemExit raised by run_command is deliberately swallowed — a
        missing profile entry is not fatal (unit tests rely on this).
        """
        if shutil.which("nix") is None:
            return

        removal_cmd = f"nix profile remove {self.PROFILE_NAME} || true"
        try:
            # NOTE: no allow_failure here → matches the existing unit tests
            run_command(removal_cmd, cwd=ctx.repo_dir, preview=ctx.preview)
        except SystemExit:
            # Unit tests explicitly assert this is swallowed
            pass

    def run(self, ctx: "InstallContext") -> None:
        """
        Install the flake profile outputs ('pkgmgr', then 'default').

        A failure installing `pkgmgr` is fatal (SystemExit re-raised);
        a failure installing `default` is logged but tolerated.
        """
        # Reuse supports() to keep the gating logic in one place.
        if not self.supports(ctx):  # type: ignore[arg-type]
            return

        print("Nix flake detected, attempting to install profile outputs...")

        # Clear any stale profile entry before installing fresh outputs.
        self._ensure_old_profile_removed(ctx)  # type: ignore[arg-type]

        for output in ("pkgmgr", "default"):
            install_cmd = f"nix profile install {ctx.repo_dir}#{output}"
            # 'default' may fail without aborting the whole process.
            tolerate_failure = output == "default"

            try:
                run_command(
                    install_cmd,
                    cwd=ctx.repo_dir,
                    preview=ctx.preview,
                    allow_failure=tolerate_failure,
                )
                print(f"Nix flake output '{output}' successfully installed.")
            except SystemExit as e:
                print(f"[Error] Failed to install Nix flake output '{output}': {e}")
                if output == "pkgmgr":
                    # Broken main CLI install → fatal
                    raise
                # For 'default' we log and continue
                print(
                    "[Warning] Continuing despite failure to install 'default' "
                    "because 'pkgmgr' is already installed."
                )
                break
|
||||
@@ -1,160 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Installer for RPM-based packages defined in *.spec files.
|
||||
|
||||
This installer:
|
||||
|
||||
1. Installs build dependencies via dnf/yum builddep (where available)
|
||||
2. Uses rpmbuild to build RPMs from the provided .spec file
|
||||
3. Installs the resulting RPMs via `rpm -i`
|
||||
|
||||
It targets RPM-based systems (Fedora / RHEL / CentOS / Rocky / Alma, etc.).
|
||||
"""
|
||||
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from typing import List, Optional
|
||||
|
||||
from pkgmgr.context import RepoContext
|
||||
from pkgmgr.installers.base import BaseInstaller
|
||||
from pkgmgr.run_command import run_command
|
||||
|
||||
|
||||
class RpmSpecInstaller(BaseInstaller):
    """
    Build and install RPM-based packages from *.spec files.

    This installer is responsible for the full build + install of the
    application on RPM-like systems.
    """

    # Logical layer name, used by capability matchers.
    layer = "os-packages"

    def _is_rpm_like(self) -> bool:
        """
        Basic RPM-like detection:

        - rpmbuild must be available
        - at least one of dnf / yum / yum-builddep must be present
        """
        if shutil.which("rpmbuild") is None:
            return False

        has_dnf = shutil.which("dnf") is not None
        has_yum = shutil.which("yum") is not None
        has_yum_builddep = shutil.which("yum-builddep") is not None

        return has_dnf or has_yum or has_yum_builddep

    def _spec_path(self, ctx: RepoContext) -> Optional[str]:
        """Return the first *.spec file in the repository root, if any."""
        pattern = os.path.join(ctx.repo_dir, "*.spec")
        matches = sorted(glob.glob(pattern))
        if not matches:
            return None
        return matches[0]

    def supports(self, ctx: RepoContext) -> bool:
        """
        This installer is supported if:
        - we are on an RPM-based system (rpmbuild + dnf/yum/yum-builddep available), and
        - a *.spec file exists in the repository root.
        """
        if not self._is_rpm_like():
            return False

        return self._spec_path(ctx) is not None

    def _find_built_rpms(self) -> List[str]:
        """
        Find RPMs built by rpmbuild.

        By default, rpmbuild outputs RPMs into:
            ~/rpmbuild/RPMS/*/*.rpm
        """
        home = os.path.expanduser("~")
        pattern = os.path.join(home, "rpmbuild", "RPMS", "**", "*.rpm")
        return sorted(glob.glob(pattern, recursive=True))

    def _install_build_dependencies(self, ctx: RepoContext, spec_path: str) -> None:
        """
        Install build dependencies for the given .spec file.

        Strategy (best-effort):

        1. If dnf is available:        sudo dnf builddep -y <spec>
        2. Else if yum-builddep:       sudo yum-builddep -y <spec>
        3. Else if yum is available:   sudo yum-builddep -y <spec>  (plugin)
        4. Otherwise: print a warning and skip automatic builddep install.

        Any failure in builddep installation is treated as fatal (SystemExit),
        consistent with other installer steps.
        """
        # Local import: stdlib only, used to quote file names for the shell.
        import shlex

        # FIX: quote the spec name so file names containing spaces or
        # shell metacharacters survive the shell command line intact.
        spec_basename = shlex.quote(os.path.basename(spec_path))

        if shutil.which("dnf") is not None:
            cmd = f"sudo dnf builddep -y {spec_basename}"
        elif shutil.which("yum-builddep") is not None:
            cmd = f"sudo yum-builddep -y {spec_basename}"
        elif shutil.which("yum") is not None:
            # Some distributions ship yum-builddep as a plugin.
            cmd = f"sudo yum-builddep -y {spec_basename}"
        else:
            print(
                "[Warning] No suitable RPM builddep tool (dnf/yum-builddep/yum) found. "
                "Skipping automatic build dependency installation for RPM."
            )
            return

        # Run builddep in the repository directory so relative spec paths work.
        run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)

    def run(self, ctx: RepoContext) -> None:
        """
        Build and install RPM-based packages.

        Steps:
        1. dnf/yum builddep <spec>   (automatic build dependency installation)
        2. rpmbuild -ba path/to/spec
        3. sudo rpm -i ~/rpmbuild/RPMS/*/*.rpm
        """
        # Local import: stdlib only, used to quote file names for the shell.
        import shlex

        spec_path = self._spec_path(ctx)
        if not spec_path:
            return

        # 1) Install build dependencies
        self._install_build_dependencies(ctx, spec_path)

        # 2) Build RPMs
        # Use the spec basename (quoted), but run in the repo directory.
        # FIX: quoting protects against spaces/metacharacters in the name.
        spec_basename = shlex.quote(os.path.basename(spec_path))
        build_cmd = f"rpmbuild -ba {spec_basename}"
        run_command(build_cmd, cwd=ctx.repo_dir, preview=ctx.preview)

        # 3) Find built RPMs
        rpms = self._find_built_rpms()
        if not rpms:
            print(
                "[Warning] No RPM files found after rpmbuild. "
                "Skipping RPM package installation."
            )
            return

        # 4) Install RPMs
        if shutil.which("rpm") is None:
            print(
                "[Warning] rpm binary not found on PATH. "
                "Cannot install built RPMs."
            )
            return

        # FIX: quote each RPM path — they live under the user's home
        # directory, which may contain spaces, and were previously joined
        # into the shell command line unquoted.
        install_cmd = "sudo rpm -i " + " ".join(shlex.quote(r) for r in rpms)
        run_command(install_cmd, cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
@@ -1,68 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Installer for Python projects based on pyproject.toml.
|
||||
|
||||
Strategy:
|
||||
- Determine a pip command in this order:
|
||||
1. $PKGMGR_PIP (explicit override, e.g. ~/.venvs/pkgmgr/bin/pip)
|
||||
2. sys.executable -m pip (current interpreter)
|
||||
3. "pip" from PATH as last resort
|
||||
- If pyproject.toml exists: pip install .
|
||||
|
||||
All installation failures are treated as fatal errors (SystemExit).
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pkgmgr.installers.base import BaseInstaller
|
||||
from pkgmgr.run_command import run_command
|
||||
|
||||
|
||||
class PythonInstaller(BaseInstaller):
    """Install Python projects and dependencies via pip."""

    # Logical layer name, used by capability matchers.
    layer = "python"

    def supports(self, ctx) -> bool:
        """
        Return True if this installer should handle the given repository.

        Only pyproject.toml is supported as the single source of truth
        for Python dependencies and packaging metadata.
        """
        manifest = os.path.join(ctx.repo_dir, "pyproject.toml")
        return os.path.exists(manifest)

    def _pip_cmd(self) -> str:
        """
        Resolve the pip invocation, in priority order:
        1. $PKGMGR_PIP (explicit override)
        2. the current interpreter via '-m pip'
        3. bare 'pip' from PATH as last resort
        """
        override = os.environ.get("PKGMGR_PIP", "").strip()
        if override:
            return override
        return f"{sys.executable} -m pip" if sys.executable else "pip"

    def run(self, ctx) -> None:
        """
        Install the Python project defined via pyproject.toml.

        Any pip failure is propagated as SystemExit.
        """
        manifest = os.path.join(ctx.repo_dir, "pyproject.toml")
        if not os.path.exists(manifest):
            return

        print(
            f"pyproject.toml found in {ctx.identifier}, "
            f"installing Python project..."
        )
        install_cmd = f"{self._pip_cmd()} install ."
        run_command(install_cmd, cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
@@ -1,48 +0,0 @@
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from pkgmgr.get_repo_identifier import get_repo_identifier
|
||||
from pkgmgr.get_repo_dir import get_repo_dir
|
||||
from pkgmgr.verify import verify_repository
|
||||
|
||||
def pull_with_verification(
    selected_repos,
    repositories_base_dir,
    all_repos,
    extra_args,
    no_verification,
    preview: bool):
    """
    Executes "git pull" for each repository with verification.

    Uses the verify_repository function in "pull" mode.
    If verification fails (and verification info is set) and --no-verification is not enabled,
    the user is prompted to confirm the pull.

    Parameters:
        selected_repos: Repositories to pull.
        repositories_base_dir: Base directory containing the checkouts.
        all_repos: Full repository list (for identifier resolution).
        extra_args: Extra CLI arguments forwarded to `git pull`.
        no_verification: Skip verification prompts when True.
        preview: Print the command instead of executing it.

    Exits the process with git's return code on the first failed pull.
    """
    # Local import: stdlib only, used to quote pass-through arguments.
    import shlex

    for repo in selected_repos:
        repo_identifier = get_repo_identifier(repo, all_repos)
        repo_dir = get_repo_dir(repositories_base_dir, repo)
        if not os.path.exists(repo_dir):
            print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")
            continue

        verified_info = repo.get("verified")
        # commit hash / signing key are unused here; only the ok flag and
        # error list drive the prompt below.
        verified_ok, errors, _commit_hash, _signing_key = verify_repository(
            repo, repo_dir, mode="pull", no_verification=no_verification
        )

        if not no_verification and verified_info and not verified_ok:
            print(f"Warning: Verification failed for {repo_identifier}:")
            for err in errors:
                print(f" - {err}")
            choice = input("Proceed with 'git pull'? (y/N): ").strip().lower()
            if choice != "y":
                continue

        # FIX: quote each pass-through argument so values containing
        # whitespace or shell metacharacters survive shell=True intact
        # (a plain ' '.join() would let the shell split/expand them).
        full_cmd = f"git pull {shlex.join(extra_args)}"
        if preview:
            print(f"[Preview] In '{repo_dir}': {full_cmd}")
        else:
            print(f"Running in '{repo_dir}': {full_cmd}")
            result = subprocess.run(full_cmd, cwd=repo_dir, shell=True)
            if result.returncode != 0:
                print(f"'git pull' for {repo_identifier} failed with exit code {result.returncode}.")
                sys.exit(result.returncode)
|
||||
@@ -1,113 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Command resolver for repositories.
|
||||
|
||||
This module determines the correct command to expose via symlink.
|
||||
It implements the following priority:
|
||||
|
||||
1. Explicit command in repo config → command
|
||||
2. System package manager binary (/usr/...) → NO LINK (respect OS)
|
||||
3. Nix profile binary (~/.nix-profile/bin/<id>) → command
|
||||
4. Python / non-system console script on PATH → command
|
||||
5. Fallback: repository's main.sh or main.py → command
|
||||
6. If nothing is available → raise error
|
||||
|
||||
The actual symlink creation is handled by create_ink(). This resolver
|
||||
only decides *what* should be used as the entrypoint, or whether no
|
||||
link should be created at all.
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def resolve_command_for_repo(repo, repo_identifier: str, repo_dir: str) -> Optional[str]:
    """
    Determine the command entrypoint for this repository.

    Resolution order:
      1. Explicit 'command' from the repo config
      2. System-managed binary under /usr/ → return None (defer to the OS)
      3. Nix profile binary (~/.nix-profile/bin/<identifier>)
      4. Non-system console script found on PATH (venv, ~/.local/bin, ...)
      5. main.sh / main.py inside the repository
      6. Nothing found → SystemExit with a descriptive message

    Returns:
        str  → path to the command (a symlink should be created)
        None → do NOT create a link (system package already provides it)
    """
    # 1. Explicit command defined by repository config wins outright.
    configured = repo.get("command")
    if configured:
        return configured

    def _runnable(candidate: str) -> bool:
        """True when the path exists and is executable by us."""
        return os.path.exists(candidate) and os.access(candidate, os.X_OK)

    # 2. PATH lookup. Binaries under /usr/ are considered system-managed
    #    (pacman/apt/yum); pkgmgr creates no link and defers to the OS.
    located = shutil.which(repo_identifier)
    if located and located.startswith("/usr/"):
        if repo.get("debug", False):
            print(
                f"[pkgmgr] System binary for '{repo_identifier}' found at "
                f"{located}; no symlink will be created."
            )
        return None

    # 3. Nix profile binary takes precedence over other user-local scripts.
    nix_bin = os.path.join(
        os.path.expanduser("~"), ".nix-profile", "bin", repo_identifier
    )
    if _runnable(nix_bin):
        return nix_bin

    # 4. Non-system console script from PATH (e.g. venv or ~/.local/bin).
    if located and _runnable(located):
        return located

    # 5. Fallback: repository-local entrypoints. main.py is accepted even
    #    without the executable bit (it is run via the interpreter).
    entry_sh = os.path.join(repo_dir, "main.sh")
    if _runnable(entry_sh):
        return entry_sh

    entry_py = os.path.join(repo_dir, "main.py")
    if _runnable(entry_py) or os.path.exists(entry_py):
        return entry_py

    # 6. Nothing found → treat as a hard error.
    raise SystemExit(
        f"No executable command could be resolved for repository '{repo_identifier}'. "
        "No explicit 'command' configured, no system-managed binary under /usr/, "
        "no Nix profile binary, no non-system console script on PATH, and no "
        "main.sh/main.py found in the repository."
    )
|
||||
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "package-manager"
|
||||
version = "0.5.1"
|
||||
version = "0.9.1"
|
||||
description = "Kevin's package-manager tool (pkgmgr)"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
|
||||
35
scripts/build/build-image-missing.sh
Executable file
35
scripts/build/build-image-missing.sh
Executable file
@@ -0,0 +1,35 @@
|
||||
#!/usr/bin/env bash
# Build only those per-distro test images that are not present locally.
# Requires: docker; resolve-base-image.sh (provides DISTROS and the
# resolve_base_image() function).
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/resolve-base-image.sh"

echo "============================================================"
echo ">>> Building ONLY missing container images"
echo "============================================================"

# DISTROS is a whitespace-separated list; word-splitting is intentional.
for distro in $DISTROS; do
  image="package-manager-test-$distro"
  base="$(resolve_base_image "$distro")"

  # Skip anything docker already knows about.
  if docker image inspect "$image" >/dev/null 2>&1; then
    echo "[build-missing] Image already exists: $image (skipping)"
    continue
  fi

  echo
  echo "------------------------------------------------------------"
  echo "[build-missing] Building missing image: $image"
  echo "BASE_IMAGE = $base"
  echo "------------------------------------------------------------"

  docker build \
    --build-arg BASE_IMAGE="$base" \
    -t "$image" \
    .
done

echo
echo "============================================================"
echo ">>> build-missing: Done"
echo "============================================================"
|
||||
17
scripts/build/build-image-no-cache.sh
Executable file
17
scripts/build/build-image-no-cache.sh
Executable file
@@ -0,0 +1,17 @@
|
||||
#!/usr/bin/env bash
# Rebuild every per-distro test image from scratch (docker --no-cache).
# Requires: docker; resolve-base-image.sh (provides DISTROS and the
# resolve_base_image() function).
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/resolve-base-image.sh"

# DISTROS is a whitespace-separated list; word-splitting is intentional.
for distro in $DISTROS; do
  base="$(resolve_base_image "$distro")"

  echo ">>> Building test image for distro '$distro' with NO CACHE (BASE_IMAGE=$base)..."

  docker build \
    --no-cache \
    --build-arg BASE_IMAGE="$base" \
    -t "package-manager-test-$distro" \
    .
done
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user