Compare commits

52 commits: v0.7.11...39b16b87a8

SHA1
39b16b87a8
26c9d79814
2776d18a42
7057ccfb95
1807949c6f
d611720b8f
bf871650a8
5ca1adda7b
acb18adf76
c18490f5d3
eeda944b73
52cfbebba4
f4385807f1
e9e083c9dd
3218b2b39f
ba296a79c9
62e05e2f5b
77d8b68ba5
bb0a801396
ee968efc4b
644b2b8fa0
0f74907f82
5a8b1b11de
389ec40163
1d03055491
7775c6d974
a24a819511
0a6c2f2988
0c90e984ad
0a0cbbfe6d
15c44cd484
6d7ee6fc04
5a022db0db
37ac22e0b4
bcea440e40
6edde2d65b
74189c1e14
b5ddf7402a
900224ed2e
e290043089
a7fd37d646
d4b00046d3
545d345ea4
a29b831e41
bc9ca140bd
ad8e3cd07c
22efe0b32e
d23a0a94d5
e42b79c9d8
3b2c657bfa
e335ab05a1
75f963d6e2
@@ -25,7 +25,5 @@ venv/
.DS_Store
Thumbs.db

# Arch pkg artifacts
*.pkg.tar.*
*.log
package-manager-*
# Logs
*.log
.github/workflows/ci.yml (vendored, new file, 26 lines)

@@ -0,0 +1,26 @@
name: CI

on:
  push:
    branches-ignore:
      - main
  pull_request:

jobs:
  test-unit:
    uses: ./.github/workflows/test-unit.yml

  test-integration:
    uses: ./.github/workflows/test-integration.yml

  test-container:
    uses: ./.github/workflows/test-container.yml

  test-e2e:
    uses: ./.github/workflows/test-e2e.yml

  test-virgin-user:
    uses: ./.github/workflows/test-virgin-user.yml

  test-virgin-root:
    uses: ./.github/workflows/test-virgin-root.yml
.github/workflows/mark-stable.yml (vendored, new file, 64 lines)

@@ -0,0 +1,64 @@
name: Mark stable commit

on:
  push:
    branches:
      - main

jobs:
  test-unit:
    uses: ./.github/workflows/test-unit.yml

  test-integration:
    uses: ./.github/workflows/test-integration.yml

  test-container:
    uses: ./.github/workflows/test-container.yml

  test-e2e:
    uses: ./.github/workflows/test-e2e.yml

  test-virgin-user:
    uses: ./.github/workflows/test-virgin-user.yml

  test-virgin-root:
    uses: ./.github/workflows/test-virgin-root.yml

  mark-stable:
    needs:
      - test-unit
      - test-integration
      - test-container
      - test-e2e
      - test-virgin-user
      - test-virgin-root
    runs-on: ubuntu-latest

    permissions:
      contents: write # to move the tag

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Move 'stable' tag to this commit
        run: |
          set -euo pipefail

          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"

          echo "Tagging commit $GITHUB_SHA as stable…"

          # delete local tag if exists
          git tag -d stable 2>/dev/null || true
          # delete remote tag if exists
          git push origin :refs/tags/stable || true

          # create new tag on this commit
          git tag stable "$GITHUB_SHA"
          git push origin stable

          echo "✅ Stable tag updated."
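A consumer-side sketch (not part of the diff, and assuming the remote is named origin): once this workflow has moved the stable tag, a checkout can follow it like this.

```bash
# Fetch the moved 'stable' tag (force-update the local tag ref) and check it out.
git fetch origin "+refs/tags/stable:refs/tags/stable"
git checkout stable
```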
.github/workflows/test-container.yml (vendored, 21 changed lines)

@@ -1,25 +1,28 @@
name: Test OS Containers

on:
  push:
    branches:
      - main
      - master
      - develop
      - "*"
  pull_request:
  workflow_call:

jobs:
  test-container:
    runs-on: ubuntu-latest
    timeout-minutes: 30
    strategy:
      fail-fast: false
      matrix:
        distro: [arch, debian, ubuntu, fedora, centos]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Show commit SHA
        run: git rev-parse HEAD

      - name: Show Docker version
        run: docker version

      - name: Run container tests
        run: make test-container
      - name: Run container tests (${{ matrix.distro }})
        run: |
          set -euo pipefail
          distro="${{ matrix.distro }}" make test-container
.github/workflows/test-e2e.yml (vendored, 20 changed lines)

@@ -1,18 +1,16 @@
name: Test End-To-End

on:
  push:
    branches:
      - main
      - master
      - develop
      - "*"
  pull_request:
  workflow_call:

jobs:
  test-e2e:
    runs-on: ubuntu-latest
    timeout-minutes: 60 # E2E + all distros can be heavier
    timeout-minutes: 60 # E2E can be heavier
    strategy:
      fail-fast: false
      matrix:
        distro: [arch, debian, ubuntu, fedora, centos]

    steps:
      - name: Checkout repository

@@ -21,5 +19,7 @@ jobs:
      - name: Show Docker version
        run: docker version

      - name: Run E2E tests via make (all distros)
        run: make test-e2e
      - name: Run E2E tests via make (${{ matrix.distro }})
        run: |
          set -euo pipefail
          distro="${{ matrix.distro }}" make test-e2e
.github/workflows/test-integration.yml (vendored, 10 changed lines)

@@ -1,13 +1,7 @@
name: Test Code Integration

on:
  push:
    branches:
      - main
      - master
      - develop
      - "*"
  pull_request:
  workflow_call:

jobs:
  test-integration:

@@ -22,4 +16,4 @@ jobs:
        run: docker version

      - name: Run integration tests via make (Arch container)
        run: make test-integration DISTROS="arch"
        run: make test-integration distro="arch"
.github/workflows/test-unit.yml (vendored, 10 changed lines)

@@ -1,13 +1,7 @@
name: Test Units

on:
  push:
    branches:
      - main
      - master
      - develop
      - "*"
  pull_request:
  workflow_call:

jobs:
  test-unit:

@@ -22,4 +16,4 @@ jobs:
        run: docker version

      - name: Run unit tests via make (Arch container)
        run: make test-unit DISTROS="arch"
        run: make test-unit distro="arch"
.github/workflows/test-virgin-root.yml (vendored, new file, 58 lines)

@@ -0,0 +1,58 @@
name: Test Virgin Root

on:
  workflow_call:

jobs:
  test-virgin-root:
    runs-on: ubuntu-latest
    timeout-minutes: 45

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Show Docker version
        run: docker version

      - name: Virgin Arch pkgmgr flake test (root)
        run: |
          set -euo pipefail

          echo ">>> Starting virgin ArchLinux container test (root, with shared caches)..."

          docker run --rm \
            -v "$PWD":/src \
            -v pkgmgr_repos:/root/Repositories \
            -v pkgmgr_pip_cache:/root/.cache/pip \
            -w /src \
            archlinux:latest \
            bash -lc '
              set -euo pipefail

              echo ">>> Updating and upgrading Arch system..."
              pacman -Syu --noconfirm git python python-pip nix >/dev/null

              echo ">>> Creating isolated virtual environment for pkgmgr..."
              python -m venv /tmp/pkgmgr-venv

              echo ">>> Activating virtual environment..."
              source /tmp/pkgmgr-venv/bin/activate

              echo ">>> Upgrading pip (cached)..."
              python -m pip install --upgrade pip >/dev/null

              echo ">>> Installing pkgmgr from current source tree (cached pip)..."
              python -m pip install /src >/dev/null

              echo ">>> Enabling Nix experimental features..."
              export NIX_CONFIG="experimental-features = nix-command flakes"

              echo ">>> Running: pkgmgr update pkgmgr --clone-mode shallow --no-verification"
              pkgmgr update pkgmgr --clone-mode shallow --no-verification

              echo ">>> Running: pkgmgr version pkgmgr"
              pkgmgr version pkgmgr

              echo ">>> Virgin Arch (root) test completed successfully."
            '
.github/workflows/test-virgin-user.yml (vendored, new file, 73 lines)

@@ -0,0 +1,73 @@
name: Test Virgin User

on:
  workflow_call:

jobs:
  test-virgin-user:
    runs-on: ubuntu-latest
    timeout-minutes: 45

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Show Docker version
        run: docker version

      - name: Virgin Arch pkgmgr user test (non-root with sudo)
        run: |
          set -euo pipefail

          echo ">>> Starting virgin ArchLinux container test (non-root user with sudo)..."

          docker run --rm \
            -v "$PWD":/src \
            archlinux:latest \
            bash -lc '
              set -euo pipefail

              echo ">>> [root] Updating and upgrading Arch system..."
              pacman -Syu --noconfirm git python python-pip sudo base-devel debugedit

              echo ">>> [root] Creating non-root user dev..."
              useradd -m dev

              echo ">>> [root] Allowing passwordless sudo for dev..."
              echo "dev ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/dev
              chmod 0440 /etc/sudoers.d/dev

              echo ">>> [root] Adjusting ownership of /src for dev..."
              chown -R dev:dev /src

              echo ">>> [root] Running pkgmgr flow as non-root user dev..."
              sudo -u dev env PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 bash -lc "
                set -euo pipefail
                cd /src

                echo \">>> [dev] Using user: \$(whoami)\"
                echo \">>> [dev] Running scripts/installation/main.sh...\"
                bash scripts/installation/main.sh

                echo \">>> [dev] Activating venv...\"
                . \"\$HOME/.venvs/pkgmgr/bin/activate\"

                echo \">>> [dev] Installing pkgmgr into venv via pip...\"
                python -m pip install /src >/dev/null

                echo \">>> [dev] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=\$PKGMGR_DISABLE_NIX_FLAKE_INSTALLER\"
                echo \">>> [dev] Updating managed repo package-manager via pkgmgr...\"
                pkgmgr update pkgmgr --clone-mode shallow --no-verification

                echo \">>> [dev] PATH:\"
                echo \"\$PATH\"

                echo \">>> [dev] which pkgmgr:\"
                which pkgmgr || echo \">>> [dev] pkgmgr not found in PATH\"

                echo \">>> [dev] Running: pkgmgr version pkgmgr\"
                pkgmgr version pkgmgr
              "

              echo ">>> [root] Container flow finished."
            '
.gitignore (vendored, 13 changed lines)

@@ -1,9 +1,6 @@

# Prevents unwanted files from being committed to version control.

# Custom Config file
config/config.yaml

# Python bytecode
__pycache__/
*.pyc

@@ -17,6 +14,7 @@ venv/
dist/
build/*
*.egg-info/
package-manager-*

# Editor files
.vscode/

@@ -28,14 +26,9 @@ Thumbs.db

# Nix Cache to speed up tests
.nix/
.nix-dev-installed

# Ignore logs
*.log
package-manager-*

# debian
debian/package-manager/
debian/debhelper-build-stamp
debian/files
debian/.debhelper/
debian/package-manager.substvars
result
CHANGELOG.md (45 changed lines)

@@ -1,3 +1,48 @@
## [0.9.1] - 2025-12-10

* * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
  * Split virgin tests into root/user workflows; stabilized Nix installer across distros; improved test scripts with dynamic distro selection and isolated Nix stores.
  * Fixed repository directory resolution; improved `pkgmgr path` and `pkgmgr shell`; added full unit/E2E coverage.
  * Removed deprecated files and updated `.gitignore`.


## [0.9.0] - 2025-12-10

* Introduce a virgin Arch-based Nix flake E2E workflow that validates pkgmgr’s full flake installation path using shared caches for faster and reproducible CI runs.


## [0.8.0] - 2025-12-10

* **v0.7.15 — Installer & Command Resolution Improvements**

  * Introduced a unified **layer-based installer pipeline** with clear precedence (OS-packages, Nix, Python, Makefile).
  * Reworked installer structure and improved Python/Nix/Makefile installers, including isolated Python venvs and refined flake-output handling.
  * Fully rewrote **command resolution** with stronger typing, safer fallbacks, and explicit support for `command: null` to mark library-only repositories.
  * Added extensive **unit and integration tests** for installer capability ordering, command resolution, and Nix/Python installer behavior.
  * Expanded documentation with capability hierarchy diagrams and scenario matrices.
  * Removed deprecated repository entries and obsolete configuration files.


## [0.7.14] - 2025-12-10

* Fixed the clone-all integration test so that `SystemExit(0)` from the proxy is treated as a successful command instead of a failure.


## [0.7.13] - 2025-12-10

### Fix tools path resolution and add tests

- Fixed a crash in `pkgmgr code` caused by missing `directory` metadata by introducing `_resolve_repository_path()` with proper fallbacks to `repositories_base_dir` / `repositories_dir`.
- Updated `explore`, `terminal` and `code` tool commands to use the new resolver.
- Improved VS Code workspace generation and path handling.
- Added unit & E2E tests for tool commands.


## [0.7.12] - 2025-12-09

* Fixed self refering alias during setup


## [0.7.11] - 2025-12-09

* test: fix installer unit tests for OS packages and Nix dev shell
@@ -1,9 +1,12 @@
# ------------------------------------------------------------
# Base image selector — overridden by Makefile
# ------------------------------------------------------------
ARG BASE_IMAGE=archlinux:latest
ARG BASE_IMAGE
FROM ${BASE_IMAGE}

RUN echo "BASE_IMAGE=${BASE_IMAGE}" && \
    cat /etc/os-release || true

# ------------------------------------------------------------
# Nix environment defaults
#
MIRRORS (new file, 3 lines)

@@ -0,0 +1,3 @@
https://github.com/kevinveenbirkenbach/package-manager
https://git.veen.world/kevinveenbirkenbach/package-manager
https://code.infinito.nexus/kevinveenbirkenbach/package-manager
Makefile (22 changed lines)

@@ -2,17 +2,15 @@
	test build build-no-cache test-unit test-e2e test-integration \
	test-container

# ------------------------------------------------------------
# Local Nix cache directories in the repo
# ------------------------------------------------------------
NIX_STORE_VOLUME := pkgmgr_nix_store
NIX_CACHE_VOLUME := pkgmgr_nix_cache
# Distro
# Options: arch debian ubuntu fedora centos
distro ?= arch
export distro

# ------------------------------------------------------------
# Distro list and base images
# Base images
# (kept for documentation/reference; actual build logic is in scripts/build)
# ------------------------------------------------------------
DISTROS := arch debian ubuntu fedora centos
BASE_IMAGE_ARCH := archlinux:latest
BASE_IMAGE_DEBIAN := debian:stable-slim
BASE_IMAGE_UBUNTU := ubuntu:latest

@@ -20,7 +18,6 @@ BASE_IMAGE_FEDORA := fedora:latest
BASE_IMAGE_CENTOS := quay.io/centos/centos:stream9

# Make them available in scripts
export DISTROS
export BASE_IMAGE_ARCH
export BASE_IMAGE_DEBIAN
export BASE_IMAGE_UBUNTU

@@ -68,8 +65,13 @@ test-container: build-missing
build-missing:
	@bash scripts/build/build-image-missing.sh

# Combined test target for local + CI (unit + e2e + integration)
test: test-container test-unit test-e2e test-integration
# Combined test target for local + CI (unit + integration + e2e)
test: test-container test-unit test-integration test-e2e

delete-volumes:
	@docker volume rm pkgmgr_nix_store_${distro} pkgmgr_nix_cache_${distro} || true

purge: delete-volumes build-no-cache

# ------------------------------------------------------------
# System install (native packages, calls scripts/installation/run-package.sh)
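A usage sketch (not part of the diff): the `distro` variable and the targets above are driven the same way the CI workflows invoke them; the distro value shown here is just an example.

```bash
# Run the container test suite against a single distro (defaults to arch).
distro=debian make test-container

# Run the combined suite in the new order: container, unit, integration, e2e.
make test

# Drop the per-distro Nix volumes for a clean rebuild.
distro=debian make delete-volumes
```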
README.md (64 changed lines)

@@ -24,6 +24,15 @@
- **Custom Aliases:**
  Generate and manage custom aliases for easy command invocation.

## Architecture & Setup Map 🗺️

The following diagram provides a full overview of PKGMGR’s package structure,
installation layers, and setup controller flow:



**Diagram status:** *As of 11 December 2025*
**Always-up-to-date version:** https://s.veen.world/pkgmgrmp

## Installation ⚙️

@@ -51,55 +60,6 @@ The `make setup` command will:
- Install required packages from `requirements.txt`.
- Execute `python main.py install` to complete the installation.

## Docker Quickstart 🐳

As an alternative to installing locally, you can use Docker: build the image with

```bash
docker build --no-cache -t pkgmgr .
```

or pull it via

```bash
docker pull kevinveenbirkenbach/pkgmgr:latest
```

and then run

```bash
docker run --rm pkgmgr --help
```

## Usage 📖

Run the script with different commands. For example:

- **Install all packages:**
  ```bash
  pkgmgr install --all
  ```
- **Pull updates for a specific repository:**
  ```bash
  pkgmgr pull pkgmgr
  ```
- **Commit changes with extra Git parameters:**
  ```bash
  pkgmgr commit pkgmgr -- -m "Your commit message"
  ```
- **List all configured packages:**
  ```bash
  pkgmgr config show
  ```
- **Manage configuration:**
  ```bash
  pkgmgr config init
  pkgmgr config add
  pkgmgr config edit
  pkgmgr config delete <identifier>
  pkgmgr config ignore <identifier> --set true
  ```

## License 📄

This project is licensed under the MIT License.

@@ -108,9 +68,3 @@ This project is licensed under the MIT License.

Kevin Veen-Birkenbach
[https://www.veen.world](https://www.veen.world)

---

**Repository:** [github.com/kevinveenbirkenbach/package-manager](https://github.com/kevinveenbirkenbach/package-manager)

*Created with AI 🤖 - [View conversation](https://chatgpt.com/share/67c728c4-92d0-800f-8945-003fa9bf27c6)*
TODO.md (new file, 6 lines)

@@ -0,0 +1,6 @@
# to-dos

For the following, check out the implementation map:

- Implement TAGS
- Implement SIGNING_KEY
assets/map.png (new binary file, 1.9 MiB; binary file not shown)
@@ -380,17 +380,6 @@ repositories:
      - 44D8F11FD62F878E
      - B5690EEEBB952194

- account: kevinveenbirkenbach
  alias: infinito-presentation
  description: This repository contains a Infinito.Nexus presentation designed for customers, end-users, investors, developers, and administrators, offering tailored content and insights for each group.
  homepage: https://github.com/kevinveenbirkenbach/infinito-presentation
  provider: github.com
  repository: infinito-presentation
  verified:
    gpg_keys:
      - 44D8F11FD62F878E
      - B5690EEEBB952194

- account: kevinveenbirkenbach
  description: A lightweight Python utility to generate dynamic color schemes from a single base color. Provides HSL-based color transformations for theming, UI design, and CSS variable generation. Optimized for integration in Python projects, Flask applications, and Ansible roles.
  homepage: https://github.com/kevinveenbirkenbach/colorscheme-generator

@@ -599,17 +588,6 @@ repositories:
      - 44D8F11FD62F878E
      - B5690EEEBB952194

- account: kevinveenbirkenbach
  desciption: Infinito Inventory Builder — a containerized web application that dynamically generates Ansible inventory files from invokable Infinito.Nexus roles through an interactive, browser-based interface.
  homepage: https://github.com/kevinveenbirkenbach/infinito-inventory-builder
  alias: invbuild
  provider: github.com
  repository: infinito-inventory-builder
  verified:
    gpg_keys:
      - 44D8F11FD62F878E
      - B5690EEEBB952194

- account: kevinveenbirkenbach
  desciption: A simple Python CLI tool to safely rename Linux user accounts using usermod — including home directory migration and validation checks.
  homepage: https://github.com/kevinveenbirkenbach/user-rename

@@ -1,7 +0,0 @@
- account: kevinveenbirkenbach
  alias: gkfdrtdtcntr
  provider: github.com
  repository: federated-to-central-social-network-bridge
  verified:
    gpg_keys:
      - 44D8F11FD62F878E
flake.lock (generated, new file, 27 lines)

@@ -0,0 +1,27 @@
{
  "nodes": {
    "nixpkgs": {
      "locked": {
        "lastModified": 1765186076,
        "narHash": "sha256-hM20uyap1a0M9d344I692r+ik4gTMyj60cQWO+hAYP8=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "addf7cf5f383a3101ecfba091b98d0a1263dc9b8",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "nixpkgs": "nixpkgs"
      }
    }
  },
  "root": "root",
  "version": 7
}
flake.nix (39 changed lines)

@@ -26,12 +26,17 @@
    packages = forAllSystems (system:
      let
        pkgs = nixpkgs.legacyPackages.${system};

        # Single source of truth for pkgmgr: Python 3.11
        # - Matches pyproject.toml: requires-python = ">=3.11"
        # - Uses python311Packages so that PyYAML etc. are available
        python = pkgs.python311;
        pyPkgs = pkgs.python311Packages;
      in
      rec {
        pkgmgr = pyPkgs.buildPythonApplication {
          pname = "package-manager";
          version = "0.7.11";
          version = "0.9.1";

          # Use the git repo as source
          src = ./.;

@@ -45,18 +50,17 @@
            pyPkgs.wheel
          ];

          # Runtime dependencies (matches [project.dependencies])
          # Runtime dependencies (matches [project.dependencies] in pyproject.toml)
          propagatedBuildInputs = [
            pyPkgs.pyyaml
            # Add more here if needed, e.g.:
            # pyPkgs.click
            # pyPkgs.rich
            pyPkgs.pip
          ];

          doCheck = false;

          pythonImportsCheck = [ "pkgmgr" ];
        };

        default = pkgmgr;
      }
    );

@@ -67,23 +71,42 @@
    devShells = forAllSystems (system:
      let
        pkgs = nixpkgs.legacyPackages.${system};
        pkgmgrPkg = self.packages.${system}.pkgmgr;

        ansiblePkg =
          if pkgs ? ansible-core then pkgs.ansible-core
          else pkgs.ansible;

        # Use the same Python version as the package (3.11)
        python = pkgs.python311;

        pythonWithDeps = python.withPackages (ps: [
          ps.pip
          ps.pyyaml
        ]);
      in
      {
        default = pkgs.mkShell {
          buildInputs = [
            pkgmgrPkg
            pythonWithDeps
            pkgs.git
            ansiblePkg
          ];

          shellHook = ''
            # Ensure our Python with dependencies is preferred on PATH
            export PATH=${pythonWithDeps}/bin:$PATH

            # Ensure src/ layout is importable:
            # pkgmgr lives in ./src/pkgmgr
            export PYTHONPATH="$PWD/src:${PYTHONPATH:-}"
            # Also add repo root in case tools/tests rely on it
            export PYTHONPATH="$PWD:$PYTHONPATH"

            echo "Entered pkgmgr development shell for ${system}"
            echo "pkgmgr CLI is available via the flake build"
            echo "Python used in this shell:"
            python --version
            echo "pkgmgr CLI (from source) is available via:"
            echo " python -m pkgmgr.cli --help"
          '';
        };
      }
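A usage sketch (not part of the diff): entering the dev shell defined above and invoking the CLI follows directly from the shellHook messages. It assumes you are in the repository root and that flakes/nix-command are enabled, as the virgin-root workflow does via NIX_CONFIG.

```bash
# Enter the development shell defined by this flake (run from the repo root).
nix develop

# Inside the shell, run the CLI from the src/ layout, as the shellHook suggests:
python -m pkgmgr.cli --help

# Or build the packaged output defined under packages.<system>.pkgmgr:
nix build .#pkgmgr
```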
main.py (10 changed lines)

@@ -1,6 +1,14 @@
#!/usr/bin/env python3
import sys
from pathlib import Path

# Ensure local src/ overrides installed package
ROOT = Path(__file__).resolve().parent
SRC = ROOT / "src"
if SRC.is_dir():
    sys.path.insert(0, str(SRC))

from pkgmgr.cli import main

if __name__ == "__main__":
    main()
    main()
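A brief usage sketch (not part of the diff): because main.py now prepends the local src/ directory to sys.path, running it from a checkout exercises the working tree rather than an installed copy; the install subcommand is the one the README's `make setup` flow invokes.

```bash
# Run the CLI entry point directly from the repository checkout;
# the local src/pkgmgr package takes precedence over any installed version.
python main.py --help
python main.py install
```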
packaging/arch/.gitignore (vendored, new file, 6 lines)

@@ -0,0 +1,6 @@
# Arch pkg artifacts
*.pkg.tar.*
*.log
package-manager-*
src/
pkg/
@@ -1,7 +1,7 @@
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>

pkgname=package-manager
pkgver=0.7.11
pkgver=0.9.1
pkgrel=1
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
arch=('any')

@@ -15,7 +15,7 @@ makedepends=('rsync')
install=${pkgname}.install

# Local source checkout — avoids the tarball requirement.
# This assumes you build the package from inside the main project repository.
# We build from the project root (two levels above packaging/arch/).
source=()
sha256sums=()

@@ -24,12 +24,18 @@ _srcdir_name="source"

prepare() {
  mkdir -p "$srcdir/$_srcdir_name"

  local project_root
  project_root="$(cd "$startdir/../.." && pwd)"

  rsync -a \
    --exclude=".git" \
    --exclude=".github" \
    --exclude="pkg" \
    --exclude="srcpkg" \
    "$startdir/" "$srcdir/$_srcdir_name/"
    --exclude="packaging" \
    --exclude="assets" \
    "$project_root/" "$srcdir/$_srcdir_name/"
}

build() {

@@ -62,7 +68,8 @@ package() {
    "$pkgdir/usr/lib/package-manager/PKGBUILD" \
    "$pkgdir/usr/lib/package-manager/Dockerfile" \
    "$pkgdir/usr/lib/package-manager/debian" \
    "$pkgdir/usr/lib/package-manager/packaging" \
    "$pkgdir/usr/lib/package-manager/.gitignore" \
    "$pkgdir/usr/lib/package-manager/__pycache__" \
    "$pkgdir/usr/lib/package-manager/.gitkeep"
    "$pkgdir/usr/lib/package-manager/.gitkeep" || true
}
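A build sketch, under the assumption that this PKGBUILD is used as its comments describe (built from inside the project checkout, with the PKGBUILD living in packaging/arch/); makepkg itself is standard Arch tooling, not something this diff adds.

```bash
# Build and install the Arch package from the in-repo PKGBUILD.
cd packaging/arch
makepkg -si
```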
packaging/debian/.gitignore (vendored, new file, 6 lines)

@@ -0,0 +1,6 @@
# debian
package-manager/
debhelper-build-stamp
files
.debhelper/
package-manager.substvars
@@ -1,3 +1,49 @@
package-manager (0.9.1-1) unstable; urgency=medium

  * * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
    * Split virgin tests into root/user workflows; stabilized Nix installer across distros; improved test scripts with dynamic distro selection and isolated Nix stores.
    * Fixed repository directory resolution; improved `pkgmgr path` and `pkgmgr shell`; added full unit/E2E coverage.
    * Removed deprecated files and updated `.gitignore`.

 -- Kevin Veen-Birkenbach <kevin@veen.world>  Wed, 10 Dec 2025 22:56:01 +0100

package-manager (0.9.0-1) unstable; urgency=medium

  * Introduce a virgin Arch-based Nix flake E2E workflow that validates pkgmgr’s full flake installation path using shared caches for faster and reproducible CI runs.

 -- Kevin Veen-Birkenbach <kevin@veen.world>  Wed, 10 Dec 2025 18:38:07 +0100

package-manager (0.8.0-1) unstable; urgency=medium

  * **v0.7.15 — Installer & Command Resolution Improvements**

    * Introduced a unified **layer-based installer pipeline** with clear precedence (OS-packages, Nix, Python, Makefile).
    * Reworked installer structure and improved Python/Nix/Makefile installers, including isolated Python venvs and refined flake-output handling.
    * Fully rewrote **command resolution** with stronger typing, safer fallbacks, and explicit support for `command: null` to mark library-only repositories.
    * Added extensive **unit and integration tests** for installer capability ordering, command resolution, and Nix/Python installer behavior.
    * Expanded documentation with capability hierarchy diagrams and scenario matrices.
    * Removed deprecated repository entries and obsolete configuration files.

 -- Kevin Veen-Birkenbach <kevin@veen.world>  Wed, 10 Dec 2025 17:31:57 +0100

package-manager (0.7.14-1) unstable; urgency=medium

  * Fixed the clone-all integration test so that `SystemExit(0)` from the proxy is treated as a successful command instead of a failure.

 -- Kevin Veen-Birkenbach <kevin@veen.world>  Wed, 10 Dec 2025 10:38:33 +0100

package-manager (0.7.13-1) unstable; urgency=medium

  * Automated release.

 -- Kevin Veen-Birkenbach <kevin@veen.world>  Wed, 10 Dec 2025 10:27:24 +0100

package-manager (0.7.12-1) unstable; urgency=medium

  * Fixed self refering alias during setup

 -- Kevin Veen-Birkenbach <kevin@veen.world>  Tue, 09 Dec 2025 23:36:35 +0100

package-manager (0.7.11-1) unstable; urgency=medium

  * test: fix installer unit tests for OS packages and Nix dev shell
@@ -9,7 +9,7 @@ Homepage: https://github.com/kevinveenbirkenbach/package-manager

Package: package-manager
Architecture: any
Depends: nix, ${misc:Depends}
Depends: ${misc:Depends}
Description: Wrapper that runs Kevin's package-manager via Nix flake
 This package provides the `pkgmgr` command, which runs Kevin's package
 manager via a local Nix flake
@@ -1,5 +1,5 @@
Name: package-manager
Version: 0.7.11
Version: 0.9.1
Release: 1%{?dist}
Summary: Wrapper that runs Kevin's package-manager via Nix flake

@@ -77,6 +77,34 @@ echo ">>> package-manager removed. Nix itself was not removed."
/usr/lib/package-manager/

%changelog
* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.9.1-1
- * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
  * Split virgin tests into root/user workflows; stabilized Nix installer across distros; improved test scripts with dynamic distro selection and isolated Nix stores.
  * Fixed repository directory resolution; improved `pkgmgr path` and `pkgmgr shell`; added full unit/E2E coverage.
  * Removed deprecated files and updated `.gitignore`.

* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.9.0-1
- Introduce a virgin Arch-based Nix flake E2E workflow that validates pkgmgr’s full flake installation path using shared caches for faster and reproducible CI runs.

* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.8.0-1
- **v0.7.15 — Installer & Command Resolution Improvements**

  * Introduced a unified **layer-based installer pipeline** with clear precedence (OS-packages, Nix, Python, Makefile).
  * Reworked installer structure and improved Python/Nix/Makefile installers, including isolated Python venvs and refined flake-output handling.
  * Fully rewrote **command resolution** with stronger typing, safer fallbacks, and explicit support for `command: null` to mark library-only repositories.
  * Added extensive **unit and integration tests** for installer capability ordering, command resolution, and Nix/Python installer behavior.
  * Expanded documentation with capability hierarchy diagrams and scenario matrices.
  * Removed deprecated repository entries and obsolete configuration files.

* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.14-1
- Fixed the clone-all integration test so that `SystemExit(0)` from the proxy is treated as a successful command instead of a failure.

* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.13-1
- Automated release.

* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.12-1
- Fixed self refering alias during setup

* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.11-1
- test: fix installer unit tests for OS packages and Nix dev shell
@@ -1,7 +0,0 @@
version: 1

author: "Kevin Veen-Birkenbach"
url: "https://github.com/kevinveenbirkenbach/package-manager"
description: "A configurable Python-based package manager for managing multiple repositories via Bash."

dependencies: []
@@ -1,294 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Repository installation pipeline for pkgmgr.

This module orchestrates the installation of repositories by:

1. Ensuring the repository directory exists (cloning if necessary).
2. Verifying the repository according to the configured policies.
3. Creating executable links using create_ink(), after resolving the
   appropriate command via resolve_command_for_repo().
4. Running a sequence of modular installer components that handle
   specific technologies or manifests (PKGBUILD, Nix flakes, Python
   via pyproject.toml, Makefile, OS-specific package metadata).

The goal is to keep this file thin and delegate most logic to small,
focused installer classes.
"""

import os
from typing import List, Dict, Any

from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.command.ink import create_ink
from pkgmgr.core.repository.verify import verify_repository
from pkgmgr.actions.repository.clone import clone_repos
from pkgmgr.actions.repository.install.context import RepoContext
from pkgmgr.core.command.resolve import resolve_command_for_repo

# Installer implementations
from pkgmgr.actions.repository.install.installers.os_packages import (
    ArchPkgbuildInstaller,
    DebianControlInstaller,
    RpmSpecInstaller,
)
from pkgmgr.actions.repository.install.installers.nix_flake import NixFlakeInstaller
from pkgmgr.actions.repository.install.installers.python import PythonInstaller
from pkgmgr.actions.repository.install.installers.makefile import MakefileInstaller


# Layering:
# 1) OS packages: PKGBUILD / debian/control / RPM spec → os-deps.*
# 2) Nix flakes (flake.nix) → e.g. python-runtime, make-install
# 3) Python (pyproject.toml) → e.g. python-runtime, make-install
# 4) Makefile fallback → e.g. make-install
INSTALLERS = [
    ArchPkgbuildInstaller(),  # Arch
    DebianControlInstaller(),  # Debian/Ubuntu
    RpmSpecInstaller(),  # Fedora/RHEL/CentOS
    NixFlakeInstaller(),  # flake.nix (Nix layer)
    PythonInstaller(),  # pyproject.toml
    MakefileInstaller(),  # generic 'make install'
]


def _ensure_repo_dir(
    repo: Dict[str, Any],
    repositories_base_dir: str,
    all_repos: List[Dict[str, Any]],
    preview: bool,
    no_verification: bool,
    clone_mode: str,
    identifier: str,
) -> str:
    """
    Ensure the repository directory exists. If not, attempt to clone it.

    Returns the repository directory path or an empty string if cloning failed.
    """
    repo_dir = get_repo_dir(repositories_base_dir, repo)

    if not os.path.exists(repo_dir):
        print(f"Repository directory '{repo_dir}' does not exist. Cloning it now...")
        clone_repos(
            [repo],
            repositories_base_dir,
            all_repos,
            preview,
            no_verification,
            clone_mode,
        )
        if not os.path.exists(repo_dir):
            print(f"Cloning failed for repository {identifier}. Skipping installation.")
            return ""

    return repo_dir


def _verify_repo(
    repo: Dict[str, Any],
    repo_dir: str,
    no_verification: bool,
    identifier: str,
) -> bool:
    """
    Verify the repository using verify_repository().

    Returns True if installation should proceed, False if it should be skipped.
    """
    verified_info = repo.get("verified")
    verified_ok, errors, commit_hash, signing_key = verify_repository(
        repo,
        repo_dir,
        mode="local",
        no_verification=no_verification,
    )

    if not no_verification and verified_info and not verified_ok:
        print(f"Warning: Verification failed for {identifier}:")
        for err in errors:
            print(f" - {err}")
        choice = input("Proceed with installation? (y/N): ").strip().lower()
        if choice != "y":
            print(f"Skipping installation for {identifier}.")
            return False

    return True


def _create_context(
    repo: Dict[str, Any],
    identifier: str,
    repo_dir: str,
    repositories_base_dir: str,
    bin_dir: str,
    all_repos: List[Dict[str, Any]],
    no_verification: bool,
    preview: bool,
    quiet: bool,
    clone_mode: str,
    update_dependencies: bool,
) -> RepoContext:
    """
    Build a RepoContext for the given repository and parameters.
    """
    return RepoContext(
        repo=repo,
        identifier=identifier,
        repo_dir=repo_dir,
        repositories_base_dir=repositories_base_dir,
        bin_dir=bin_dir,
        all_repos=all_repos,
        no_verification=no_verification,
        preview=preview,
        quiet=quiet,
        clone_mode=clone_mode,
        update_dependencies=update_dependencies,
    )


def install_repos(
    selected_repos: List[Dict[str, Any]],
    repositories_base_dir: str,
    bin_dir: str,
    all_repos: List[Dict[str, Any]],
    no_verification: bool,
    preview: bool,
    quiet: bool,
    clone_mode: str,
    update_dependencies: bool,
) -> None:
    """
    Install repositories by creating symbolic links and processing standard
    manifest files (PKGBUILD, flake.nix, Python manifests, Makefile, etc.)
    via dedicated installer components.

    Any installer failure (SystemExit) is treated as fatal and will abort
    the current installation.
    """
    for repo in selected_repos:
        identifier = get_repo_identifier(repo, all_repos)
        repo_dir = _ensure_repo_dir(
            repo=repo,
            repositories_base_dir=repositories_base_dir,
            all_repos=all_repos,
            preview=preview,
            no_verification=no_verification,
            clone_mode=clone_mode,
            identifier=identifier,
        )
        if not repo_dir:
            continue

        if not _verify_repo(
            repo=repo,
            repo_dir=repo_dir,
            no_verification=no_verification,
            identifier=identifier,
        ):
            continue

        ctx = _create_context(
            repo=repo,
            identifier=identifier,
            repo_dir=repo_dir,
            repositories_base_dir=repositories_base_dir,
            bin_dir=bin_dir,
            all_repos=all_repos,
            no_verification=no_verification,
            preview=preview,
            quiet=quiet,
            clone_mode=clone_mode,
            update_dependencies=update_dependencies,
        )

        # ------------------------------------------------------------
        # Resolve the command for this repository before creating the link.
        # If no command is resolved, no link will be created.
        # ------------------------------------------------------------
        resolved_command = resolve_command_for_repo(
            repo=repo,
            repo_identifier=identifier,
            repo_dir=repo_dir,
        )

        if resolved_command:
            repo["command"] = resolved_command
        else:
            repo.pop("command", None)

        # ------------------------------------------------------------
        # Create the symlink using create_ink (if a command is set).
        # ------------------------------------------------------------
        create_ink(
            repo,
            repositories_base_dir,
            bin_dir,
            all_repos,
            quiet=quiet,
            preview=preview,
        )

        # Track which logical capabilities have already been provided by
        # earlier installers for this repository. This allows us to skip
        # installers that would only duplicate work (e.g. Python runtime
        # already provided by Nix flake → skip pyproject/Makefile).
        provided_capabilities: set[str] = set()

        # Run all installers that support this repository, but only if they
        # provide at least one capability that is not yet satisfied.
        for installer in INSTALLERS:
            if not installer.supports(ctx):
                continue

            caps = installer.discover_capabilities(ctx)

            # If the installer declares capabilities and *all* of them are
            # already provided, we can safely skip it.
            if caps and caps.issubset(provided_capabilities):
                if not quiet:
                    print(
                        f"Skipping installer {installer.__class__.__name__} "
                        f"for {identifier} – capabilities {caps} already provided."
                    )
                continue

            # ------------------------------------------------------------
            # Debug output + clear error if an installer fails
            # ------------------------------------------------------------
            if not quiet:
                print(
                    f"[pkgmgr] Running installer {installer.__class__.__name__} "
                    f"for {identifier} in '{repo_dir}' "
                    f"(new capabilities: {caps or '∅'})..."
                )

            try:
                installer.run(ctx)
            except SystemExit as exc:
                exit_code = exc.code if isinstance(exc.code, int) else str(exc.code)

                print(
                    f"[ERROR] Installer {installer.__class__.__name__} failed "
                    f"for repository {identifier} (dir: {repo_dir}) "
                    f"with exit code {exit_code}."
                )
                print(
                    "[ERROR] This usually means an underlying command failed "
                    "(e.g. 'make install', 'nix build', 'pip install', ...)."
                )
                print(
                    "[ERROR] Check the log above for the exact command output. "
                    "You can also run this repository in isolation via:\n"
                    f" pkgmgr install {identifier} --clone-mode shallow --no-verification"
                )

                # Re-raise so that CLI/tests fail clearly,
                # but now with much more context.
                raise

            # Only merge capabilities if the installer succeeded
            provided_capabilities.update(caps)
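A quick usage sketch (not part of the diff): the failure message above already names the exact command for re-running a single repository's installation in isolation, so it can be used verbatim; `<identifier>` is whatever get_repo_identifier() reports for the repository.

```bash
# Re-run one repository's installation in isolation, as suggested by the
# installer error message above (shallow HTTPS clone, verification disabled).
pkgmgr install <identifier> --clone-mode shallow --no-verification
```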
@@ -1,19 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installer package for pkgmgr.

This exposes all installer classes so users can import them directly from
pkgmgr.actions.repository.install.installers.
"""

from pkgmgr.actions.repository.install.installers.base import BaseInstaller  # noqa: F401
from pkgmgr.actions.repository.install.installers.nix_flake import NixFlakeInstaller  # noqa: F401
from pkgmgr.actions.repository.install.installers.python import PythonInstaller  # noqa: F401
from pkgmgr.actions.repository.install.installers.makefile import MakefileInstaller  # noqa: F401

# OS-specific installers
from pkgmgr.actions.repository.install.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller  # noqa: F401
from pkgmgr.actions.repository.install.installers.os_packages.debian_control import DebianControlInstaller  # noqa: F401
from pkgmgr.actions.repository.install.installers.os_packages.rpm_spec import RpmSpecInstaller  # noqa: F401
@@ -1,93 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installer that triggers `make install` if a Makefile is present and
the Makefile actually defines an 'install' target.

This is useful for repositories that expose a standard Makefile-based
installation step.
"""

import os
import re

from pkgmgr.actions.repository.install.context import RepoContext
from pkgmgr.actions.repository.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command


class MakefileInstaller(BaseInstaller):
    """Run `make install` if a Makefile with an 'install' target exists."""

    # Logical layer name, used by capability matchers.
    layer = "makefile"

    MAKEFILE_NAME = "Makefile"

    def supports(self, ctx: RepoContext) -> bool:
        """Return True if a Makefile exists in the repository directory."""
        makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
        return os.path.exists(makefile_path)

    def _has_install_target(self, makefile_path: str) -> bool:
        """
        Check whether the Makefile defines an 'install' target.

        We treat the presence of a real install target as either:
        - a line starting with 'install:' (optionally preceded by whitespace), or
        - a .PHONY line that lists 'install' as one of the targets.
        """
        try:
            with open(makefile_path, "r", encoding="utf-8", errors="ignore") as f:
                content = f.read()
        except OSError:
            # If we cannot read the Makefile for some reason, assume no target.
            return False

        # install: ...
        if re.search(r"^\s*install\s*:", content, flags=re.MULTILINE):
            return True

        # .PHONY: ... install ...
        if re.search(r"^\s*\.PHONY\s*:\s*.*\binstall\b", content, flags=re.MULTILINE):
            return True

        return False

    def run(self, ctx: RepoContext) -> None:
        """
        Execute `make install` in the repository directory, but only if an
        'install' target is actually defined in the Makefile.

        Any failure in `make install` is treated as a fatal error and will
        propagate as SystemExit from run_command().
        """
        makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)

        if not os.path.exists(makefile_path):
            # Should normally not happen if supports() was checked before,
            # but keep this guard for robustness.
            if not ctx.quiet:
                print(
                    f"[pkgmgr] Makefile '{makefile_path}' not found, "
                    "skipping make install."
                )
            return

        if not self._has_install_target(makefile_path):
            if not ctx.quiet:
                print(
                    "[pkgmgr] Skipping Makefile install: no 'install' target "
                    f"found in {makefile_path}."
                )
            return

        if not ctx.quiet:
            print(
                f"[pkgmgr] Running 'make install' in {ctx.repo_dir} "
                "(install target detected in Makefile)."
            )

        cmd = "make install"
        run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
@@ -1,127 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installer for Python projects based on pyproject.toml.

Strategy:
- Determine a pip command in this order:
    1. $PKGMGR_PIP (explicit override, e.g. ~/.venvs/pkgmgr/bin/pip)
    2. sys.executable -m pip (current interpreter)
    3. "pip" from PATH as last resort
- If pyproject.toml exists: pip install .

All installation failures are treated as fatal errors (SystemExit),
except when we explicitly skip the installer:

- If IN_NIX_SHELL is set, we assume Python is managed by Nix and
  skip this installer entirely.
- If PKGMGR_DISABLE_PYTHON_INSTALLER=1 is set, the installer is
  globally disabled (useful for CI or debugging).
"""

from __future__ import annotations

import os
import sys
from typing import TYPE_CHECKING

from pkgmgr.actions.repository.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command

if TYPE_CHECKING:
    from pkgmgr.actions.repository.install.context import RepoContext
    from pkgmgr.actions.repository.install import InstallContext


class PythonInstaller(BaseInstaller):
    """Install Python projects and dependencies via pip."""

    # Logical layer name, used by capability matchers.
    layer = "python"

    def _in_nix_shell(self) -> bool:
        """
        Return True if we appear to be running inside a Nix dev shell.

        Nix sets IN_NIX_SHELL in `nix develop` environments. In that case
        the Python environment is already provided by Nix, so we must not
        attempt an additional pip-based installation.
        """
        return bool(os.environ.get("IN_NIX_SHELL"))

    def supports(self, ctx: "RepoContext") -> bool:
        """
        Return True if this installer should handle the given repository.

        Only pyproject.toml is supported as the single source of truth
        for Python dependencies and packaging metadata.

        The installer is *disabled* when:
        - IN_NIX_SHELL is set (Python managed by Nix dev shell), or
        - PKGMGR_DISABLE_PYTHON_INSTALLER=1 is set.
        """
        # 1) Skip in Nix dev shells – Python is managed by the flake/devShell.
        if self._in_nix_shell():
            print(
                "[INFO] IN_NIX_SHELL detected; skipping PythonInstaller. "
                "Python runtime is provided by the Nix dev shell."
            )
            return False

        # 2) Optional global kill-switch.
        if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
            print(
                "[INFO] PKGMGR_DISABLE_PYTHON_INSTALLER=1 – "
                "PythonInstaller is disabled."
            )
            return False

        repo_dir = ctx.repo_dir
        return os.path.exists(os.path.join(repo_dir, "pyproject.toml"))

    def _pip_cmd(self) -> str:
        """
        Resolve the pip command to use.

        Order:
            1) PKGMGR_PIP (explicit override)
            2) sys.executable -m pip
            3) plain "pip"
        """
        explicit = os.environ.get("PKGMGR_PIP", "").strip()
        if explicit:
            return explicit

        if sys.executable:
            return f"{sys.executable} -m pip"

        return "pip"

    def run(self, ctx: "InstallContext") -> None:
        """
        Install Python project defined via pyproject.toml.

        Any pip failure is propagated as SystemExit.
        """
        # Extra guard in case run() is called directly without supports().
        if self._in_nix_shell():
            print(
                "[INFO] IN_NIX_SHELL detected in PythonInstaller.run(); "
                "skipping pip-based installation."
            )
            return

        if not self.supports(ctx):  # type: ignore[arg-type]
            return

        pip_cmd = self._pip_cmd()

        pyproject = os.path.join(ctx.repo_dir, "pyproject.toml")
        if os.path.exists(pyproject):
            print(
                f"pyproject.toml found in {ctx.identifier}, "
                f"installing Python project..."
            )
            cmd = f"{pip_cmd} install ."
            run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
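A usage sketch (not part of the diff) for the two environment knobs described in the removed installer's docstring; the venv path is only an example, matching the one the docstring itself mentions.

```bash
# Force the Python installer to use a specific pip (example path from the docstring).
PKGMGR_PIP="$HOME/.venvs/pkgmgr/bin/pip" pkgmgr install pkgmgr

# Disable the pip-based installer entirely, e.g. for CI or debugging.
PKGMGR_DISABLE_PYTHON_INSTALLER=1 pkgmgr install pkgmgr
```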
@@ -1,83 +0,0 @@
from __future__ import annotations

import json
import os

from typing import Any, Dict, List

from pkgmgr.cli.context import CLIContext
from pkgmgr.core.command.run import run_command
from pkgmgr.core.repository.identifier import get_repo_identifier


Repository = Dict[str, Any]


def handle_tools_command(
    args,
    ctx: CLIContext,
    selected: List[Repository],
) -> None:
    """
    Handle integration commands:
    - explore (file manager)
    - terminal (GNOME Terminal)
    - code (VS Code workspace)
    """

    # --------------------------------------------------------
    # explore
    # --------------------------------------------------------
    if args.command == "explore":
        for repository in selected:
            run_command(
                f"nautilus {repository['directory']} & disown"
            )
        return

    # --------------------------------------------------------
    # terminal
    # --------------------------------------------------------
    if args.command == "terminal":
        for repository in selected:
            run_command(
                f'gnome-terminal --tab --working-directory="{repository["directory"]}"'
            )
        return

    # --------------------------------------------------------
    # code
    # --------------------------------------------------------
    if args.command == "code":
        if not selected:
            print("No repositories selected.")
            return

        identifiers = [
            get_repo_identifier(repo, ctx.all_repositories)
            for repo in selected
        ]
        sorted_identifiers = sorted(identifiers)
        workspace_name = "_".join(sorted_identifiers) + ".code-workspace"

        workspaces_dir = os.path.expanduser(
            ctx.config_merged.get("directories").get("workspaces")
        )
        os.makedirs(workspaces_dir, exist_ok=True)
        workspace_file = os.path.join(workspaces_dir, workspace_name)

        folders = [{"path": repository["directory"]} for repository in selected]

        workspace_data = {
            "folders": folders,
            "settings": {},
        }
        if not os.path.exists(workspace_file):
            with open(workspace_file, "w") as f:
                json.dump(workspace_data, f, indent=4)
            print(f"Created workspace file: {workspace_file}")
        else:
            print(f"Using existing workspace file: {workspace_file}")

        run_command(f'code "{workspace_file}"')
        return
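A usage sketch (not part of the diff) for the three tool commands handled above; the repository identifier `pkgmgr` is just an example.

```bash
# Open a repository in the file manager, a terminal tab, or a VS Code workspace.
pkgmgr explore pkgmgr
pkgmgr terminal pkgmgr
pkgmgr code pkgmgr
```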
@@ -1,505 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from __future__ import annotations

import argparse

from pkgmgr.cli.proxy import register_proxy_commands


class SortedSubParsersAction(argparse._SubParsersAction):
    """
    Subparsers action that keeps choices sorted alphabetically.
    """

    def add_parser(self, name, **kwargs):
        parser = super().add_parser(name, **kwargs)
        # Sort choices alphabetically by dest (subcommand name)
        self._choices_actions.sort(key=lambda a: a.dest)
        return parser


def add_identifier_arguments(subparser: argparse.ArgumentParser) -> None:
    """
    Common identifier / selection arguments for many subcommands.

    Selection modes (mutual intent, not hard-enforced):
    - identifiers (positional): select by alias / provider/account/repo
    - --all: select all repositories
    - --category / --string / --tag: filter-based selection on top
      of the full repository set
    """
    subparser.add_argument(
        "identifiers",
        nargs="*",
        help=(
            "Identifier(s) for repositories. "
            "Default: Repository of current folder."
        ),
    )
    subparser.add_argument(
        "--all",
        action="store_true",
        default=False,
        help=(
            "Apply the subcommand to all repositories in the config. "
            "Some subcommands ask for confirmation. If you want to give this "
            "confirmation for all repositories, pipe 'yes'. E.g: "
            "yes | pkgmgr {subcommand} --all"
        ),
    )
    subparser.add_argument(
        "--category",
        nargs="+",
        default=[],
        help=(
            "Filter repositories by category patterns derived from config "
            "filenames or repo metadata (use filename without .yml/.yaml, "
            "or /regex/ to use a regular expression)."
        ),
    )
    subparser.add_argument(
        "--string",
        default="",
        help=(
            "Filter repositories whose identifier / name / path contains this "
            "substring (case-insensitive). Use /regex/ for regular expressions."
        ),
    )
    subparser.add_argument(
        "--tag",
        action="append",
        default=[],
        help=(
            "Filter repositories by tag. Matches tags from the repository "
            "collector and category tags. Use /regex/ for regular expressions."
        ),
    )
    subparser.add_argument(
        "--preview",
        action="store_true",
        help="Preview changes without executing commands",
    )
    subparser.add_argument(
        "--list",
        action="store_true",
        help="List affected repositories (with preview or status)",
    )
    subparser.add_argument(
        "-a",
        "--args",
        nargs=argparse.REMAINDER,
        dest="extra_args",
        help="Additional parameters to be attached.",
        default=[],
    )


def add_install_update_arguments(subparser: argparse.ArgumentParser) -> None:
    """
    Common arguments for install/update commands.
    """
    add_identifier_arguments(subparser)
    subparser.add_argument(
        "-q",
        "--quiet",
        action="store_true",
        help="Suppress warnings and info messages",
    )
    subparser.add_argument(
        "--no-verification",
        action="store_true",
        default=False,
        help="Disable verification via commit/gpg",
    )
    subparser.add_argument(
        "--dependencies",
        action="store_true",
        help="Also pull and update dependencies",
    )
    subparser.add_argument(
        "--clone-mode",
        choices=["ssh", "https", "shallow"],
        default="ssh",
        help=(
            "Specify the clone mode: ssh, https, or shallow "
            "(HTTPS shallow clone; default: ssh)"
        ),
    )


def create_parser(description_text: str) -> argparse.ArgumentParser:
    """
    Create the top-level argument parser for pkgmgr.
    """
    parser = argparse.ArgumentParser(
        description=description_text,
        formatter_class=argparse.RawTextHelpFormatter,
    )
    subparsers = parser.add_subparsers(
        dest="command",
        help="Subcommands",
        action=SortedSubParsersAction,
    )

    # ------------------------------------------------------------
    # install / update / deinstall / delete
    # ------------------------------------------------------------
|
||||
install_parser = subparsers.add_parser(
|
||||
"install",
|
||||
help="Setup repository/repositories alias links to executables",
|
||||
)
|
||||
add_install_update_arguments(install_parser)
|
||||
|
||||
update_parser = subparsers.add_parser(
|
||||
"update",
|
||||
help="Update (pull + install) repository/repositories",
|
||||
)
|
||||
add_install_update_arguments(update_parser)
|
||||
update_parser.add_argument(
|
||||
"--system",
|
||||
action="store_true",
|
||||
help="Include system update commands",
|
||||
)
|
||||
|
||||
deinstall_parser = subparsers.add_parser(
|
||||
"deinstall",
|
||||
help="Remove alias links to repository/repositories",
|
||||
)
|
||||
add_identifier_arguments(deinstall_parser)
|
||||
|
||||
delete_parser = subparsers.add_parser(
|
||||
"delete",
|
||||
help="Delete repository/repositories alias links to executables",
|
||||
)
|
||||
add_identifier_arguments(delete_parser)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# create
|
||||
# ------------------------------------------------------------
|
||||
create_cmd_parser = subparsers.add_parser(
|
||||
"create",
|
||||
help=(
|
||||
"Create new repository entries: add them to the config if not "
|
||||
"already present, initialize the local repository, and push "
|
||||
"remotely if --remote is set."
|
||||
),
|
||||
)
|
||||
add_identifier_arguments(create_cmd_parser)
|
||||
create_cmd_parser.add_argument(
|
||||
"--remote",
|
||||
action="store_true",
|
||||
help="If set, add the remote and push the initial commit.",
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# status
|
||||
# ------------------------------------------------------------
|
||||
status_parser = subparsers.add_parser(
|
||||
"status",
|
||||
help="Show status for repository/repositories or system",
|
||||
)
|
||||
add_identifier_arguments(status_parser)
|
||||
status_parser.add_argument(
|
||||
"--system",
|
||||
action="store_true",
|
||||
help="Show system status",
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# config
|
||||
# ------------------------------------------------------------
|
||||
config_parser = subparsers.add_parser(
|
||||
"config",
|
||||
help="Manage configuration",
|
||||
)
|
||||
config_subparsers = config_parser.add_subparsers(
|
||||
dest="subcommand",
|
||||
help="Config subcommands",
|
||||
required=True,
|
||||
)
|
||||
|
||||
config_show = config_subparsers.add_parser(
|
||||
"show",
|
||||
help="Show configuration",
|
||||
)
|
||||
add_identifier_arguments(config_show)
|
||||
|
||||
config_subparsers.add_parser(
|
||||
"add",
|
||||
help="Interactively add a new repository entry",
|
||||
)
|
||||
|
||||
config_subparsers.add_parser(
|
||||
"edit",
|
||||
help="Edit configuration file with nano",
|
||||
)
|
||||
|
||||
config_subparsers.add_parser(
|
||||
"init",
|
||||
help="Initialize user configuration by scanning the base directory",
|
||||
)
|
||||
|
||||
config_delete = config_subparsers.add_parser(
|
||||
"delete",
|
||||
help="Delete repository entry from user config",
|
||||
)
|
||||
add_identifier_arguments(config_delete)
|
||||
|
||||
config_ignore = config_subparsers.add_parser(
|
||||
"ignore",
|
||||
help="Set ignore flag for repository entries in user config",
|
||||
)
|
||||
add_identifier_arguments(config_ignore)
|
||||
config_ignore.add_argument(
|
||||
"--set",
|
||||
choices=["true", "false"],
|
||||
required=True,
|
||||
help="Set ignore to true or false",
|
||||
)
|
||||
|
||||
config_subparsers.add_parser(
|
||||
"update",
|
||||
help=(
|
||||
"Update default config files in ~/.config/pkgmgr/ from the "
|
||||
"installed pkgmgr package (does not touch config.yaml)."
|
||||
),
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# path / explore / terminal / code / shell
|
||||
# ------------------------------------------------------------
|
||||
path_parser = subparsers.add_parser(
|
||||
"path",
|
||||
help="Print the path(s) of repository/repositories",
|
||||
)
|
||||
add_identifier_arguments(path_parser)
|
||||
|
||||
explore_parser = subparsers.add_parser(
|
||||
"explore",
|
||||
help="Open repository in Nautilus file manager",
|
||||
)
|
||||
add_identifier_arguments(explore_parser)
|
||||
|
||||
terminal_parser = subparsers.add_parser(
|
||||
"terminal",
|
||||
help="Open repository in a new GNOME Terminal tab",
|
||||
)
|
||||
add_identifier_arguments(terminal_parser)
|
||||
|
||||
code_parser = subparsers.add_parser(
|
||||
"code",
|
||||
help="Open repository workspace with VS Code",
|
||||
)
|
||||
add_identifier_arguments(code_parser)
|
||||
|
||||
shell_parser = subparsers.add_parser(
|
||||
"shell",
|
||||
help="Execute a shell command in each repository",
|
||||
)
|
||||
add_identifier_arguments(shell_parser)
|
||||
shell_parser.add_argument(
|
||||
"-c",
|
||||
"--command",
|
||||
nargs=argparse.REMAINDER,
|
||||
dest="shell_command",
|
||||
help=(
|
||||
"The shell command (and its arguments) to execute in each "
|
||||
"repository"
|
||||
),
|
||||
default=[],
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# branch
|
||||
# ------------------------------------------------------------
|
||||
branch_parser = subparsers.add_parser(
|
||||
"branch",
|
||||
help="Branch-related utilities (e.g. open/close feature branches)",
|
||||
)
|
||||
branch_subparsers = branch_parser.add_subparsers(
|
||||
dest="subcommand",
|
||||
help="Branch subcommands",
|
||||
required=True,
|
||||
)
|
||||
|
||||
branch_open = branch_subparsers.add_parser(
|
||||
"open",
|
||||
help="Create and push a new branch on top of a base branch",
|
||||
)
|
||||
branch_open.add_argument(
|
||||
"name",
|
||||
nargs="?",
|
||||
help=(
|
||||
"Name of the new branch (optional; will be asked interactively "
|
||||
"if omitted)"
|
||||
),
|
||||
)
|
||||
branch_open.add_argument(
|
||||
"--base",
|
||||
default="main",
|
||||
help="Base branch to create the new branch from (default: main)",
|
||||
)
|
||||
|
||||
branch_close = branch_subparsers.add_parser(
|
||||
"close",
|
||||
help="Merge a feature branch into base and delete it",
|
||||
)
|
||||
branch_close.add_argument(
|
||||
"name",
|
||||
nargs="?",
|
||||
help=(
|
||||
"Name of the branch to close (optional; current branch is used "
|
||||
"if omitted)"
|
||||
),
|
||||
)
|
||||
branch_close.add_argument(
|
||||
"--base",
|
||||
default="main",
|
||||
help=(
|
||||
"Base branch to merge into (default: main; falls back to master "
|
||||
"internally if main does not exist)"
|
||||
),
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# release
|
||||
# ------------------------------------------------------------
|
||||
release_parser = subparsers.add_parser(
|
||||
"release",
|
||||
help=(
|
||||
"Create a release for repository/ies by incrementing version "
|
||||
"and updating the changelog."
|
||||
),
|
||||
)
|
||||
release_parser.add_argument(
|
||||
"release_type",
|
||||
choices=["major", "minor", "patch"],
|
||||
help="Type of version increment for the release (major, minor, patch).",
|
||||
)
|
||||
release_parser.add_argument(
|
||||
"-m",
|
||||
"--message",
|
||||
default=None,
|
||||
help=(
|
||||
"Optional release message to add to the changelog and tag."
|
||||
),
|
||||
)
|
||||
# Generic selection / preview / list / extra_args
|
||||
add_identifier_arguments(release_parser)
|
||||
# Close current branch after successful release
|
||||
release_parser.add_argument(
|
||||
"--close",
|
||||
action="store_true",
|
||||
help=(
|
||||
"Close the current branch after a successful release in each "
|
||||
"repository, if it is not main/master."
|
||||
),
|
||||
)
|
||||
# Force: skip preview+confirmation and run release directly
|
||||
release_parser.add_argument(
|
||||
"-f",
|
||||
"--force",
|
||||
action="store_true",
|
||||
help=(
|
||||
"Skip the interactive preview+confirmation step and run the "
|
||||
"release directly."
|
||||
),
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# version
|
||||
# ------------------------------------------------------------
|
||||
version_parser = subparsers.add_parser(
|
||||
"version",
|
||||
help=(
|
||||
"Show version information for repository/ies "
|
||||
"(git tags, pyproject.toml, flake.nix, PKGBUILD, debian, spec, "
|
||||
"Ansible Galaxy)."
|
||||
),
|
||||
)
|
||||
add_identifier_arguments(version_parser)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# changelog
|
||||
# ------------------------------------------------------------
|
||||
changelog_parser = subparsers.add_parser(
|
||||
"changelog",
|
||||
help=(
|
||||
"Show changelog derived from Git history. "
|
||||
"By default, shows the changes between the last two SemVer tags."
|
||||
),
|
||||
)
|
||||
changelog_parser.add_argument(
|
||||
"range",
|
||||
nargs="?",
|
||||
default="",
|
||||
help=(
|
||||
"Optional tag or range (e.g. v1.2.3 or v1.2.0..v1.2.3). "
|
||||
"If omitted, the changelog between the last two SemVer "
|
||||
"tags is shown."
|
||||
),
|
||||
)
|
||||
add_identifier_arguments(changelog_parser)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# list
|
||||
# ------------------------------------------------------------
|
||||
list_parser = subparsers.add_parser(
|
||||
"list",
|
||||
help="List all repositories with details and status",
|
||||
)
|
||||
# same selection logic as for install/update/etc.:
|
||||
add_identifier_arguments(list_parser)
|
||||
list_parser.add_argument(
|
||||
"--status",
|
||||
type=str,
|
||||
default="",
|
||||
help=(
|
||||
"Filter repositories by status (case insensitive). "
|
||||
"Use /regex/ for regular expressions."
|
||||
),
|
||||
)
|
||||
list_parser.add_argument(
|
||||
"--description",
|
||||
action="store_true",
|
||||
help=(
|
||||
"Show an additional detailed section per repository "
|
||||
"(description, homepage, tags, categories, paths)."
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# make
|
||||
# ------------------------------------------------------------
|
||||
make_parser = subparsers.add_parser(
|
||||
"make",
|
||||
help="Executes make commands",
|
||||
)
|
||||
add_identifier_arguments(make_parser)
|
||||
make_subparsers = make_parser.add_subparsers(
|
||||
dest="subcommand",
|
||||
help="Make subcommands",
|
||||
required=True,
|
||||
)
|
||||
|
||||
make_install = make_subparsers.add_parser(
|
||||
"install",
|
||||
help="Executes the make install command",
|
||||
)
|
||||
add_identifier_arguments(make_install)
|
||||
|
||||
make_deinstall = make_subparsers.add_parser(
|
||||
"deinstall",
|
||||
help="Executes the make deinstall command",
|
||||
)
|
||||
add_identifier_arguments(make_deinstall)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Proxy commands (git, docker, docker compose, ...)
|
||||
# ------------------------------------------------------------
|
||||
register_proxy_commands(subparsers)
|
||||
|
||||
return parser
|
||||
@@ -1,113 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Command resolver for repositories.

This module determines the correct command to expose via symlink.
It implements the following priority:

1. Explicit command in repo config → command
2. System package manager binary (/usr/...) → NO LINK (respect OS)
3. Nix profile binary (~/.nix-profile/bin/<id>) → command
4. Python / non-system console script on PATH → command
5. Fallback: repository's main.sh or main.py → command
6. If nothing is available → raise error

The actual symlink creation is handled by create_ink(). This resolver
only decides *what* should be used as the entrypoint, or whether no
link should be created at all.
"""

import os
import shutil
from typing import Optional


def resolve_command_for_repo(repo, repo_identifier: str, repo_dir: str) -> Optional[str]:
    """
    Determine the command for this repository.

    Returns:
        str → path to the command (a symlink should be created)
        None → do NOT create a link (e.g. system package already provides it)

    On total failure (no suitable command found at any layer), this function
    raises SystemExit with a descriptive error message.
    """
    # ------------------------------------------------------------
    # 1. Explicit command defined by repository config
    # ------------------------------------------------------------
    explicit = repo.get("command")
    if explicit:
        return explicit

    home = os.path.expanduser("~")

    def is_executable(path: str) -> bool:
        return os.path.exists(path) and os.access(path, os.X_OK)

    # ------------------------------------------------------------
    # 2. System package manager binary via PATH
    #
    # If the binary lives under /usr/, we treat it as a system-managed
    # package (e.g. installed via pacman/apt/yum). In that case, pkgmgr
    # does NOT create a link at all and defers entirely to the OS.
    # ------------------------------------------------------------
    path_candidate = shutil.which(repo_identifier)
    system_binary: Optional[str] = None
    non_system_binary: Optional[str] = None

    if path_candidate:
        if path_candidate.startswith("/usr/"):
            system_binary = path_candidate
        else:
            non_system_binary = path_candidate

    if system_binary:
        # Respect system package manager: do not create a link.
        if repo.get("debug", False):
            print(
                f"[pkgmgr] System binary for '{repo_identifier}' found at "
                f"{system_binary}; no symlink will be created."
            )
        return None

    # ------------------------------------------------------------
    # 3. Nix profile binary (~/.nix-profile/bin/<identifier>)
    # ------------------------------------------------------------
    nix_candidate = os.path.join(home, ".nix-profile", "bin", repo_identifier)
    if is_executable(nix_candidate):
        return nix_candidate

    # ------------------------------------------------------------
    # 4. Python / non-system console script on PATH
    #
    # Here we reuse the non-system PATH candidate (e.g. from a venv or
    # a user-local install like ~/.local/bin). This is treated as a
    # valid command target.
    # ------------------------------------------------------------
    if non_system_binary and is_executable(non_system_binary):
        return non_system_binary

    # ------------------------------------------------------------
    # 5. Fallback: main.sh / main.py inside the repository
    # ------------------------------------------------------------
    main_sh = os.path.join(repo_dir, "main.sh")
    main_py = os.path.join(repo_dir, "main.py")

    if is_executable(main_sh):
        return main_sh

    if is_executable(main_py) or os.path.exists(main_py):
        return main_py

    # ------------------------------------------------------------
    # 6. Nothing found → treat as a hard error
    # ------------------------------------------------------------
    raise SystemExit(
        f"No executable command could be resolved for repository '{repo_identifier}'. "
        "No explicit 'command' configured, no system-managed binary under /usr/, "
        "no Nix profile binary, no non-system console script on PATH, and no "
        "main.sh/main.py found in the repository."
    )
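The six-step priority above can be restated as a compact standalone sketch (this is not the project's API; the helper name and paths are illustrative only):

    import os
    import shutil
    from typing import Optional

    def sketch_resolve(repo: dict, identifier: str, repo_dir: str) -> Optional[str]:
        """Illustrative restatement of the resolution order; not the real resolver."""
        if repo.get("command"):                              # 1. explicit config wins
            return repo["command"]
        found = shutil.which(identifier)
        if found and found.startswith("/usr/"):              # 2. system package → no link
            return None
        nix = os.path.expanduser(f"~/.nix-profile/bin/{identifier}")
        if os.access(nix, os.X_OK):                          # 3. Nix profile binary
            return nix
        if found and os.access(found, os.X_OK):              # 4. non-system script on PATH
            return found
        for name in ("main.sh", "main.py"):                  # 5. repository fallback
            candidate = os.path.join(repo_dir, name)
            if os.path.exists(candidate):
                return candidate
        raise SystemExit(f"No command could be resolved for '{identifier}'")  # 6. hard error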
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "package-manager"
version = "0.7.11"
version = "0.9.1"
description = "Kevin's package-manager tool (pkgmgr)"
readme = "README.md"
requires-python = ">=3.11"
@@ -39,13 +39,13 @@ pkgmgr = "pkgmgr.cli:main"
# -----------------------------
# setuptools configuration
# -----------------------------
# We use find_packages(), not a fixed list,
# and explicitly include pkgmgr* and config*
# Source layout: all packages live under "src/"
[tool.setuptools]
package-dir = { "" = "src", "config" = "config" }

[tool.setuptools.packages.find]
where = ["."]
where = ["src", "."]
include = ["pkgmgr*", "config*"]

# Ensure defaults.yaml is shipped inside wheels & nix builds
[tool.setuptools.package-data]
"config" = ["defaults.yaml"]
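Given the package-data entry above, a quick, hedged way to confirm that defaults.yaml actually ends up inside an installed wheel is to resolve it through importlib.resources; this assumes the top-level "config" package is importable in the target environment:

    from importlib import resources

    # "config" is the package configured via package-dir / package-data above;
    # this check only makes sense in an environment where the wheel is installed.
    defaults = resources.files("config") / "defaults.yaml"
    print(defaults, defaults.is_file())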
@@ -4,32 +4,21 @@ set -euo pipefail
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
source "${SCRIPT_DIR}/resolve-base-image.sh"
|
||||
|
||||
echo "============================================================"
|
||||
echo ">>> Building ONLY missing container images"
|
||||
echo "============================================================"
|
||||
IMAGE="package-manager-test-$distro"
|
||||
BASE_IMAGE="$(resolve_base_image "$distro")"
|
||||
|
||||
for distro in $DISTROS; do
|
||||
IMAGE="package-manager-test-$distro"
|
||||
BASE_IMAGE="$(resolve_base_image "$distro")"
|
||||
|
||||
if docker image inspect "$IMAGE" >/dev/null 2>&1; then
|
||||
echo "[build-missing] Image already exists: $IMAGE (skipping)"
|
||||
continue
|
||||
fi
|
||||
|
||||
echo
|
||||
echo "------------------------------------------------------------"
|
||||
echo "[build-missing] Building missing image: $IMAGE"
|
||||
echo "BASE_IMAGE = $BASE_IMAGE"
|
||||
echo "------------------------------------------------------------"
|
||||
|
||||
docker build \
|
||||
--build-arg BASE_IMAGE="$BASE_IMAGE" \
|
||||
-t "$IMAGE" \
|
||||
.
|
||||
done
|
||||
if docker image inspect "$IMAGE" >/dev/null 2>&1; then
|
||||
echo "[build-missing] Image already exists: $IMAGE (skipping)"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo
|
||||
echo "============================================================"
|
||||
echo ">>> build-missing: Done"
|
||||
echo "============================================================"
|
||||
echo "------------------------------------------------------------"
|
||||
echo "[build-missing] Building missing image: $IMAGE"
|
||||
echo "BASE_IMAGE = $BASE_IMAGE"
|
||||
echo "------------------------------------------------------------"
|
||||
|
||||
docker build \
|
||||
--build-arg BASE_IMAGE="$BASE_IMAGE" \
|
||||
-t "$IMAGE" \
|
||||
.
|
||||
|
||||
@@ -4,14 +4,12 @@ set -euo pipefail
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
source "${SCRIPT_DIR}/resolve-base-image.sh"
|
||||
|
||||
for distro in $DISTROS; do
|
||||
base_image="$(resolve_base_image "$distro")"
|
||||
base_image="$(resolve_base_image "$distro")"
|
||||
|
||||
echo ">>> Building test image for distro '$distro' with NO CACHE (BASE_IMAGE=$base_image)..."
|
||||
echo ">>> Building test image for distro '$distro' with NO CACHE (BASE_IMAGE=$base_image)..."
|
||||
|
||||
docker build \
|
||||
--no-cache \
|
||||
--build-arg BASE_IMAGE="$base_image" \
|
||||
-t "package-manager-test-$distro" \
|
||||
.
|
||||
done
|
||||
docker build \
|
||||
--no-cache \
|
||||
--build-arg BASE_IMAGE="$base_image" \
|
||||
-t "package-manager-test-$distro" \
|
||||
.
|
||||
|
||||
@@ -4,13 +4,11 @@ set -euo pipefail
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
source "${SCRIPT_DIR}/resolve-base-image.sh"
|
||||
|
||||
for distro in $DISTROS; do
|
||||
base_image="$(resolve_base_image "$distro")"
|
||||
base_image="$(resolve_base_image "$distro")"
|
||||
|
||||
echo ">>> Building test image for distro '$distro' (BASE_IMAGE=$base_image)..."
|
||||
echo ">>> Building test image for distro '$distro' (BASE_IMAGE=$base_image)..."
|
||||
|
||||
docker build \
|
||||
--build-arg BASE_IMAGE="$base_image" \
|
||||
-t "package-manager-test-$distro" \
|
||||
.
|
||||
done
|
||||
docker build \
|
||||
--build-arg BASE_IMAGE="$base_image" \
|
||||
-t "package-manager-test-$distro" \
|
||||
.
|
||||
|
||||
@@ -94,7 +94,15 @@ if [[ "${IN_CONTAINER}" -eq 1 && "${EUID:-0}" -eq 0 ]]; then
|
||||
# Ensure "nix" user (home at /home/nix)
|
||||
if ! id nix >/dev/null 2>&1; then
|
||||
echo "[init-nix] Creating user 'nix'..."
|
||||
useradd -m -r -g nixbld -s /usr/bin/bash nix
|
||||
# Resolve a valid shell path across distros:
|
||||
# - Debian/Ubuntu: /bin/bash
|
||||
# - Arch: /usr/bin/bash (often symlinked)
|
||||
# Fall back to /bin/sh on ultra-minimal systems.
|
||||
BASH_SHELL="$(command -v bash || true)"
|
||||
if [[ -z "${BASH_SHELL}" ]]; then
|
||||
BASH_SHELL="/bin/sh"
|
||||
fi
|
||||
useradd -m -r -g nixbld -s "${BASH_SHELL}" nix
|
||||
fi
|
||||
|
||||
# Ensure /nix exists and is writable by the "nix" user.
|
||||
|
||||
@@ -3,10 +3,21 @@ set -euo pipefail
|
||||
|
||||
echo "[arch/package] Building Arch package (makepkg --nodeps)..."
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||
PKG_DIR="${PROJECT_ROOT}/packaging/arch"
|
||||
|
||||
if [[ ! -f "${PKG_DIR}/PKGBUILD" ]]; then
|
||||
echo "[arch/package] ERROR: PKGBUILD not found in ${PKG_DIR}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd "${PKG_DIR}"
|
||||
|
||||
if id aur_builder >/dev/null 2>&1; then
|
||||
echo "[arch/package] Using 'aur_builder' user for makepkg..."
|
||||
chown -R aur_builder:aur_builder "$(pwd)"
|
||||
su aur_builder -c "cd '$(pwd)' && rm -f package-manager-*.pkg.tar.* && makepkg --noconfirm --clean --nodeps"
|
||||
chown -R aur_builder:aur_builder "${PKG_DIR}"
|
||||
su aur_builder -c "cd '${PKG_DIR}' && rm -f package-manager-*.pkg.tar.* && makepkg --noconfirm --clean --nodeps"
|
||||
else
|
||||
echo "[arch/package] WARNING: user 'aur_builder' not found, running makepkg as current user..."
|
||||
rm -f package-manager-*.pkg.tar.*
|
||||
|
||||
@@ -4,8 +4,17 @@ set -euo pipefail
|
||||
echo "[centos/package] Setting up rpmbuild directories..."
|
||||
mkdir -p /root/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||
SPEC_PATH="${PROJECT_ROOT}/packaging/fedora/package-manager.spec"
|
||||
|
||||
if [[ ! -f "${SPEC_PATH}" ]]; then
|
||||
echo "[centos/package] ERROR: SPEC file not found: ${SPEC_PATH}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "[centos/package] Extracting version from package-manager.spec..."
|
||||
version="$(grep -E '^Version:' package-manager.spec | awk '{print $2}')"
|
||||
version="$(grep -E '^Version:' "${SPEC_PATH}" | awk '{print $2}')"
|
||||
if [[ -z "${version}" ]]; then
|
||||
echo "ERROR: Version missing!"
|
||||
exit 1
|
||||
@@ -15,13 +24,13 @@ srcdir="package-manager-${version}"
|
||||
echo "[centos/package] Preparing source tree: ${srcdir}"
|
||||
rm -rf "/tmp/${srcdir}"
|
||||
mkdir -p "/tmp/${srcdir}"
|
||||
cp -a . "/tmp/${srcdir}/"
|
||||
cp -a "${PROJECT_ROOT}/." "/tmp/${srcdir}/"
|
||||
|
||||
echo "[centos/package] Creating source tarball..."
|
||||
tar czf "/root/rpmbuild/SOURCES/${srcdir}.tar.gz" -C /tmp "${srcdir}"
|
||||
|
||||
echo "[centos/package] Copying SPEC..."
|
||||
cp package-manager.spec /root/rpmbuild/SPECS/
|
||||
cp "${SPEC_PATH}" /root/rpmbuild/SPECS/
|
||||
|
||||
echo "[centos/package] Running rpmbuild..."
|
||||
cd /root/rpmbuild/SPECS
|
||||
|
||||
@@ -3,6 +3,25 @@ set -euo pipefail
|
||||
|
||||
echo "[debian/package] Building Debian package..."
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||
|
||||
BUILD_ROOT="/tmp/package-manager-debian-build"
|
||||
rm -rf "${BUILD_ROOT}"
|
||||
mkdir -p "${BUILD_ROOT}"
|
||||
|
||||
echo "[debian/package] Syncing project sources to ${BUILD_ROOT}..."
|
||||
rsync -a \
|
||||
--exclude 'packaging/debian' \
|
||||
"${PROJECT_ROOT}/" "${BUILD_ROOT}/"
|
||||
|
||||
echo "[debian/package] Overlaying debian/ metadata from packaging/debian..."
|
||||
mkdir -p "${BUILD_ROOT}/debian"
|
||||
cp -a "${PROJECT_ROOT}/packaging/debian/." "${BUILD_ROOT}/debian/"
|
||||
|
||||
cd "${BUILD_ROOT}"
|
||||
|
||||
echo "[debian/package] Running dpkg-buildpackage..."
|
||||
dpkg-buildpackage -us -uc -b
|
||||
|
||||
echo "[debian/package] Installing generated DEB package..."
|
||||
|
||||
@@ -4,8 +4,17 @@ set -euo pipefail
|
||||
echo "[fedora/package] Setting up rpmbuild directories..."
|
||||
mkdir -p /root/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||
SPEC_PATH="${PROJECT_ROOT}/packaging/fedora/package-manager.spec"
|
||||
|
||||
if [[ ! -f "${SPEC_PATH}" ]]; then
|
||||
echo "[fedora/package] ERROR: SPEC file not found: ${SPEC_PATH}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "[fedora/package] Extracting version from package-manager.spec..."
|
||||
version="$(grep -E '^Version:' package-manager.spec | awk '{print $2}')"
|
||||
version="$(grep -E '^Version:' "${SPEC_PATH}" | awk '{print $2}')"
|
||||
if [[ -z "${version}" ]]; then
|
||||
echo "ERROR: Version missing!"
|
||||
exit 1
|
||||
@@ -15,13 +24,13 @@ srcdir="package-manager-${version}"
|
||||
echo "[fedora/package] Preparing source tree: ${srcdir}"
|
||||
rm -rf "/tmp/${srcdir}"
|
||||
mkdir -p "/tmp/${srcdir}"
|
||||
cp -a . "/tmp/${srcdir}/"
|
||||
cp -a "${PROJECT_ROOT}/." "/tmp/${srcdir}/"
|
||||
|
||||
echo "[fedora/package] Creating source tarball..."
|
||||
tar czf "/root/rpmbuild/SOURCES/${srcdir}.tar.gz" -C /tmp "${srcdir}"
|
||||
|
||||
echo "[fedora/package] Copying SPEC..."
|
||||
cp package-manager.spec /root/rpmbuild/SPECS/
|
||||
cp "${SPEC_PATH}" /root/rpmbuild/SPECS/
|
||||
|
||||
echo "[fedora/package] Running rpmbuild..."
|
||||
cd /root/rpmbuild/SPECS
|
||||
|
||||
@@ -4,20 +4,22 @@ set -euo pipefail
|
||||
# ------------------------------------------------------------
|
||||
# main.sh
|
||||
#
|
||||
# Developer setup entrypoint.
|
||||
# Developer / system setup entrypoint.
|
||||
#
|
||||
# Responsibilities:
|
||||
# - If inside a Nix shell (IN_NIX_SHELL=1):
|
||||
# * Skip venv creation and dependency installation
|
||||
# * Run `python3 main.py install`
|
||||
# - Otherwise:
|
||||
# - If running as root (EUID=0):
|
||||
# * Run system-level installer (run-package.sh)
|
||||
# - Otherwise (normal user):
|
||||
# * Create ~/.venvs/pkgmgr virtual environment if missing
|
||||
# * Install Python dependencies into that venv
|
||||
# * Append auto-activation to ~/.bashrc and ~/.zshrc
|
||||
# * Run `main.py install` using the venv Python
|
||||
# ------------------------------------------------------------
|
||||
|
||||
echo "[installation/main] Starting developer setup..."
|
||||
echo "[installation/main] Starting setup..."
|
||||
|
||||
PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
||||
cd "${PROJECT_ROOT}"
|
||||
@@ -26,20 +28,34 @@ VENV_DIR="${HOME}/.venvs/pkgmgr"
|
||||
RC_LINE='if [ -d "${HOME}/.venvs/pkgmgr" ]; then . "${HOME}/.venvs/pkgmgr/bin/activate"; if [ -n "${PS1:-}" ]; then echo "Global Python virtual environment '\''~/.venvs/pkgmgr'\'' activated."; fi; fi'
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Nix shell mode: do not touch venv, only run main.py install
|
||||
# 1) Nix shell mode: do not touch venv, only run main.py install
|
||||
# ------------------------------------------------------------
|
||||
if [[ -n "${IN_NIX_SHELL:-}" ]]; then
|
||||
echo "[installation/main] Nix shell detected (IN_NIX_SHELL=1)."
|
||||
echo "[installation/main] Skipping virtualenv creation and dependency installation."
|
||||
echo "[installation/main] Running main.py install via system python3..."
|
||||
python3 main.py install
|
||||
echo "[installation/main] Developer setup finished (Nix mode)."
|
||||
echo "[installation/main] Setup finished (Nix mode)."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Normal host mode: create/update venv and run main.py install
|
||||
# 2) Root mode: system / distro-level installation
|
||||
# ------------------------------------------------------------
|
||||
if [[ "${EUID:-$(id -u)}" -eq 0 ]]; then
|
||||
echo "[installation/main] Running as root (EUID=0)."
|
||||
echo "[installation/main] Skipping user virtualenv and shell RC modifications."
|
||||
echo "[installation/main] Delegating to scripts/installation/run-package.sh..."
|
||||
bash scripts/installation/run-package.sh
|
||||
echo "[installation/main] Root/system setup complete."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# 3) Normal user mode: dev setup with venv
|
||||
# ------------------------------------------------------------
|
||||
|
||||
echo "[installation/main] Running in normal user mode (developer setup)."
|
||||
|
||||
echo "[installation/main] Ensuring main.py is executable..."
|
||||
chmod +x main.py || true
|
||||
@@ -47,26 +63,8 @@ chmod +x main.py || true
|
||||
echo "[installation/main] Ensuring global virtualenv root: ${HOME}/.venvs"
|
||||
mkdir -p "${HOME}/.venvs"
|
||||
|
||||
if [[ ! -d "${VENV_DIR}" ]]; then
|
||||
echo "[installation/main] Creating virtual environment at: ${VENV_DIR}"
|
||||
python3 -m venv "${VENV_DIR}"
|
||||
else
|
||||
echo "[installation/main] Virtual environment already exists at: ${VENV_DIR}"
|
||||
fi
|
||||
|
||||
echo "[installation/main] Installing Python tooling into venv..."
|
||||
"${VENV_DIR}/bin/python" -m ensurepip --upgrade
|
||||
"${VENV_DIR}/bin/pip" install --upgrade pip setuptools wheel
|
||||
|
||||
if [[ -f "requirements.txt" ]]; then
|
||||
echo "[installation/main] Installing dependencies from requirements.txt..."
|
||||
"${VENV_DIR}/bin/pip" install -r requirements.txt
|
||||
elif [[ -f "_requirements.txt" ]]; then
|
||||
echo "[installation/main] Installing dependencies from _requirements.txt..."
|
||||
"${VENV_DIR}/bin/pip" install -r _requirements.txt
|
||||
else
|
||||
echo "[installation/main] No requirements.txt or _requirements.txt found. Skipping dependency installation."
|
||||
fi
|
||||
echo "[installation/main] Creating/updating virtualenv via helper..."
|
||||
PKGMGR_VENV_DIR="${VENV_DIR}" bash scripts/installation/venv-create.sh
|
||||
|
||||
echo "[installation/main] Ensuring ~/.bashrc and ~/.zshrc exist..."
|
||||
touch "${HOME}/.bashrc" "${HOME}/.zshrc"
|
||||
|
||||
@@ -8,6 +8,12 @@ source "${SCRIPT_DIR}/lib.sh"
|
||||
|
||||
OS_ID="$(detect_os_id)"
|
||||
|
||||
# Map Manjaro to Arch
|
||||
if [[ "${OS_ID}" == "manjaro" ]]; then
|
||||
echo "[run-package] Mapping OS 'manjaro' → 'arch'"
|
||||
OS_ID="arch"
|
||||
fi
|
||||
|
||||
echo "[run-package] Detected OS: ${OS_ID}"
|
||||
|
||||
case "${OS_ID}" in
|
||||
|
||||
@@ -3,6 +3,25 @@ set -euo pipefail
|
||||
|
||||
echo "[ubuntu/package] Building Ubuntu (Debian-style) package..."
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||
|
||||
BUILD_ROOT="/tmp/package-manager-ubuntu-build"
|
||||
rm -rf "${BUILD_ROOT}"
|
||||
mkdir -p "${BUILD_ROOT}"
|
||||
|
||||
echo "[ubuntu/package] Syncing project sources to ${BUILD_ROOT}..."
|
||||
rsync -a \
|
||||
--exclude 'packaging/debian' \
|
||||
"${PROJECT_ROOT}/" "${BUILD_ROOT}/"
|
||||
|
||||
echo "[ubuntu/package] Overlaying debian/ metadata from packaging/debian..."
|
||||
mkdir -p "${BUILD_ROOT}/debian"
|
||||
cp -a "${PROJECT_ROOT}/packaging/debian/." "${BUILD_ROOT}/debian/"
|
||||
|
||||
cd "${BUILD_ROOT}"
|
||||
|
||||
echo "[ubuntu/package] Running dpkg-buildpackage..."
|
||||
dpkg-buildpackage -us -uc -b
|
||||
|
||||
echo "[ubuntu/package] Installing generated DEB package..."
|
||||
|
||||
44  scripts/installation/venv-create.sh  Normal file
@@ -0,0 +1,44 @@
#!/usr/bin/env bash
set -euo pipefail

# venv-create.sh
#
# Small helper to create/update a Python virtual environment for pkgmgr.
#
# Usage:
#   PKGMGR_VENV_DIR=/home/dev/.venvs/pkgmgr bash scripts/installation/venv-create.sh
# or
#   bash scripts/installation/venv-create.sh /home/dev/.venvs/pkgmgr

PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "${PROJECT_ROOT}"

VENV_DIR="${PKGMGR_VENV_DIR:-${1:-${HOME}/.venvs/pkgmgr}}"

echo "[venv-create] Using VENV_DIR=${VENV_DIR}"

echo "[venv-create] Ensuring virtualenv parent directory exists..."
mkdir -p "$(dirname "${VENV_DIR}")"

if [[ ! -d "${VENV_DIR}" ]]; then
    echo "[venv-create] Creating virtual environment at: ${VENV_DIR}"
    python3 -m venv "${VENV_DIR}"
else
    echo "[venv-create] Virtual environment already exists at: ${VENV_DIR}"
fi

echo "[venv-create] Installing Python tooling into venv..."
"${VENV_DIR}/bin/python" -m ensurepip --upgrade
"${VENV_DIR}/bin/pip" install --upgrade pip setuptools wheel

if [[ -f "requirements.txt" ]]; then
    echo "[venv-create] Installing dependencies from requirements.txt..."
    "${VENV_DIR}/bin/pip" install -r requirements.txt
elif [[ -f "_requirements.txt" ]]; then
    echo "[venv-create] Installing dependencies from _requirements.txt..."
    "${VENV_DIR}/bin/pip" install -r _requirements.txt
else
    echo "[venv-create] No requirements.txt or _requirements.txt found. Skipping dependency installation."
fi

echo "[venv-create] Done."
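For readers more comfortable on the Python side, the same bootstrap steps can be sketched with the standard library (a rough illustration of what the helper above does, not a replacement for it; the paths are examples):

    import subprocess
    import venv
    from pathlib import Path

    venv_dir = Path.home() / ".venvs" / "pkgmgr"   # same default as the script
    venv_dir.parent.mkdir(parents=True, exist_ok=True)

    if not venv_dir.exists():
        venv.EnvBuilder(with_pip=True).create(venv_dir)   # python3 -m venv <dir>

    pip = venv_dir / "bin" / "pip"
    subprocess.run([str(pip), "install", "--upgrade", "pip", "setuptools", "wheel"], check=True)

    for req in ("requirements.txt", "_requirements.txt"):
        if Path(req).is_file():
            subprocess.run([str(pip), "install", "-r", req], check=True)
            break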
@@ -1,41 +1,32 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "============================================================"
|
||||
echo ">>> Running sanity test: verifying test containers start"
|
||||
echo "============================================================"
|
||||
|
||||
for distro in $DISTROS; do
|
||||
IMAGE="package-manager-test-$distro"
|
||||
|
||||
echo
|
||||
echo "------------------------------------------------------------"
|
||||
echo ">>> Testing container: $IMAGE"
|
||||
echo "------------------------------------------------------------"
|
||||
|
||||
echo "[test-container] Running: docker run --rm --entrypoint pkgmgr $IMAGE --help"
|
||||
echo
|
||||
|
||||
# Run the command and capture the output
|
||||
if OUTPUT=$(docker run --rm \
|
||||
-e PKGMGR_DEV=1 \
|
||||
-v pkgmgr_nix_store:/nix \
|
||||
-v "$(pwd):/src" \
|
||||
-v "pkgmgr_nix_cache:/root/.cache/nix" \
|
||||
"$IMAGE" 2>&1); then
|
||||
echo "$OUTPUT"
|
||||
echo
|
||||
echo "[test-container] SUCCESS: $IMAGE responded to 'pkgmgr --help'"
|
||||
|
||||
else
|
||||
echo "$OUTPUT"
|
||||
echo
|
||||
echo "[test-container] ERROR: $IMAGE failed to run 'pkgmgr --help'"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
IMAGE="package-manager-test-$distro"
|
||||
|
||||
echo
|
||||
echo "============================================================"
|
||||
echo ">>> All containers passed the sanity check"
|
||||
echo "============================================================"
|
||||
echo "------------------------------------------------------------"
|
||||
echo ">>> Testing container: $IMAGE"
|
||||
echo "------------------------------------------------------------"
|
||||
echo "[test-container] Inspect image metadata:"
|
||||
docker image inspect "$IMAGE" | sed -n '1,40p'
|
||||
|
||||
echo "[test-container] Running: docker run --rm --entrypoint pkgmgr $IMAGE --help"
|
||||
echo
|
||||
|
||||
# Run the command and capture the output
|
||||
if OUTPUT=$(docker run --rm \
|
||||
-e PKGMGR_DEV=1 \
|
||||
-v pkgmgr_nix_store_${distro}:/nix \
|
||||
-v "$(pwd):/src" \
|
||||
-v "pkgmgr_nix_cache_${distro}:/root/.cache/nix" \
|
||||
"$IMAGE" 2>&1); then
|
||||
echo "$OUTPUT"
|
||||
echo
|
||||
echo "[test-container] SUCCESS: $IMAGE responded to 'pkgmgr --help'"
|
||||
|
||||
else
|
||||
echo "$OUTPUT"
|
||||
echo
|
||||
echo "[test-container] ERROR: $IMAGE failed to run 'pkgmgr --help'"
|
||||
exit 1
|
||||
fi
|
||||
@@ -1,52 +1,60 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
echo ">>> Running E2E tests in all distros: $DISTROS"
|
||||
echo "============================================================"
|
||||
echo ">>> Running E2E tests: $distro"
|
||||
echo "============================================================"
|
||||
|
||||
for distro in $DISTROS; do
|
||||
echo "============================================================"
|
||||
echo ">>> Running E2E tests: $distro"
|
||||
echo "============================================================"
|
||||
docker run --rm \
|
||||
-v "$(pwd):/src" \
|
||||
-v "pkgmgr_nix_store_${distro}:/nix" \
|
||||
-v "pkgmgr_nix_cache_${distro}:/root/.cache/nix" \
|
||||
-e PKGMGR_DEV=1 \
|
||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||
--workdir /src \
|
||||
"package-manager-test-${distro}" \
|
||||
bash -lc '
|
||||
set -euo pipefail
|
||||
|
||||
docker run --rm \
|
||||
-v "$(pwd):/src" \
|
||||
-v pkgmgr_nix_store:/nix \
|
||||
-v "pkgmgr_nix_cache:/root/.cache/nix" \
|
||||
-e PKGMGR_DEV=1 \
|
||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||
--workdir /src \
|
||||
--entrypoint bash \
|
||||
"package-manager-test-$distro" \
|
||||
-c '
|
||||
set -e;
|
||||
# Load distro info
|
||||
if [ -f /etc/os-release ]; then
|
||||
. /etc/os-release
|
||||
fi
|
||||
|
||||
if [ -f /etc/os-release ]; then
|
||||
. /etc/os-release;
|
||||
fi;
|
||||
echo "Running tests inside distro: ${ID:-unknown}"
|
||||
|
||||
echo "Running tests inside distro: $ID";
|
||||
# Load Nix environment if available
|
||||
if [ -f "/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh" ]; then
|
||||
. "/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"
|
||||
fi
|
||||
|
||||
# Try to load nix environment
|
||||
if [ -f "/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh" ]; then
|
||||
. "/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh";
|
||||
fi
|
||||
if [ -f "$HOME/.nix-profile/etc/profile.d/nix.sh" ]; then
|
||||
. "$HOME/.nix-profile/etc/profile.d/nix.sh"
|
||||
fi
|
||||
|
||||
if [ -f "$HOME/.nix-profile/etc/profile.d/nix.sh" ]; then
|
||||
. "$HOME/.nix-profile/etc/profile.d/nix.sh";
|
||||
fi
|
||||
PATH="/nix/var/nix/profiles/default/bin:$HOME/.nix-profile/bin:$PATH"
|
||||
|
||||
PATH="/nix/var/nix/profiles/default/bin:$HOME/.nix-profile/bin:$PATH";
|
||||
command -v nix >/dev/null || {
|
||||
echo "ERROR: nix not found."
|
||||
exit 1
|
||||
}
|
||||
|
||||
command -v nix >/dev/null || {
|
||||
echo "ERROR: nix not found.";
|
||||
exit 1;
|
||||
}
|
||||
# Mark the mounted repository as safe to avoid Git ownership errors.
|
||||
# Newer Git (e.g. on Ubuntu) complains about the gitdir (/src/.git),
|
||||
# older versions about the worktree (/src). Nix turns "." into the
|
||||
# flake input "git+file:///src", which then uses Git under the hood.
|
||||
if command -v git >/dev/null 2>&1; then
|
||||
# Worktree path
|
||||
git config --global --add safe.directory /src || true
|
||||
# Gitdir path shown in the "dubious ownership" error
|
||||
git config --global --add safe.directory /src/.git || true
|
||||
# Ephemeral CI containers: allow all paths as a last resort
|
||||
git config --global --add safe.directory '*' || true
|
||||
fi
|
||||
|
||||
git config --global --add safe.directory /src || true;
|
||||
|
||||
nix develop .#default --no-write-lock-file -c \
|
||||
python3 -m unittest discover \
|
||||
-s /src/tests/e2e \
|
||||
-p "$TEST_PATTERN";
|
||||
'
|
||||
done
|
||||
# Run the E2E tests inside the Nix development shell
|
||||
nix develop .#default --no-write-lock-file -c \
|
||||
python3 -m unittest discover \
|
||||
-s /src/tests/e2e \
|
||||
-p "$TEST_PATTERN"
|
||||
'
|
||||
|
||||
@@ -2,23 +2,22 @@
|
||||
set -euo pipefail
|
||||
|
||||
echo "============================================================"
|
||||
echo ">>> Running INTEGRATION tests in Arch container"
|
||||
echo ">>> Running INTEGRATION tests in ${distro} container"
|
||||
echo "============================================================"
|
||||
|
||||
docker run --rm \
|
||||
-v "$(pwd):/src" \
|
||||
-v pkgmgr_nix_store:/nix \
|
||||
-v "pkgmgr_nix_cache:/root/.cache/nix" \
|
||||
-v pkgmgr_nix_store_${distro}:/nix \
|
||||
-v "pkgmgr_nix_cache_${distro}:/root/.cache/nix" \
|
||||
--workdir /src \
|
||||
-e PKGMGR_DEV=1 \
|
||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||
--entrypoint bash \
|
||||
"package-manager-test-arch" \
|
||||
-c '
|
||||
"package-manager-test-${distro}" \
|
||||
bash -lc '
|
||||
set -e;
|
||||
git config --global --add safe.directory /src || true;
|
||||
nix develop .#default --no-write-lock-file -c \
|
||||
python -m unittest discover \
|
||||
python3 -m unittest discover \
|
||||
-s tests/integration \
|
||||
-t /src \
|
||||
-p "$TEST_PATTERN";
|
||||
|
||||
@@ -2,23 +2,22 @@
|
||||
set -euo pipefail
|
||||
|
||||
echo "============================================================"
|
||||
echo ">>> Running UNIT tests in Arch container"
|
||||
echo ">>> Running UNIT tests in ${distro} container"
|
||||
echo "============================================================"
|
||||
|
||||
docker run --rm \
|
||||
-v "$(pwd):/src" \
|
||||
-v "pkgmgr_nix_cache:/root/.cache/nix" \
|
||||
-v pkgmgr_nix_store:/nix \
|
||||
-v "pkgmgr_nix_cache_${distro}:/root/.cache/nix" \
|
||||
-v pkgmgr_nix_store_${distro}:/nix \
|
||||
--workdir /src \
|
||||
-e PKGMGR_DEV=1 \
|
||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||
--entrypoint bash \
|
||||
"package-manager-test-arch" \
|
||||
-c '
|
||||
"package-manager-test-${distro}" \
|
||||
bash -lc '
|
||||
set -e;
|
||||
git config --global --add safe.directory /src || true;
|
||||
nix develop .#default --no-write-lock-file -c \
|
||||
python -m unittest discover \
|
||||
python3 -m unittest discover \
|
||||
-s tests/unit \
|
||||
-t /src \
|
||||
-p "$TEST_PATTERN";
|
||||
|
||||
36  src/pkgmgr/__init__.py  Normal file
@@ -0,0 +1,36 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Top-level pkgmgr package.

We deliberately avoid importing heavy submodules (like the CLI)
on import to keep unit tests fast and to not require optional
dependencies (like PyYAML) unless they are actually used.

Accessing ``pkgmgr.cli`` will load the CLI module lazily via
``__getattr__``. This keeps patterns like

    from pkgmgr import cli

working as expected in tests and entry points.
"""

from __future__ import annotations

from importlib import import_module
from typing import Any

__all__ = ["cli"]


def __getattr__(name: str) -> Any:
    """
    Lazily expose ``pkgmgr.cli`` as attribute on the top-level package.

    This keeps ``import pkgmgr`` lightweight while still allowing
    ``from pkgmgr import cli`` in tests and entry points.
    """
    if name == "cli":
        return import_module("pkgmgr.cli")
    raise AttributeError(f"module 'pkgmgr' has no attribute {name!r}")
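A small demonstration of the lazy-loading behaviour described in the docstring above (assumes pkgmgr is importable in the current environment):

    import sys

    import pkgmgr

    print("pkgmgr.cli" in sys.modules)  # False: the CLI has not been imported yet
    cli = pkgmgr.cli                    # attribute access triggers __getattr__ → import_module
    print("pkgmgr.cli" in sys.modules)  # True after the first access
    print(cli is pkgmgr.cli)            # later accesses return the same module object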
218  src/pkgmgr/actions/install/__init__.py  Normal file
@@ -0,0 +1,218 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
High-level entry point for repository installation.

Responsibilities:

- Ensure the repository directory exists (clone if necessary).
- Verify the repository (GPG / commit checks).
- Build a RepoContext object.
- Delegate the actual installation decision logic to InstallationPipeline.
"""

from __future__ import annotations

import os
from typing import Any, Dict, List

from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.verify import verify_repository
from pkgmgr.actions.repository.clone import clone_repos
from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.os_packages import (
    ArchPkgbuildInstaller,
    DebianControlInstaller,
    RpmSpecInstaller,
)
from pkgmgr.actions.install.installers.nix_flake import (
    NixFlakeInstaller,
)
from pkgmgr.actions.install.installers.python import PythonInstaller
from pkgmgr.actions.install.installers.makefile import (
    MakefileInstaller,
)
from pkgmgr.actions.install.pipeline import InstallationPipeline


Repository = Dict[str, Any]

# All available installers, in the order they should be considered.
INSTALLERS = [
    ArchPkgbuildInstaller(),
    DebianControlInstaller(),
    RpmSpecInstaller(),
    NixFlakeInstaller(),
    PythonInstaller(),
    MakefileInstaller(),
]


# ---------------------------------------------------------------------------
# Internal helpers
# ---------------------------------------------------------------------------


def _ensure_repo_dir(
    repo: Repository,
    repositories_base_dir: str,
    all_repos: List[Repository],
    preview: bool,
    no_verification: bool,
    clone_mode: str,
    identifier: str,
) -> str | None:
    """
    Compute and, if necessary, clone the repository directory.

    Returns the absolute repository path or None if cloning ultimately failed.
    """
    repo_dir = get_repo_dir(repositories_base_dir, repo)

    if not os.path.exists(repo_dir):
        print(
            f"Repository directory '{repo_dir}' does not exist. "
            f"Cloning it now..."
        )
        clone_repos(
            [repo],
            repositories_base_dir,
            all_repos,
            preview,
            no_verification,
            clone_mode,
        )
        if not os.path.exists(repo_dir):
            print(
                f"Cloning failed for repository {identifier}. "
                f"Skipping installation."
            )
            return None

    return repo_dir


def _verify_repo(
    repo: Repository,
    repo_dir: str,
    no_verification: bool,
    identifier: str,
) -> bool:
    """
    Verify a repository using the configured verification data.

    Returns True if verification is considered okay and installation may continue.
    """
    verified_info = repo.get("verified")
    verified_ok, errors, _commit_hash, _signing_key = verify_repository(
        repo,
        repo_dir,
        mode="local",
        no_verification=no_verification,
    )

    if not no_verification and verified_info and not verified_ok:
        print(f"Warning: Verification failed for {identifier}:")
        for err in errors:
            print(f" - {err}")
        choice = input("Continue anyway? [y/N]: ").strip().lower()
        if choice != "y":
            print(f"Skipping installation for {identifier}.")
            return False

    return True


def _create_context(
    repo: Repository,
    identifier: str,
    repo_dir: str,
    repositories_base_dir: str,
    bin_dir: str,
    all_repos: List[Repository],
    no_verification: bool,
    preview: bool,
    quiet: bool,
    clone_mode: str,
    update_dependencies: bool,
) -> RepoContext:
    """
    Build a RepoContext instance for the given repository.
    """
    return RepoContext(
        repo=repo,
        identifier=identifier,
        repo_dir=repo_dir,
        repositories_base_dir=repositories_base_dir,
        bin_dir=bin_dir,
        all_repos=all_repos,
        no_verification=no_verification,
        preview=preview,
        quiet=quiet,
        clone_mode=clone_mode,
        update_dependencies=update_dependencies,
    )


# ---------------------------------------------------------------------------
# Public API
# ---------------------------------------------------------------------------


def install_repos(
    selected_repos: List[Repository],
    repositories_base_dir: str,
    bin_dir: str,
    all_repos: List[Repository],
    no_verification: bool,
    preview: bool,
    quiet: bool,
    clone_mode: str,
    update_dependencies: bool,
) -> None:
    """
    Install one or more repositories according to the configured installers
    and the CLI layer precedence rules.
    """
    pipeline = InstallationPipeline(INSTALLERS)

    for repo in selected_repos:
        identifier = get_repo_identifier(repo, all_repos)

        repo_dir = _ensure_repo_dir(
            repo=repo,
            repositories_base_dir=repositories_base_dir,
            all_repos=all_repos,
            preview=preview,
            no_verification=no_verification,
            clone_mode=clone_mode,
            identifier=identifier,
        )
        if not repo_dir:
            continue

        if not _verify_repo(
            repo=repo,
            repo_dir=repo_dir,
            no_verification=no_verification,
            identifier=identifier,
        ):
            continue

        ctx = _create_context(
            repo=repo,
            identifier=identifier,
            repo_dir=repo_dir,
            repositories_base_dir=repositories_base_dir,
            bin_dir=bin_dir,
            all_repos=all_repos,
            no_verification=no_verification,
            preview=preview,
            quiet=quiet,
            clone_mode=clone_mode,
            update_dependencies=update_dependencies,
        )

        pipeline.run(ctx)
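A hedged usage sketch of the public entry point above; the repository dictionary is simplified and the paths are invented, only the keyword arguments mirror the signature shown here:

    from pkgmgr.actions.install import install_repos

    repo = {"directory": "/home/dev/Repositories/example-tool"}  # schema simplified

    install_repos(
        selected_repos=[repo],
        repositories_base_dir="/home/dev/Repositories",
        bin_dir="/home/dev/.local/bin",
        all_repos=[repo],
        no_verification=True,   # skip GPG/commit verification for this sketch
        preview=True,           # only preview, do not execute commands
        quiet=False,
        clone_mode="https",
        update_dependencies=False,
    )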
@@ -38,7 +38,7 @@ from abc import ABC, abstractmethod
from typing import Iterable, TYPE_CHECKING

if TYPE_CHECKING:
    from pkgmgr.actions.repository.install.context import RepoContext
    from pkgmgr.actions.install.context import RepoContext


# ---------------------------------------------------------------------------
19  src/pkgmgr/actions/install/installers/__init__.py  Normal file
@@ -0,0 +1,19 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installer package for pkgmgr.

This exposes all installer classes so users can import them directly from
pkgmgr.actions.install.installers.
"""

from pkgmgr.actions.install.installers.base import BaseInstaller  # noqa: F401
from pkgmgr.actions.install.installers.nix_flake import NixFlakeInstaller  # noqa: F401
from pkgmgr.actions.install.installers.python import PythonInstaller  # noqa: F401
from pkgmgr.actions.install.installers.makefile import MakefileInstaller  # noqa: F401

# OS-specific installers
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller  # noqa: F401
from pkgmgr.actions.install.installers.os_packages.debian_control import DebianControlInstaller  # noqa: F401
from pkgmgr.actions.install.installers.os_packages.rpm_spec import RpmSpecInstaller  # noqa: F401
@@ -8,8 +8,8 @@ Base interface for all installer components in the pkgmgr installation pipeline.
from abc import ABC, abstractmethod
from typing import Set

from pkgmgr.actions.repository.install.context import RepoContext
from pkgmgr.actions.repository.install.capabilities import CAPABILITY_MATCHERS
from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.capabilities import CAPABILITY_MATCHERS


class BaseInstaller(ABC):
97 src/pkgmgr/actions/install/installers/makefile.py Normal file
@@ -0,0 +1,97 @@
from __future__ import annotations

import os
import re

from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command


class MakefileInstaller(BaseInstaller):
    """
    Generic installer that runs `make install` if a Makefile with an
    install target is present.

    Safety rules:
    - If PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 is set, this installer
      is globally disabled.
    - The higher-level InstallationPipeline ensures that Makefile
      installation does not run if a stronger CLI layer already owns
      the command (e.g. Nix or OS packages).
    """

    layer = "makefile"
    MAKEFILE_NAME = "Makefile"

    def supports(self, ctx: RepoContext) -> bool:
        """
        Return True if this repository has a Makefile and the installer
        is not globally disabled.
        """
        # Optional global kill switch.
        if os.environ.get("PKGMGR_DISABLE_MAKEFILE_INSTALLER") == "1":
            if not ctx.quiet:
                print(
                    "[INFO] MakefileInstaller is disabled via "
                    "PKGMGR_DISABLE_MAKEFILE_INSTALLER."
                )
            return False

        makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
        return os.path.exists(makefile_path)

    def _has_install_target(self, makefile_path: str) -> bool:
        """
        Heuristically check whether the Makefile defines an install target.

        We look for:

        - a plain 'install:' target, or
        - any 'install-*:' style target.
        """
        try:
            with open(makefile_path, "r", encoding="utf-8", errors="ignore") as f:
                content = f.read()
        except OSError:
            return False

        # Simple heuristics: look for "install:" or targets starting with "install-"
        if re.search(r"^install\s*:", content, flags=re.MULTILINE):
            return True

        if re.search(r"^install-[a-zA-Z0-9_-]*\s*:", content, flags=re.MULTILINE):
            return True

        return False

    def run(self, ctx: RepoContext) -> None:
        """
        Execute `make install` in the repository directory if an install
        target exists.
        """
        makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)

        if not os.path.exists(makefile_path):
            if not ctx.quiet:
                print(
                    f"[pkgmgr] Makefile '{makefile_path}' not found, "
                    "skipping MakefileInstaller."
                )
            return

        if not self._has_install_target(makefile_path):
            if not ctx.quiet:
                print(
                    f"[pkgmgr] No 'install' target found in {makefile_path}."
                )
            return

        if not ctx.quiet:
            print(
                f"[pkgmgr] Running 'make install' in {ctx.repo_dir} "
                f"(MakefileInstaller)"
            )

        cmd = "make install"
        run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
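For illustration, a minimal sketch (not part of the diff; the sample Makefile text is hypothetical) of what the install-target heuristic above accepts:

import re

SAMPLE_MAKEFILE = """\
build:
\tgcc -o tool main.c

install: build
\tinstall -m 755 tool /usr/local/bin/tool
"""

# Same patterns as MakefileInstaller._has_install_target():
assert re.search(r"^install\s*:", SAMPLE_MAKEFILE, flags=re.MULTILINE)
assert not re.search(r"^install-[a-zA-Z0-9_-]*\s*:", SAMPLE_MAKEFILE, flags=re.MULTILINE)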
@@ -10,28 +10,31 @@ installer will try to install profile outputs from the flake.
Behavior:
- If flake.nix is present and `nix` exists on PATH:
  * First remove any existing `package-manager` profile entry (best-effort).
  * Then install the flake outputs (`pkgmgr`, `default`) via `nix profile install`.
- Failure installing `pkgmgr` is treated as fatal.
- Failure installing `default` is logged as an error/warning but does not abort.
  * Then install one or more flake outputs via `nix profile install`.
- For the package-manager repo:
  * `pkgmgr` is mandatory (CLI), `default` is optional.
- For all other repos:
  * `default` is mandatory.

Special handling for dev shells / CI:
- If IN_NIX_SHELL is set (e.g. inside `nix develop`), the installer is
  disabled. In that environment the flake outputs are already provided
  by the dev shell and we must not touch the user profile.
Special handling:
- If PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 is set, the installer is
  globally disabled (useful for CI or debugging).

The higher-level InstallationPipeline and CLI-layer model decide when this
installer is allowed to run, based on where the current CLI comes from
(e.g. Nix, OS packages, Python, Makefile).
"""

import os
import shutil
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, List, Tuple

from pkgmgr.actions.repository.install.installers.base import BaseInstaller
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command

if TYPE_CHECKING:
    from pkgmgr.actions.repository.install.context import RepoContext
    from pkgmgr.actions.repository.install import InstallContext
    from pkgmgr.actions.install.context import RepoContext
    from pkgmgr.actions.install import InstallContext


class NixFlakeInstaller(BaseInstaller):
@@ -43,33 +46,14 @@ class NixFlakeInstaller(BaseInstaller):
    FLAKE_FILE = "flake.nix"
    PROFILE_NAME = "package-manager"

    def _in_nix_shell(self) -> bool:
        """
        Return True if we appear to be running inside a Nix dev shell.

        Nix sets IN_NIX_SHELL in `nix develop` environments. In that case
        the flake outputs are already available, and touching the user
        profile (nix profile install/remove) is undesirable.
        """
        return bool(os.environ.get("IN_NIX_SHELL"))

    def supports(self, ctx: "RepoContext") -> bool:
        """
        Only support repositories that:
        - Are NOT inside a Nix dev shell (IN_NIX_SHELL unset),
        - Are NOT explicitly disabled via PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1,
        - Have a flake.nix,
        - And have the `nix` command available.
        """
        # 1) Skip when running inside a dev shell – flake is already active.
        if self._in_nix_shell():
            print(
                "[INFO] IN_NIX_SHELL detected; skipping NixFlakeInstaller. "
                "Flake outputs are provided by the development shell."
            )
            return False

        # 2) Optional global kill-switch for CI or debugging.
        # Optional global kill-switch for CI or debugging.
        if os.environ.get("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER") == "1":
            print(
                "[INFO] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 – "
@@ -77,11 +61,11 @@ class NixFlakeInstaller(BaseInstaller):
            )
            return False

        # 3) Nix must be available.
        # Nix must be available.
        if shutil.which("nix") is None:
            return False

        # 4) flake.nix must exist in the repository.
        # flake.nix must exist in the repository.
        flake_path = os.path.join(ctx.repo_dir, self.FLAKE_FILE)
        return os.path.exists(flake_path)

@@ -107,36 +91,56 @@ class NixFlakeInstaller(BaseInstaller):
            # Unit tests explicitly assert this is swallowed
            pass

    def _profile_outputs(self, ctx: "RepoContext") -> List[Tuple[str, bool]]:
        """
        Decide which flake outputs to install and whether failures are fatal.

        Returns a list of (output_name, allow_failure) tuples.

        Rules:
        - For the package-manager repo (identifier 'pkgmgr' or 'package-manager'):
          [("pkgmgr", False), ("default", True)]
        - For all other repos:
          [("default", False)]
        """
        ident = ctx.identifier

        if ident in {"pkgmgr", "package-manager"}:
            # pkgmgr: main CLI output is "pkgmgr" (mandatory),
            # "default" is nice-to-have (non-fatal).
            return [("pkgmgr", False), ("default", True)]

        # Generic repos: we expect a sensible "default" package/app.
        # Failure to install it is considered fatal.
        return [("default", False)]

    def run(self, ctx: "InstallContext") -> None:
        """
        Install Nix flake profile outputs (pkgmgr, default).
        Install Nix flake profile outputs.

        Any failure installing `pkgmgr` is treated as fatal (SystemExit).
        A failure installing `default` is logged but does not abort.
        For the package-manager repo, failure installing 'pkgmgr' is fatal,
        failure installing 'default' is non-fatal.
        For other repos, failure installing 'default' is fatal.
        """
        # Extra guard in case run() is called directly without supports().
        if self._in_nix_shell():
            print(
                "[INFO] IN_NIX_SHELL detected in run(); "
                "skipping Nix flake profile installation."
            )
            return

        # Reuse supports() to keep logic in one place
        # Reuse supports() to keep logic in one place.
        if not self.supports(ctx):  # type: ignore[arg-type]
            return

        print("Nix flake detected, attempting to install profile outputs...")
        outputs = self._profile_outputs(ctx)  # list of (name, allow_failure)

        # Handle the "already installed" case up-front:
        print(
            "Nix flake detected in "
            f"{ctx.identifier}, attempting to install profile outputs: "
            + ", ".join(name for name, _ in outputs)
        )

        # Handle the "already installed" case up-front for the shared profile.
        self._ensure_old_profile_removed(ctx)  # type: ignore[arg-type]

        for output in ("pkgmgr", "default"):
        for output, allow_failure in outputs:
            cmd = f"nix profile install {ctx.repo_dir}#{output}"

            try:
                # For 'default' we don't want the process to exit on error
                allow_failure = output == "default"
                run_command(
                    cmd,
                    cwd=ctx.repo_dir,
@@ -146,12 +150,11 @@ class NixFlakeInstaller(BaseInstaller):
                print(f"Nix flake output '{output}' successfully installed.")
            except SystemExit as e:
                print(f"[Error] Failed to install Nix flake output '{output}': {e}")
                if output == "pkgmgr":
                    # Broken main CLI install → fatal
                if not allow_failure:
                    # Mandatory output failed → fatal for the pipeline.
                    raise
                # For 'default' we log and continue
                # Optional output failed → log and continue.
                print(
                    "[Warning] Continuing despite failure to install 'default' "
                    "because 'pkgmgr' is already installed."
                    "[Warning] Continuing despite failure to install "
                    f"optional output '{output}'."
                )
                break
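To make the new output rules concrete, a small sketch (hypothetical repository paths, not part of the diff) of the `nix profile install` commands the loop above would issue:

# For the package-manager repo: 'pkgmgr' is mandatory, 'default' is optional.
for output, allow_failure in [("pkgmgr", False), ("default", True)]:
    print(f"nix profile install /home/user/Repositories/package-manager#{output}  (allow_failure={allow_failure})")

# For any other repo: only 'default', and a failure is fatal.
for output, allow_failure in [("default", False)]:
    print(f"nix profile install /home/user/Repositories/some-tool#{output}  (allow_failure={allow_failure})")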
@@ -3,8 +3,8 @@
import os
import shutil

from pkgmgr.actions.repository.install.context import RepoContext
from pkgmgr.actions.repository.install.installers.base import BaseInstaller
from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command


@@ -19,8 +19,8 @@ import os
import shutil
from typing import List

from pkgmgr.actions.repository.install.context import RepoContext
from pkgmgr.actions.repository.install.installers.base import BaseInstaller
from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command


@@ -21,8 +21,8 @@ import shutil
import tarfile
from typing import List, Optional, Tuple

from pkgmgr.actions.repository.install.context import RepoContext
from pkgmgr.actions.repository.install.installers.base import BaseInstaller
from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command


139 src/pkgmgr/actions/install/installers/python.py Normal file
@@ -0,0 +1,139 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
PythonInstaller — install Python projects defined via pyproject.toml.

Installation rules:

1. pip command resolution:
   a) If PKGMGR_PIP is set → use it exactly as provided.
   b) Else if running inside a virtualenv → use `sys.executable -m pip`.
   c) Else → create/use a per-repository virtualenv under ~/.venvs/<repo>/.

2. Installation target:
   - Always install into the resolved pip environment.
   - Never modify system Python, never rely on --user.
   - Nix-immutable systems (PEP 668) are automatically avoided because we
     never touch system Python.

3. The installer is skipped when:
   - PKGMGR_DISABLE_PYTHON_INSTALLER=1 is set.
   - The repository has no pyproject.toml.

All pip failures are treated as fatal.
"""

from __future__ import annotations

import os
import sys
import subprocess
from typing import TYPE_CHECKING

from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command

if TYPE_CHECKING:
    from pkgmgr.actions.install.context import RepoContext
    from pkgmgr.actions.install import InstallContext


class PythonInstaller(BaseInstaller):
    """Install Python projects and dependencies via pip using isolated environments."""

    layer = "python"

    # ----------------------------------------------------------------------
    # Installer activation logic
    # ----------------------------------------------------------------------
    def supports(self, ctx: "RepoContext") -> bool:
        """
        Return True if this installer should handle this repository.

        The installer is active only when:
        - A pyproject.toml exists in the repo, and
        - PKGMGR_DISABLE_PYTHON_INSTALLER is not set.
        """
        if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
            print("[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER.")
            return False

        return os.path.exists(os.path.join(ctx.repo_dir, "pyproject.toml"))

    # ----------------------------------------------------------------------
    # Virtualenv handling
    # ----------------------------------------------------------------------
    def _in_virtualenv(self) -> bool:
        """Detect whether the current interpreter is inside a venv."""
        if os.environ.get("VIRTUAL_ENV"):
            return True

        base = getattr(sys, "base_prefix", sys.prefix)
        return sys.prefix != base

    def _ensure_repo_venv(self, ctx: "InstallContext") -> str:
        """
        Ensure that ~/.venvs/<identifier>/ exists and contains a minimal venv.

        Returns the venv directory path.
        """
        venv_dir = os.path.expanduser(f"~/.venvs/{ctx.identifier}")
        python = sys.executable

        if not os.path.isdir(venv_dir):
            print(f"[python-installer] Creating virtualenv: {venv_dir}")
            subprocess.check_call([python, "-m", "venv", venv_dir])

        return venv_dir

    # ----------------------------------------------------------------------
    # pip command resolution
    # ----------------------------------------------------------------------
    def _pip_cmd(self, ctx: "InstallContext") -> str:
        """
        Determine which pip command to use.

        Priority:
        1. PKGMGR_PIP override given by user or automation.
        2. Active virtualenv → use sys.executable -m pip.
        3. Per-repository venv → ~/.venvs/<repo>/bin/pip
        """
        explicit = os.environ.get("PKGMGR_PIP", "").strip()
        if explicit:
            return explicit

        if self._in_virtualenv():
            return f"{sys.executable} -m pip"

        venv_dir = self._ensure_repo_venv(ctx)
        pip_path = os.path.join(venv_dir, "bin", "pip")
        return pip_path

    # ----------------------------------------------------------------------
    # Execution
    # ----------------------------------------------------------------------
    def run(self, ctx: "InstallContext") -> None:
        """
        Install the project defined by pyproject.toml.

        Uses the resolved pip environment. Installation is isolated and never
        touches system Python.
        """
        if not self.supports(ctx):  # type: ignore[arg-type]
            return

        pyproject = os.path.join(ctx.repo_dir, "pyproject.toml")
        if not os.path.exists(pyproject):
            return

        print(f"[python-installer] Installing Python project for {ctx.identifier}...")

        pip_cmd = self._pip_cmd(ctx)

        # Final install command: ALWAYS isolated, never system-wide.
        install_cmd = f"{pip_cmd} install ."

        run_command(install_cmd, cwd=ctx.repo_dir, preview=ctx.preview)

        print(f"[python-installer] Installation finished for {ctx.identifier}.")
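A quick sketch of the resolution order above (the identifier and paths are hypothetical, not part of the diff):

import os
import sys

def sketch_pip_cmd(identifier: str) -> str:
    # 1) Explicit override always wins.
    if os.environ.get("PKGMGR_PIP", "").strip():
        return os.environ["PKGMGR_PIP"].strip()
    # 2) Inside an active virtualenv, reuse the running interpreter.
    if os.environ.get("VIRTUAL_ENV") or sys.prefix != getattr(sys, "base_prefix", sys.prefix):
        return f"{sys.executable} -m pip"
    # 3) Otherwise fall back to the per-repository venv.
    return os.path.expanduser(f"~/.venvs/{identifier}/bin/pip")

print(sketch_pip_cmd("some-tool"))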
91 src/pkgmgr/actions/install/layers.py Normal file
@@ -0,0 +1,91 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
CLI layer model for the pkgmgr installation pipeline.

We treat CLI entry points as coming from one of four conceptual layers:

- os-packages : system package managers (pacman/apt/dnf/…)
- nix         : Nix flake / nix profile
- python      : pip / virtualenv / user-local scripts
- makefile    : repo-local Makefile / scripts inside the repo

The layer order defines precedence: higher layers "own" the CLI and
lower layers will not be executed once a higher-priority CLI exists.
"""

from __future__ import annotations

import os
from enum import Enum
from typing import Optional


class CliLayer(str, Enum):
    OS_PACKAGES = "os-packages"
    NIX = "nix"
    PYTHON = "python"
    MAKEFILE = "makefile"


# Highest priority first
CLI_LAYERS: list[CliLayer] = [
    CliLayer.OS_PACKAGES,
    CliLayer.NIX,
    CliLayer.PYTHON,
    CliLayer.MAKEFILE,
]


def layer_priority(layer: Optional[CliLayer]) -> int:
    """
    Return a numeric priority index for a given layer.

    Lower index → higher priority.
    Unknown / None → very low priority.
    """
    if layer is None:
        return len(CLI_LAYERS)
    try:
        return CLI_LAYERS.index(layer)
    except ValueError:
        return len(CLI_LAYERS)


def classify_command_layer(command: str, repo_dir: str) -> CliLayer:
    """
    Heuristically classify a resolved command path into a CLI layer.

    Rules (best effort):

    - /usr/... or /bin/...             → os-packages
    - /nix/store/... or ~/.nix-profile → nix
    - ~/.local/bin/...                 → python
    - inside repo_dir                  → makefile
    - everything else                  → python (user/venv scripts, etc.)
    """
    command_abs = os.path.abspath(os.path.expanduser(command))
    repo_abs = os.path.abspath(repo_dir)
    home = os.path.expanduser("~")

    # OS package managers
    if command_abs.startswith("/usr/") or command_abs.startswith("/bin/"):
        return CliLayer.OS_PACKAGES

    # Nix store / profile
    if command_abs.startswith("/nix/store/") or command_abs.startswith(
        os.path.join(home, ".nix-profile")
    ):
        return CliLayer.NIX

    # User-local bin
    if command_abs.startswith(os.path.join(home, ".local", "bin")):
        return CliLayer.PYTHON

    # Inside the repository → usually a Makefile/script
    if command_abs.startswith(repo_abs):
        return CliLayer.MAKEFILE

    # Fallback: treat as Python-style/user-level script
    return CliLayer.PYTHON
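A few illustrative classifications under the rules above (the command paths and repository directory are hypothetical, not part of the diff):

from pkgmgr.actions.install.layers import classify_command_layer

repo_dir = "/home/user/Repositories/some-tool"  # hypothetical
print(classify_command_layer("/usr/bin/some-tool", repo_dir))                 # CliLayer.OS_PACKAGES
print(classify_command_layer("/nix/store/abc-some-tool/bin/tool", repo_dir))  # CliLayer.NIX
print(classify_command_layer(f"{repo_dir}/scripts/run.sh", repo_dir))         # CliLayer.MAKEFILE
print(classify_command_layer("/opt/custom/bin/some-tool", repo_dir))          # CliLayer.PYTHON (fallback)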
257 src/pkgmgr/actions/install/pipeline.py Normal file
@@ -0,0 +1,257 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installation pipeline orchestration for repositories.

This module implements the "Setup Controller" logic:

1. Detect current CLI command for the repo (if any).
2. Classify it into a layer (os-packages, nix, python, makefile).
3. Iterate over installers in layer order:
   - Skip installers whose layer is weaker than an already-loaded one.
   - Run only installers that support() the repo and add new capabilities.
   - After each installer, re-resolve the command and update the layer.
4. Maintain the repo["command"] field and create/update symlinks via create_ink().

The goal is to prevent conflicting installations and make the layering
behaviour explicit and testable.
"""

from __future__ import annotations

from dataclasses import dataclass
from typing import Optional, Sequence, Set

from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.actions.install.layers import (
    CliLayer,
    classify_command_layer,
    layer_priority,
)
from pkgmgr.core.command.ink import create_ink
from pkgmgr.core.command.resolve import resolve_command_for_repo


@dataclass
class CommandState:
    """
    Represents the current CLI state for a repository:

    - command: absolute or relative path to the CLI entry point
    - layer: which conceptual layer this command belongs to
    """

    command: Optional[str]
    layer: Optional[CliLayer]


class CommandResolver:
    """
    Small helper responsible for resolving the current command for a repo
    and mapping it into a CommandState.
    """

    def __init__(self, ctx: RepoContext) -> None:
        self._ctx = ctx

    def resolve(self) -> CommandState:
        """
        Resolve the current command for this repository.

        If resolve_command_for_repo raises SystemExit (e.g. Python package
        without installed entry point), we treat this as "no command yet"
        from the point of view of the installers.
        """
        repo = self._ctx.repo
        identifier = self._ctx.identifier
        repo_dir = self._ctx.repo_dir

        try:
            cmd = resolve_command_for_repo(
                repo=repo,
                repo_identifier=identifier,
                repo_dir=repo_dir,
            )
        except SystemExit:
            cmd = None

        if not cmd:
            return CommandState(command=None, layer=None)

        layer = classify_command_layer(cmd, repo_dir)
        return CommandState(command=cmd, layer=layer)


class InstallationPipeline:
    """
    High-level orchestrator that applies a sequence of installers
    to a repository based on CLI layer precedence.
    """

    def __init__(self, installers: Sequence[BaseInstaller]) -> None:
        self._installers = list(installers)

    # ------------------------------------------------------------------
    # Public API
    # ------------------------------------------------------------------
    def run(self, ctx: RepoContext) -> None:
        """
        Execute the installation pipeline for a single repository.

        - Detect initial command & layer.
        - Optionally create a symlink.
        - Run installers in order, skipping those whose layer is weaker
          than an already-loaded CLI.
        - After each installer, re-resolve the command and refresh the
          symlink if needed.
        """
        repo = ctx.repo
        repo_dir = ctx.repo_dir
        identifier = ctx.identifier
        repositories_base_dir = ctx.repositories_base_dir
        bin_dir = ctx.bin_dir
        all_repos = ctx.all_repos
        quiet = ctx.quiet
        preview = ctx.preview

        resolver = CommandResolver(ctx)
        state = resolver.resolve()

        # Persist initial command (if any) and create a symlink.
        if state.command:
            repo["command"] = state.command
            create_ink(
                repo,
                repositories_base_dir,
                bin_dir,
                all_repos,
                quiet=quiet,
                preview=preview,
            )
        else:
            repo.pop("command", None)

        provided_capabilities: Set[str] = set()

        # Main installer loop
        for installer in self._installers:
            layer_name = getattr(installer, "layer", None)

            # Installers without a layer participate without precedence logic.
            if layer_name is None:
                self._run_installer(installer, ctx, identifier, repo_dir, quiet)
                continue

            try:
                installer_layer = CliLayer(layer_name)
            except ValueError:
                # Unknown layer string → treat as lowest priority.
                installer_layer = None

            # "Previous/Current layer already loaded?"
            if state.layer is not None and installer_layer is not None:
                current_prio = layer_priority(state.layer)
                installer_prio = layer_priority(installer_layer)

                if current_prio < installer_prio:
                    # Current CLI comes from a higher-priority layer,
                    # so we skip this installer entirely.
                    if not quiet:
                        print(
                            f"[pkgmgr] Skipping installer "
                            f"{installer.__class__.__name__} for {identifier} – "
                            f"CLI already provided by layer {state.layer.value!r}."
                        )
                    continue

                if current_prio == installer_prio:
                    # Same layer already provides a CLI; usually there is no
                    # need to run another installer on top of it.
                    if not quiet:
                        print(
                            f"[pkgmgr] Skipping installer "
                            f"{installer.__class__.__name__} for {identifier} – "
                            f"layer {installer_layer.value!r} is already loaded."
                        )
                    continue

            # Check if this installer is applicable at all.
            if not installer.supports(ctx):
                continue

            # Capabilities: if everything this installer would provide is already
            # covered, we can safely skip it.
            caps = installer.discover_capabilities(ctx)
            if caps and caps.issubset(provided_capabilities):
                if not quiet:
                    print(
                        f"Skipping installer {installer.__class__.__name__} "
                        f"for {identifier} – capabilities {caps} already provided."
                    )
                continue

            if not quiet:
                print(
                    f"[pkgmgr] Running installer {installer.__class__.__name__} "
                    f"for {identifier} in '{repo_dir}' "
                    f"(new capabilities: {caps or set()})..."
                )

            # Run the installer with error reporting.
            self._run_installer(installer, ctx, identifier, repo_dir, quiet)

            provided_capabilities.update(caps)

            # After running an installer, re-resolve the command and layer.
            new_state = resolver.resolve()
            if new_state.command:
                repo["command"] = new_state.command
                create_ink(
                    repo,
                    repositories_base_dir,
                    bin_dir,
                    all_repos,
                    quiet=quiet,
                    preview=preview,
                )
            else:
                repo.pop("command", None)

            state = new_state

    # ------------------------------------------------------------------
    # Internal helpers
    # ------------------------------------------------------------------
    @staticmethod
    def _run_installer(
        installer: BaseInstaller,
        ctx: RepoContext,
        identifier: str,
        repo_dir: str,
        quiet: bool,
    ) -> None:
        """
        Execute a single installer with unified error handling.
        """
        try:
            installer.run(ctx)
        except SystemExit as exc:
            exit_code = exc.code if isinstance(exc.code, int) else str(exc.code)
            print(
                f"[ERROR] Installer {installer.__class__.__name__} failed "
                f"for repository {identifier} (dir: {repo_dir}) "
                f"with exit code {exit_code}."
            )
            print(
                "[ERROR] This usually means an underlying command failed "
                "(e.g. 'make install', 'nix build', 'pip install', ...)."
            )
            print(
                "[ERROR] Check the log above for the exact command output. "
                "You can also run this repository in isolation via:\n"
                f" pkgmgr install {identifier} "
                "--clone-mode shallow --no-verification"
            )
            raise
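A minimal wiring sketch of how the pipeline is driven; the exact installer list and ordering used by the calling code is not shown in this diff, so the order below is an assumption that mirrors the layer precedence:

from pkgmgr.actions.install.installers import (
    NixFlakeInstaller,
    PythonInstaller,
    MakefileInstaller,
)
from pkgmgr.actions.install.pipeline import InstallationPipeline

# Hypothetical ordering: stronger layers first, Makefile as the fallback.
pipeline = InstallationPipeline(
    [
        NixFlakeInstaller(),
        PythonInstaller(),
        MakefileInstaller(),
    ]
)
# 'ctx' would be the RepoContext built by _create_context() earlier in this diff:
# pipeline.run(ctx)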
26 src/pkgmgr/actions/mirror/__init__.py Normal file
@@ -0,0 +1,26 @@
from __future__ import annotations

"""
High-level mirror actions.

Public API:
- list_mirrors
- diff_mirrors
- merge_mirrors
- setup_mirrors
"""

from .types import Repository, MirrorMap
from .list_cmd import list_mirrors
from .diff_cmd import diff_mirrors
from .merge_cmd import merge_mirrors
from .setup_cmd import setup_mirrors

__all__ = [
    "Repository",
    "MirrorMap",
    "list_mirrors",
    "diff_mirrors",
    "merge_mirrors",
    "setup_mirrors",
]
31 src/pkgmgr/actions/mirror/context.py Normal file
@@ -0,0 +1,31 @@
from __future__ import annotations

from typing import List

from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier

from .io import load_config_mirrors, read_mirrors_file
from .types import MirrorMap, RepoMirrorContext, Repository


def build_context(
    repo: Repository,
    repositories_base_dir: str,
    all_repos: List[Repository],
) -> RepoMirrorContext:
    """
    Build a RepoMirrorContext for a single repository.
    """
    identifier = get_repo_identifier(repo, all_repos)
    repo_dir = get_repo_dir(repositories_base_dir, repo)

    config_mirrors: MirrorMap = load_config_mirrors(repo)
    file_mirrors: MirrorMap = read_mirrors_file(repo_dir)

    return RepoMirrorContext(
        identifier=identifier,
        repo_dir=repo_dir,
        config_mirrors=config_mirrors,
        file_mirrors=file_mirrors,
    )
60 src/pkgmgr/actions/mirror/diff_cmd.py Normal file
@@ -0,0 +1,60 @@
from __future__ import annotations

from typing import List

from .context import build_context
from .printing import print_header
from .types import Repository


def diff_mirrors(
    selected_repos: List[Repository],
    repositories_base_dir: str,
    all_repos: List[Repository],
) -> None:
    """
    Show differences between config mirrors and MIRRORS file.

    - Mirrors present only in config are reported as "ONLY IN CONFIG".
    - Mirrors present only in MIRRORS file are reported as "ONLY IN FILE".
    - Mirrors with same name but different URLs are reported as "URL MISMATCH".
    """
    for repo in selected_repos:
        ctx = build_context(repo, repositories_base_dir, all_repos)

        print_header("[MIRROR DIFF]", ctx)

        config_m = ctx.config_mirrors
        file_m = ctx.file_mirrors

        if not config_m and not file_m:
            print(" No mirrors configured in config or MIRRORS file.")
            print()
            continue

        # Mirrors only in config
        for name, url in sorted(config_m.items()):
            if name not in file_m:
                print(f" [ONLY IN CONFIG] {name}: {url}")

        # Mirrors only in MIRRORS file
        for name, url in sorted(file_m.items()):
            if name not in config_m:
                print(f" [ONLY IN FILE] {name}: {url}")

        # Mirrors with same name but different URLs
        shared = set(config_m) & set(file_m)
        for name in sorted(shared):
            url_cfg = config_m.get(name)
            url_file = file_m.get(name)
            if url_cfg != url_file:
                print(
                    f" [URL MISMATCH] {name}:\n"
                    f" config: {url_cfg}\n"
                    f" file: {url_file}"
                )

        if config_m and file_m and config_m == file_m:
            print(" [OK] Mirrors in config and MIRRORS file are in sync.")

        print()
141 src/pkgmgr/actions/mirror/git_remote.py Normal file
@@ -0,0 +1,141 @@
from __future__ import annotations

import os
from typing import List, Optional

from pkgmgr.core.command.run import run_command
from pkgmgr.core.git import GitError, run_git

from .types import MirrorMap, RepoMirrorContext, Repository


def build_default_ssh_url(repo: Repository) -> Optional[str]:
    """
    Build a simple SSH URL from repo config if no explicit mirror is defined.

    Example: git@github.com:account/repository.git
    """
    provider = repo.get("provider")
    account = repo.get("account")
    name = repo.get("repository")
    port = repo.get("port")

    if not provider or not account or not name:
        return None

    provider = str(provider)
    account = str(account)
    name = str(name)

    if port:
        return f"ssh://git@{provider}:{port}/{account}/{name}.git"

    # GitHub-style shorthand
    return f"git@{provider}:{account}/{name}.git"


def determine_primary_remote_url(
    repo: Repository,
    resolved_mirrors: MirrorMap,
) -> Optional[str]:
    """
    Determine the primary remote URL in a consistent way:

    1. resolved_mirrors["origin"]
    2. any resolved mirror (first by name)
    3. default SSH URL from provider/account/repository
    """
    if "origin" in resolved_mirrors:
        return resolved_mirrors["origin"]

    if resolved_mirrors:
        first_name = sorted(resolved_mirrors.keys())[0]
        return resolved_mirrors[first_name]

    return build_default_ssh_url(repo)


def _safe_git_output(args: List[str], cwd: str) -> Optional[str]:
    """
    Run a Git command via run_git and return its stdout, or None on failure.
    """
    try:
        return run_git(args, cwd=cwd)
    except GitError:
        return None


def current_origin_url(repo_dir: str) -> Optional[str]:
    """
    Return the current URL for remote 'origin', or None if not present.
    """
    output = _safe_git_output(["remote", "get-url", "origin"], cwd=repo_dir)
    if not output:
        return None
    url = output.strip()
    return url or None


def has_origin_remote(repo_dir: str) -> bool:
    """
    Check whether a remote called 'origin' exists in the repository.
    """
    output = _safe_git_output(["remote"], cwd=repo_dir)
    if not output:
        return False
    names = output.split()
    return "origin" in names


def ensure_origin_remote(
    repo: Repository,
    ctx: RepoMirrorContext,
    preview: bool,
) -> None:
    """
    Ensure that a usable 'origin' remote exists.

    Priority for choosing URL:
    1. resolved_mirrors["origin"]
    2. any resolved mirror (first by name)
    3. default SSH URL derived from provider/account/repository
    """
    repo_dir = ctx.repo_dir
    resolved_mirrors = ctx.resolved_mirrors

    if not os.path.isdir(os.path.join(repo_dir, ".git")):
        print(f"[WARN] {repo_dir} is not a Git repository (no .git directory).")
        return

    url = determine_primary_remote_url(repo, resolved_mirrors)

    if not url:
        print(
            "[WARN] Could not determine URL for 'origin' remote. "
            "Please configure mirrors or provider/account/repository."
        )
        return

    if not has_origin_remote(repo_dir):
        cmd = f"git remote add origin {url}"
        if preview:
            print(f"[PREVIEW] Would run in {repo_dir!r}: {cmd}")
        else:
            print(f"[INFO] Adding 'origin' remote in {repo_dir}: {url}")
            run_command(cmd, cwd=repo_dir, preview=False)
        return

    current = current_origin_url(repo_dir)
    if current == url:
        print(f"[INFO] 'origin' already points to {url} (no change needed).")
        return

    cmd = f"git remote set-url origin {url}"
    if preview:
        print(f"[PREVIEW] Would run in {repo_dir!r}: {cmd}")
    else:
        print(
            f"[INFO] Updating 'origin' remote in {repo_dir} "
            f"from {current or '<unknown>'} to {url}"
        )
        run_command(cmd, cwd=repo_dir, preview=False)
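A short sketch of the two URL shapes build_default_ssh_url() produces (the provider/account values are hypothetical, not part of the diff):

from pkgmgr.actions.mirror.git_remote import build_default_ssh_url

print(build_default_ssh_url({"provider": "github.com", "account": "acme", "repository": "tool"}))
# → git@github.com:acme/tool.git
print(build_default_ssh_url({"provider": "git.example.org", "account": "acme", "repository": "tool", "port": 2222}))
# → ssh://git@git.example.org:2222/acme/tool.git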
115 src/pkgmgr/actions/mirror/io.py Normal file
@@ -0,0 +1,115 @@
from __future__ import annotations

import os
from typing import List, Mapping

from .types import MirrorMap, Repository


def load_config_mirrors(repo: Repository) -> MirrorMap:
    """
    Load mirrors from the repository configuration entry.

    Supported shapes:

    repo["mirrors"] = {
        "origin": "ssh://git@example.com/...",
        "backup": "ssh://git@backup/...",
    }

    or

    repo["mirrors"] = [
        {"name": "origin", "url": "ssh://git@example.com/..."},
        {"name": "backup", "url": "ssh://git@backup/..."},
    ]
    """
    mirrors = repo.get("mirrors") or {}
    result: MirrorMap = {}

    if isinstance(mirrors, dict):
        for name, url in mirrors.items():
            if not url:
                continue
            result[str(name)] = str(url)
        return result

    if isinstance(mirrors, list):
        for entry in mirrors:
            if not isinstance(entry, dict):
                continue
            name = entry.get("name")
            url = entry.get("url")
            if not name or not url:
                continue
            result[str(name)] = str(url)

    return result


def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
    """
    Read mirrors from the MIRRORS file in the repository directory.

    Simple text format:

    # comment
    origin ssh://git@example.com/account/repo.git
    backup ssh://git@backup/account/repo.git
    """
    path = os.path.join(repo_dir, filename)
    mirrors: MirrorMap = {}

    if not os.path.exists(path):
        return mirrors

    try:
        with open(path, "r", encoding="utf-8") as fh:
            for line in fh:
                stripped = line.strip()
                if not stripped or stripped.startswith("#"):
                    continue

                parts = stripped.split(None, 1)
                if len(parts) != 2:
                    # Ignore malformed lines silently
                    continue
                name, url = parts
                mirrors[name] = url
    except OSError as exc:
        print(f"[WARN] Could not read MIRRORS file at {path}: {exc}")

    return mirrors


def write_mirrors_file(
    repo_dir: str,
    mirrors: Mapping[str, str],
    filename: str = "MIRRORS",
    preview: bool = False,
) -> None:
    """
    Write mirrors to MIRRORS file.

    Existing file is overwritten. In preview mode we only print what would
    be written.
    """
    path = os.path.join(repo_dir, filename)
    lines: List[str] = [f"{name} {url}" for name, url in sorted(mirrors.items())]
    content = "\n".join(lines) + ("\n" if lines else "")

    if preview:
        print(f"[PREVIEW] Would write MIRRORS file at {path}:")
        if content:
            print(content.rstrip())
        else:
            print("(empty)")
        return

    try:
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, "w", encoding="utf-8") as fh:
            fh.write(content)
        print(f"[INFO] Wrote MIRRORS file at {path}")
    except OSError as exc:
        print(f"[ERROR] Failed to write MIRRORS file at {path}: {exc}")
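A tiny round-trip sketch of the MIRRORS file helpers above (the temporary directory and URLs are hypothetical, not part of the diff):

import tempfile

from pkgmgr.actions.mirror.io import read_mirrors_file, write_mirrors_file

with tempfile.TemporaryDirectory() as repo_dir:
    write_mirrors_file(
        repo_dir,
        {
            "origin": "git@github.com:acme/tool.git",
            "backup": "ssh://git@backup.example.org/acme/tool.git",
        },
    )
    print(read_mirrors_file(repo_dir))
    # → {'backup': 'ssh://git@backup.example.org/acme/tool.git', 'origin': 'git@github.com:acme/tool.git'}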
46 src/pkgmgr/actions/mirror/list_cmd.py Normal file
@@ -0,0 +1,46 @@
from __future__ import annotations

from typing import List

from .context import build_context
from .printing import print_header, print_named_mirrors
from .types import Repository


def list_mirrors(
    selected_repos: List[Repository],
    repositories_base_dir: str,
    all_repos: List[Repository],
    source: str = "all",
) -> None:
    """
    List mirrors for the selected repositories.

    source:
    - "config"   → only mirrors from configuration
    - "file"     → only mirrors from MIRRORS file
    - "resolved" → merged view (config + file, file wins)
    - "all"      → show config + file + resolved
    """
    for repo in selected_repos:
        ctx = build_context(repo, repositories_base_dir, all_repos)
        resolved_m = ctx.resolved_mirrors

        print_header("[MIRROR]", ctx)

        if source in ("config", "all"):
            print_named_mirrors("config mirrors", ctx.config_mirrors)
            if source == "config":
                print()
                continue  # next repo

        if source in ("file", "all"):
            print_named_mirrors("MIRRORS file", ctx.file_mirrors)
            if source == "file":
                print()
                continue  # next repo

        if source in ("resolved", "all"):
            print_named_mirrors("resolved mirrors", resolved_m)

        print()
162 src/pkgmgr/actions/mirror/merge_cmd.py Normal file
@@ -0,0 +1,162 @@
from __future__ import annotations

import os
from typing import Dict, List, Tuple, Optional

import yaml

from pkgmgr.core.config.save import save_user_config

from .context import build_context
from .io import write_mirrors_file
from .types import MirrorMap, Repository


# -----------------------------------------------------------------------------
# Helpers
# -----------------------------------------------------------------------------

def _repo_key(repo: Repository) -> Tuple[str, str, str]:
    """
    Normalised key for identifying a repository in config files.
    """
    return (
        str(repo.get("provider", "")),
        str(repo.get("account", "")),
        str(repo.get("repository", "")),
    )


def _load_user_config(path: str) -> Dict[str, object]:
    """
    Load a user config YAML file as dict.
    Non-dicts yield {}.
    """
    if not os.path.exists(path):
        return {}

    try:
        with open(path, "r", encoding="utf-8") as f:
            data = yaml.safe_load(f) or {}
        return data if isinstance(data, dict) else {}
    except Exception:
        return {}


# -----------------------------------------------------------------------------
# Main merge command
# -----------------------------------------------------------------------------

def merge_mirrors(
    selected_repos: List[Repository],
    repositories_base_dir: str,
    all_repos: List[Repository],
    source: str,
    target: str,
    preview: bool = False,
    user_config_path: Optional[str] = None,
) -> None:
    """
    Merge mirrors between config and MIRRORS file.

    Rules:
    - source, target ∈ {"config", "file"}.
    - merged = (target_mirrors overridden by source_mirrors)
    - If target == "file" → write MIRRORS file.
    - If target == "config":
        * update the user config YAML directly
        * write it using save_user_config()

    The merge strategy is:
        dst + src (src wins on same name)
    """

    # Load user config once if we intend to write to it.
    user_cfg: Optional[Dict[str, object]] = None
    user_cfg_path_expanded: Optional[str] = None

    if target == "config" and user_config_path and not preview:
        user_cfg_path_expanded = os.path.expanduser(user_config_path)
        user_cfg = _load_user_config(user_cfg_path_expanded)
        if not isinstance(user_cfg.get("repositories"), list):
            user_cfg["repositories"] = []

    for repo in selected_repos:
        ctx = build_context(repo, repositories_base_dir, all_repos)

        print("============================================================")
        print(f"[MIRROR MERGE] Repository: {ctx.identifier}")
        print(f"[MIRROR MERGE] Directory: {ctx.repo_dir}")
        print(f"[MIRROR MERGE] {source} → {target}")
        print("============================================================")

        # Pick the correct source/target maps
        if source == "config":
            src = ctx.config_mirrors
            dst = ctx.file_mirrors
        else:  # source == "file"
            src = ctx.file_mirrors
            dst = ctx.config_mirrors

        # Merge (src overrides dst)
        merged: MirrorMap = dict(dst)
        merged.update(src)

        # ---------------------------------------------------------
        # WRITE TO FILE
        # ---------------------------------------------------------
        if target == "file":
            write_mirrors_file(ctx.repo_dir, merged, preview=preview)
            print()
            continue

        # ---------------------------------------------------------
        # WRITE TO CONFIG
        # ---------------------------------------------------------
        if target == "config":
            # If preview or no config path → show intended output
            if preview or not user_cfg:
                print("[INFO] The following mirrors would be written to config:")
                if not merged:
                    print(" (no mirrors)")
                else:
                    for name, url in sorted(merged.items()):
                        print(f" - {name}: {url}")
                print(" (Config not modified due to preview or missing path.)")
                print()
                continue

            repos = user_cfg.get("repositories")
            target_key = _repo_key(repo)
            existing_repo: Optional[Repository] = None

            # Find existing repo entry
            for entry in repos:
                if isinstance(entry, dict) and _repo_key(entry) == target_key:
                    existing_repo = entry
                    break

            # Create entry if missing
            if existing_repo is None:
                existing_repo = {
                    "provider": repo.get("provider"),
                    "account": repo.get("account"),
                    "repository": repo.get("repository"),
                }
                repos.append(existing_repo)

            # Write or delete mirrors
            if merged:
                existing_repo["mirrors"] = dict(merged)
            else:
                existing_repo.pop("mirrors", None)

            print(" [OK] Updated repo['mirrors'] in user config.")
            print()

    # -------------------------------------------------------------
    # SAVE CONFIG (once at the end)
    # -------------------------------------------------------------
    if user_cfg is not None and user_cfg_path_expanded is not None and not preview:
        save_user_config(user_cfg, user_cfg_path_expanded)
        print(f"[OK] Saved updated config: {user_cfg_path_expanded}")
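The merge rule itself is just "destination plus source, source wins". A minimal sketch with hypothetical mirror maps (not part of the diff):

dst = {"origin": "git@old.example.org:acme/tool.git", "backup": "git@backup.example.org:acme/tool.git"}
src = {"origin": "git@github.com:acme/tool.git"}

merged = dict(dst)
merged.update(src)  # src wins on "origin", "backup" is kept
print(merged)
# → {'origin': 'git@github.com:acme/tool.git', 'backup': 'git@backup.example.org:acme/tool.git'}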
35 src/pkgmgr/actions/mirror/printing.py Normal file
@@ -0,0 +1,35 @@
from __future__ import annotations

from .types import MirrorMap, RepoMirrorContext


def print_header(
    title_prefix: str,
    ctx: RepoMirrorContext,
) -> None:
    """
    Print a standard header for mirror-related output.

    title_prefix examples:
    - "[MIRROR]"
    - "[MIRROR DIFF]"
    - "[MIRROR MERGE]"
    - "[MIRROR SETUP:LOCAL]"
    - "[MIRROR SETUP:REMOTE]"
    """
    print("============================================================")
    print(f"{title_prefix} Repository: {ctx.identifier}")
    print(f"{title_prefix} Directory: {ctx.repo_dir}")
    print("============================================================")


def print_named_mirrors(label: str, mirrors: MirrorMap) -> None:
    """
    Print a labeled mirror block (e.g. '[config mirrors]').
    """
    print(f" [{label}]")
    if mirrors:
        for name, url in sorted(mirrors.items()):
            print(f" - {name}: {url}")
    else:
        print(" (none)")
109 src/pkgmgr/actions/mirror/setup_cmd.py Normal file
@@ -0,0 +1,109 @@
from __future__ import annotations

from typing import List

from .context import build_context
from .git_remote import determine_primary_remote_url, ensure_origin_remote
from .types import Repository


def _setup_local_mirrors_for_repo(
    repo: Repository,
    repositories_base_dir: str,
    all_repos: List[Repository],
    preview: bool,
) -> None:
    ctx = build_context(repo, repositories_base_dir, all_repos)

    print("------------------------------------------------------------")
    print(f"[MIRROR SETUP:LOCAL] {ctx.identifier}")
    print(f"[MIRROR SETUP:LOCAL] dir: {ctx.repo_dir}")
    print("------------------------------------------------------------")

    ensure_origin_remote(repo, ctx, preview=preview)
    print()


def _setup_remote_mirrors_for_repo(
    repo: Repository,
    repositories_base_dir: str,
    all_repos: List[Repository],
    preview: bool,
) -> None:
    """
    Placeholder for remote-side setup.

    This is intentionally conservative:
    - We *do not* call any provider APIs automatically here.
    - Instead, we show what should exist and which URL should be created.
    """
    ctx = build_context(repo, repositories_base_dir, all_repos)
    resolved_m = ctx.resolved_mirrors

    primary_url = determine_primary_remote_url(repo, resolved_m)

    print("------------------------------------------------------------")
    print(f"[MIRROR SETUP:REMOTE] {ctx.identifier}")
    print(f"[MIRROR SETUP:REMOTE] dir: {ctx.repo_dir}")
    print("------------------------------------------------------------")

    if not primary_url:
        print(
            "[WARN] Could not determine primary remote URL for this repository.\n"
            " Please ensure provider/account/repository and/or mirrors "
            "are set in your config."
        )
        print()
        return

    if preview:
        print(
            "[PREVIEW] Would ensure that a remote repository exists for:\n"
            f" {primary_url}\n"
            " (Provider-specific API calls not implemented yet.)"
        )
    else:
        print(
            "[INFO] Remote-setup logic is not implemented yet.\n"
            " Please create the remote repository manually if needed:\n"
            f" {primary_url}\n"
        )

    print()


def setup_mirrors(
    selected_repos: List[Repository],
    repositories_base_dir: str,
    all_repos: List[Repository],
    preview: bool = False,
    local: bool = True,
    remote: bool = True,
) -> None:
    """
    Setup mirrors for the selected repositories.

    local:
    - Configure local Git remotes (currently: ensure 'origin' is present and
      points to a reasonable URL).

    remote:
    - Placeholder that prints what should exist on the remote side.
      Actual API calls to providers are not implemented yet.
    """
    for repo in selected_repos:
        if local:
            _setup_local_mirrors_for_repo(
                repo,
                repositories_base_dir=repositories_base_dir,
                all_repos=all_repos,
                preview=preview,
            )

        if remote:
            _setup_remote_mirrors_for_repo(
                repo,
                repositories_base_dir=repositories_base_dir,
                all_repos=all_repos,
                preview=preview,
            )
32 src/pkgmgr/actions/mirror/types.py Normal file
@@ -0,0 +1,32 @@
from __future__ import annotations

from dataclasses import dataclass
from typing import Any, Dict

Repository = Dict[str, Any]
MirrorMap = Dict[str, str]


@dataclass(frozen=True)
class RepoMirrorContext:
    """
    Bundle mirror-related information for a single repository.
    """

    identifier: str
    repo_dir: str
    config_mirrors: MirrorMap
    file_mirrors: MirrorMap

    @property
    def resolved_mirrors(self) -> MirrorMap:
        """
        Combined mirrors from config and MIRRORS file.

        Strategy:
        - Start from config mirrors
        - Overlay MIRRORS file (file wins on same name)
        """
        merged: MirrorMap = dict(self.config_mirrors)
        merged.update(self.file_mirrors)
        return merged
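A short sketch of the resolved_mirrors precedence (all values hypothetical, not part of the diff):

from pkgmgr.actions.mirror.types import RepoMirrorContext

ctx = RepoMirrorContext(
    identifier="acme/tool",
    repo_dir="/home/user/Repositories/acme/tool",
    config_mirrors={"origin": "git@old.example.org:acme/tool.git"},
    file_mirrors={
        "origin": "git@github.com:acme/tool.git",
        "backup": "ssh://git@backup.example.org/acme/tool.git",
    },
)
print(ctx.resolved_mirrors)
# → {'origin': 'git@github.com:acme/tool.git', 'backup': 'ssh://git@backup.example.org/acme/tool.git'}
# The MIRRORS file entry wins for 'origin'; config-only entries would still be kept.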
Some files were not shown because too many files have changed in this diff.