Compare commits

...

19 Commits

Author SHA1 Message Date
Kevin Veen-Birkenbach
a20814cb37 Release version 1.8.7
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-19 14:15:47 +01:00
Kevin Veen-Birkenbach
feb5ba267f refactor(release): move file helpers into files package
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
https://chatgpt.com/share/69454ef4-e038-800f-a14b-4e633e76f241
2025-12-19 14:11:04 +01:00
Kevin Veen-Birkenbach
591be4ef35 test(release): update pyproject version tests for PEP 621 and RuntimeError handling
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Adjust tests to expect RuntimeError instead of SystemExit
- Add coverage for missing [project] section in pyproject.toml
- Keep spec macro %{?dist} intact in test fixtures
- Minor cleanup and reformatting of test cases

https://chatgpt.com/share/69454836-4698-800f-9d19-7e67e8e789d6
2025-12-19 14:06:33 +01:00
Kevin Veen-Birkenbach
3e6ef0fd68 release: fix pyproject.toml version update for PEP 621 projects
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
Update version handling to correctly modify [project].version in pyproject.toml.
The previous implementation only matched top-level version assignments and
failed for PEP 621 layouts.

- Restrict update to the [project] section
- Allow leading whitespace in version lines
- Replace sys.exit() with proper exceptions
- Remove unused sys import

https://chatgpt.com/share/69454836-4698-800f-9d19-7e67e8e789d6
2025-12-19 13:42:26 +01:00
Kevin Veen-Birkenbach
3d5c770def Solved ruff F401
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-18 19:16:15 +01:00
Kevin Veen-Birkenbach
f4339a746a executed 'ruff format --check .'
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-18 14:04:44 +01:00
Kevin Veen-Birkenbach
763f02a9a4 Release version 1.8.6
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-17 23:50:31 +01:00
Kevin Veen-Birkenbach
2eec873a17 Solved Debian Bug
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
https://chatgpt.com/share/69432655-a948-800f-8c0d-353921cdf644
2025-12-17 23:29:04 +01:00
Kevin Veen-Birkenbach
17ee947930 ci: pass NIX_CONFIG with GitHub token into all test containers
- Add NIX_CONFIG with GitHub access token to all CI test workflows
- Export NIX_CONFIG in Makefile for propagation to test scripts
- Forward NIX_CONFIG explicitly into all docker run invocations
- Prevent GitHub API rate limit errors during Nix-based tests

https://chatgpt.com/share/69432655-a948-800f-8c0d-353921cdf644
2025-12-17 23:29:04 +01:00
Kevin Veen-Birkenbach
b989bdd4eb Release version 1.8.5 2025-12-17 23:29:04 +01:00
Kevin Veen-Birkenbach
c4da8368d8 --- Release Error --- 2025-12-17 23:28:45 +01:00
Kevin Veen-Birkenbach
997c265cfb refactor(git): introduce GitRunError hierarchy, surface non-repo errors, and improve verification queries
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Replace legacy GitError usage with a clearer exception hierarchy:

  * GitBaseError as the common root for all git-related failures
  * GitRunError for subprocess execution failures
  * GitQueryError for read-only query failures
  * GitCommandError for state-changing command failures
  * GitNotRepositoryError to explicitly signal “not a git repository” situations
* Update git runner to detect “not a git repository” stderr and raise GitNotRepositoryError with rich context (cwd, command, stderr)
* Refactor repository verification to use dedicated query helpers instead of ad-hoc subprocess calls:

  * get_remote_head_commit (ls-remote) for pull mode
  * get_head_commit for local mode
  * get_latest_signing_key (%GK) for signature verification
* Add strict vs best-effort behavior in verify_repository:

  * Best-effort collection for reporting (does not block when no verification config exists)
  * Strict retrieval and explicit error messages when verification is configured
  * Clear failure cases when commit/signing key cannot be determined
* Add new unit tests covering:

  * get_latest_signing_key output stripping and error wrapping
  * get_remote_head_commit parsing, empty output, and error wrapping
  * verify_repository success/failure scenarios and “do not swallow GitNotRepositoryError”
* Adjust imports and exception handling across actions/commands/queries to align with GitRunError-based handling while keeping GitNotRepositoryError uncaught for debugging clarity

https://chatgpt.com/share/6943173c-508c-800f-8879-af75d131c79b
2025-12-17 21:48:03 +01:00
Kevin Veen-Birkenbach
955028288f Release version 1.8.4
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-17 11:20:16 +01:00
Kevin Veen-Birkenbach
866572e252 ci(docker): fix repo mount path for pkgmgr as base layer of Infinito.Nexus
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
Standardize Docker/CI/test environments to mount pkgmgr at /opt/src/pkgmgr.
This makes the layering explicit: pkgmgr is the lower-level foundation used by
Infinito.Nexus.

Infra-only change (Docker, CI, shell scripts). No runtime or Nix semantics changed.

https://chatgpt.com/share/69427fe7-e288-800f-90a4-c1c3c11a8484
2025-12-17 11:03:02 +01:00
Kevin Veen-Birkenbach
b0a733369e Optimized output for debugging
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-17 10:51:56 +01:00
Kevin Veen-Birkenbach
c5843ccd30 Release version 1.8.3
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-16 19:49:51 +01:00
Kevin Veen-Birkenbach
3cb7852cb4 feat(mirrors): support URL-only MIRRORS entries and keep git config clean
- Allow MIRRORS to contain plain URLs (one per line) in addition to legacy "NAME URL"
- Treat strings as single URLs to avoid iterable pitfalls
- Write PyPI URLs as metadata-only entries (never added to git config)
- Keep MIRRORS as the single source of truth for mirror setup
- Update integration test to assert URL-only MIRRORS output

https://chatgpt.com/share/6941a9aa-b8b4-800f-963d-2486b34856b1
2025-12-16 19:49:09 +01:00
Kevin Veen-Birkenbach
f995e3d368 Release version 1.8.2
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-16 19:22:41 +01:00
Kevin Veen-Birkenbach
ffa9d9660a gpt-5.2 ChatGPT: refactor tools code into cli.tools.vscode and add unit tests
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Move VS Code workspace logic (incl. guards) from cli.commands.tools into cli.tools.vscode
* Extract shared repo path resolution into cli.tools.paths and reuse for explore/terminal
* Simplify cli.commands.tools to pure orchestration via open_vscode_workspace
* Update existing tools command unit test to assert delegation instead of patching removed internals
* Add new unit tests for cli.tools.paths and cli.tools.vscode (workspace creation, reuse, guard errors)

https://chatgpt.com/share/69419a6a-c9e4-800f-9538-b6652b2da6b3
2025-12-16 18:43:56 +01:00
231 changed files with 2789 additions and 1609 deletions

View File

@@ -11,7 +11,9 @@ jobs:
fail-fast: false
matrix:
distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

View File

@@ -12,7 +12,9 @@ jobs:
fail-fast: false
matrix:
distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

View File

@@ -11,7 +11,9 @@ jobs:
fail-fast: false
matrix:
distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

View File

@@ -7,7 +7,9 @@ jobs:
test-integration:
runs-on: ubuntu-latest
timeout-minutes: 30
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

View File

@@ -7,7 +7,9 @@ jobs:
test-unit:
runs-on: ubuntu-latest
timeout-minutes: 30
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

View File

@@ -11,7 +11,9 @@ jobs:
fail-fast: false
matrix:
distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
@@ -19,27 +21,26 @@ jobs:
- name: Show Docker version
run: docker version
# 🔹 BUILD virgin image if missing
- name: Build virgin container (${{ matrix.distro }})
run: |
set -euo pipefail
PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin
# 🔹 RUN test inside virgin image
- name: Virgin ${{ matrix.distro }} pkgmgr test (root)
run: |
set -euo pipefail
docker run --rm \
-v "$PWD":/src \
-v "$PWD":/opt/src/pkgmgr \
-v pkgmgr_repos:/root/Repositories \
-v pkgmgr_pip_cache:/root/.cache/pip \
-w /src \
-e NIX_CONFIG="${NIX_CONFIG}" \
-w /opt/src/pkgmgr \
"pkgmgr-${{ matrix.distro }}-virgin" \
bash -lc '
set -euo pipefail
git config --global --add safe.directory /src
git config --global --add safe.directory /opt/src/pkgmgr
make install
make setup
@@ -50,5 +51,5 @@ jobs:
pkgmgr version pkgmgr
echo ">>> Running Nix-based: nix run .#pkgmgr -- version pkgmgr"
nix run /src#pkgmgr -- version pkgmgr
nix run /opt/src/pkgmgr#pkgmgr -- version pkgmgr
'

View File

@@ -11,7 +11,9 @@ jobs:
fail-fast: false
matrix:
distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
@@ -19,20 +21,19 @@ jobs:
- name: Show Docker version
run: docker version
# 🔹 BUILD virgin image if missing
- name: Build virgin container (${{ matrix.distro }})
run: |
set -euo pipefail
PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin
# 🔹 RUN test inside virgin image as non-root
- name: Virgin ${{ matrix.distro }} pkgmgr test (user)
run: |
set -euo pipefail
docker run --rm \
-v "$PWD":/src \
-w /src \
-v "$PWD":/opt/src/pkgmgr \
-e NIX_CONFIG="${NIX_CONFIG}" \
-w /opt/src/pkgmgr \
"pkgmgr-${{ matrix.distro }}-virgin" \
bash -lc '
set -euo pipefail
@@ -42,7 +43,7 @@ jobs:
useradd -m dev
echo "dev ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/dev
chmod 0440 /etc/sudoers.d/dev
chown -R dev:dev /src
chown -R dev:dev /opt/src/pkgmgr
mkdir -p /nix/store /nix/var/nix /nix/var/log/nix /nix/var/nix/profiles
chown -R dev:dev /nix
@@ -51,7 +52,7 @@ jobs:
sudo -H -u dev env HOME=/home/dev PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 bash -lc "
set -euo pipefail
cd /src
cd /opt/src/pkgmgr
make setup-venv
. \"\$HOME/.venvs/pkgmgr/bin/activate\"
@@ -59,6 +60,6 @@ jobs:
pkgmgr version pkgmgr
export NIX_REMOTE=local
nix run /src#pkgmgr -- version pkgmgr
nix run /opt/src/pkgmgr#pkgmgr -- version pkgmgr
"
'

View File

@@ -1,3 +1,42 @@
## [1.8.7] - 2025-12-19
* * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
## [1.8.6] - 2025-12-17
* Prevent Rate Limits during GitHub Nix Setups
## [1.8.5] - 2025-12-17
* * Clearer Git error handling, especially when a directory is not a Git repository.
* More reliable repository verification with improved commit and GPG signature checks.
* Better error messages and overall robustness when working with Git-based workflows.
## [1.9.0] - 2025-12-17
* Automated release.
## [1.8.4] - 2025-12-17
* * Made pkgmgr's base-layer role explicit by standardizing the Docker/CI mount path to *`/opt/src/pkgmgr`*.
## [1.8.3] - 2025-12-16
* MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
## [1.8.2] - 2025-12-16
* * ***pkgmgr tools code*** is more robust and predictable: it now fails early with clear errors if VS Code is not installed or a repository is not yet identified.
## [1.8.1] - 2025-12-16
* * Improved stability and consistency of all Git operations (clone, pull, push, release, branch handling) with clearer error messages and predictable preview behavior.

View File

@@ -50,6 +50,6 @@ RUN set -euo pipefail; \
# Entry point
COPY scripts/docker/entry.sh /usr/local/bin/docker-entry.sh
WORKDIR /src
WORKDIR /opt/src/pkgmgr
ENTRYPOINT ["/usr/local/bin/docker-entry.sh"]
CMD ["pkgmgr", "--help"]

View File

@@ -10,6 +10,10 @@ DISTROS ?= arch debian ubuntu fedora centos
PKGMGR_DISTRO ?= arch
export PKGMGR_DISTRO
# Nix Config Variable (To avoid rate limit)
NIX_CONFIG ?=
export NIX_CONFIG
# ------------------------------------------------------------
# Base images
# (kept for documentation/reference; actual build logic is in scripts/build)

View File

@@ -32,7 +32,7 @@
rec {
pkgmgr = pyPkgs.buildPythonApplication {
pname = "package-manager";
version = "1.8.1";
version = "1.8.7";
# Use the git repo as source
src = ./.;

View File

@@ -1,7 +1,7 @@
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>
pkgname=package-manager
pkgver=1.8.1
pkgver=1.8.7
pkgrel=1
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
arch=('any')

View File

@@ -1,3 +1,49 @@
package-manager (1.8.7-1) unstable; urgency=medium
* * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
-- Kevin Veen-Birkenbach <kevin@veen.world> Fri, 19 Dec 2025 14:15:47 +0100
package-manager (1.8.6-1) unstable; urgency=medium
* Prevent Rate Limits during GitHub Nix Setups
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 23:50:31 +0100
package-manager (1.8.5-1) unstable; urgency=medium
* * Clearer Git error handling, especially when a directory is not a Git repository.
* More reliable repository verification with improved commit and GPG signature checks.
* Better error messages and overall robustness when working with Git-based workflows.
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 22:15:48 +0100
package-manager (1.9.0-1) unstable; urgency=medium
* Automated release.
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 22:10:31 +0100
package-manager (1.8.4-1) unstable; urgency=medium
* * Made pkgmgr's base-layer role explicit by standardizing the Docker/CI mount path to *`/opt/src/pkgmgr`*.
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 11:20:16 +0100
package-manager (1.8.3-1) unstable; urgency=medium
* MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 16 Dec 2025 19:49:51 +0100
package-manager (1.8.2-1) unstable; urgency=medium
* * ***pkgmgr tools code*** is more robust and predictable: it now fails early with clear errors if VS Code is not installed or a repository is not yet identified.
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 16 Dec 2025 19:22:41 +0100
package-manager (1.8.1-1) unstable; urgency=medium
* * Improved stability and consistency of all Git operations (clone, pull, push, release, branch handling) with clearer error messages and predictable preview behavior.

View File

@@ -1,5 +1,5 @@
Name: package-manager
Version: 1.8.1
Version: 1.8.7
Release: 1%{?dist}
Summary: Wrapper that runs Kevin's package-manager via Nix flake
@@ -74,6 +74,31 @@ echo ">>> package-manager removed. Nix itself was not removed."
/usr/lib/package-manager/
%changelog
* Fri Dec 19 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.7-1
- * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.6-1
- Prevent Rate Limits during GitHub Nix Setups
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.5-1
- * Clearer Git error handling, especially when a directory is not a Git repository.
* More reliable repository verification with improved commit and GPG signature checks.
* Better error messages and overall robustness when working with Git-based workflows.
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.0-1
- Automated release.
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.4-1
- * Made pkgmgrs base-layer role explicit by standardizing the Docker/CI mount path to *`/opt/src/pkgmgr`*.
* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.3-1
- MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.2-1
- * ***pkgmgr tools code*** is more robust and predictable: it now fails early with clear errors if VS Code is not installed or a repository is not yet identified.
* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.1-1
- * Improved stability and consistency of all Git operations (clone, pull, push, release, branch handling) with clearer error messages and predictable preview behavior.
* Mirrors are now handled cleanly: only valid Git remotes are used for Git operations, while non-Git URLs (e.g. PyPI) are excluded, preventing broken or confusing repository configs.

View File

@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "kpmx"
version = "1.8.1"
version = "1.8.7"
description = "Kevin's package-manager tool (pkgmgr)"
readme = "README.md"
requires-python = ">=3.9"

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env bash
set -euo pipefail
echo "[docker] Starting package-manager container"
echo "[docker-pkgmgr] Starting package-manager container"
# ---------------------------------------------------------------------------
# Log distribution info
@@ -9,19 +9,19 @@ echo "[docker] Starting package-manager container"
if [[ -f /etc/os-release ]]; then
# shellcheck disable=SC1091
. /etc/os-release
echo "[docker] Detected distro: ${ID:-unknown} (like: ${ID_LIKE:-})"
echo "[docker-pkgmgr] Detected distro: ${ID:-unknown} (like: ${ID_LIKE:-})"
fi
# Always use /src (mounted from host) as working directory
echo "[docker] Using /src as working directory"
cd /src
# Always use /opt/src/pkgmgr (mounted from host) as working directory
echo "[docker-pkgmgr] Using /opt/src/pkgmgr as working directory"
cd /opt/src/pkgmgr
# ---------------------------------------------------------------------------
# DEV mode: rebuild package-manager from the mounted /src tree
# DEV mode: rebuild package-manager from the mounted /opt/src/pkgmgr tree
# ---------------------------------------------------------------------------
if [[ "${REINSTALL_PKGMGR:-0}" == "1" ]]; then
echo "[docker] DEV mode enabled (REINSTALL_PKGMGR=1)"
echo "[docker] Rebuilding package-manager from /src via scripts/installation/package.sh..."
echo "[docker-pkgmgr] DEV mode enabled (REINSTALL_PKGMGR=1)"
echo "[docker-pkgmgr] Rebuilding package-manager from /opt/src/pkgmgr via scripts/installation/package.sh..."
bash scripts/installation/package.sh || exit 1
fi
@@ -29,9 +29,9 @@ fi
# Hand off to pkgmgr or arbitrary command
# ---------------------------------------------------------------------------
if [[ $# -eq 0 ]]; then
echo "[docker] No arguments provided. Showing pkgmgr help..."
echo "[docker-pkgmgr] No arguments provided. Showing pkgmgr help..."
exec pkgmgr --help
else
echo "[docker] Executing command: $*"
echo "[docker-pkgmgr] Executing command: $*"
exec "$@"
fi

View File

@@ -6,7 +6,7 @@ echo "[arch/package] Building Arch package (makepkg --nodeps) in an isolated bui
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
# We must not build inside /src (mounted repo). Build in /tmp to avoid permission issues.
# We must not build inside /opt/src/pkgmgr (mounted repo). Build in /tmp to avoid permission issues.
BUILD_ROOT="/tmp/package-manager-arch-build"
PKG_SRC_DIR="${PROJECT_ROOT}/packaging/arch"
PKG_BUILD_DIR="${BUILD_ROOT}/packaging/arch"

View File

@@ -6,12 +6,13 @@ echo ">>> Running E2E tests: $PKGMGR_DISTRO"
echo "============================================================"
docker run --rm \
-v "$(pwd):/src" \
-v "$(pwd):/opt/src/pkgmgr" \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
-e REINSTALL_PKGMGR=1 \
-e TEST_PATTERN="${TEST_PATTERN}" \
--workdir /src \
-e NIX_CONFIG="${NIX_CONFIG}" \
--workdir /opt/src/pkgmgr \
"pkgmgr-${PKGMGR_DISTRO}" \
bash -lc '
set -euo pipefail
@@ -40,14 +41,14 @@ docker run --rm \
}
# Mark the mounted repository as safe to avoid Git ownership errors.
# Newer Git (e.g. on Ubuntu) complains about the gitdir (/src/.git),
# older versions about the worktree (/src). Nix turns "." into the
# flake input "git+file:///src", which then uses Git under the hood.
# Newer Git (e.g. on Ubuntu) complains about the gitdir (/opt/src/pkgmgr/.git),
# older versions about the worktree (/opt/src/pkgmgr). Nix turns "." into the
# flake input "git+file:///opt/src/pkgmgr", which then uses Git under the hood.
if command -v git >/dev/null 2>&1; then
# Worktree path
git config --global --add safe.directory /src || true
git config --global --add safe.directory /opt/src/pkgmgr || true
# Gitdir path shown in the "dubious ownership" error
git config --global --add safe.directory /src/.git || true
git config --global --add safe.directory /opt/src/pkgmgr/.git || true
# Ephemeral CI containers: allow all paths as a last resort
git config --global --add safe.directory "*" || true
fi
@@ -55,6 +56,6 @@ docker run --rm \
# Run the E2E tests inside the Nix development shell
nix develop .#default --no-write-lock-file -c \
python3 -m unittest discover \
-s /src/tests/e2e \
-s /opt/src/pkgmgr/tests/e2e \
-p "$TEST_PATTERN"
'

View File

@@ -9,18 +9,19 @@ echo ">>> Image: ${IMAGE}"
echo "============================================================"
docker run --rm \
-v "$(pwd):/src" \
-v "$(pwd):/opt/src/pkgmgr" \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
--workdir /src \
--workdir /opt/src/pkgmgr \
-e REINSTALL_PKGMGR=1 \
-e NIX_CONFIG="${NIX_CONFIG}" \
"${IMAGE}" \
bash -lc '
set -euo pipefail
if command -v git >/dev/null 2>&1; then
git config --global --add safe.directory /src || true
git config --global --add safe.directory /src/.git || true
git config --global --add safe.directory /opt/src/pkgmgr || true
git config --global --add safe.directory /opt/src/pkgmgr/.git || true
git config --global --add safe.directory "*" || true
fi
@@ -38,9 +39,9 @@ docker run --rm \
# ------------------------------------------------------------
# Retry helper for GitHub API rate-limit (HTTP 403)
# ------------------------------------------------------------
if [[ -f /src/scripts/nix/lib/retry_403.sh ]]; then
if [[ -f /opt/src/pkgmgr/scripts/nix/lib/retry_403.sh ]]; then
# shellcheck source=./scripts/nix/lib/retry_403.sh
source /src/scripts/nix/lib/retry_403.sh
source /opt/src/pkgmgr/scripts/nix/lib/retry_403.sh
elif [[ -f ./scripts/nix/lib/retry_403.sh ]]; then
# shellcheck source=./scripts/nix/lib/retry_403.sh
source ./scripts/nix/lib/retry_403.sh

View File

@@ -17,8 +17,9 @@ echo
# ------------------------------------------------------------
if OUTPUT=$(docker run --rm \
-e REINSTALL_PKGMGR=1 \
-v "$(pwd):/src" \
-w /src \
-v "$(pwd):/opt/src/pkgmgr" \
-w /opt/src/pkgmgr \
-e NIX_CONFIG="${NIX_CONFIG}" \
"${IMAGE}" \
bash -lc '
set -euo pipefail

View File

@@ -6,19 +6,20 @@ echo ">>> Running INTEGRATION tests in ${PKGMGR_DISTRO} container"
echo "============================================================"
docker run --rm \
-v "$(pwd):/src" \
-v "$(pwd):/opt/src/pkgmgr" \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
--workdir /src \
--workdir /opt/src/pkgmgr \
-e REINSTALL_PKGMGR=1 \
-e TEST_PATTERN="${TEST_PATTERN}" \
-e NIX_CONFIG="${NIX_CONFIG}" \
"pkgmgr-${PKGMGR_DISTRO}" \
bash -lc '
set -e;
git config --global --add safe.directory /src || true;
git config --global --add safe.directory /opt/src/pkgmgr || true;
nix develop .#default --no-write-lock-file -c \
python3 -m unittest discover \
-s tests/integration \
-t /src \
-t /opt/src/pkgmgr \
-p "$TEST_PATTERN";
'

View File

@@ -6,19 +6,20 @@ echo ">>> Running UNIT tests in ${PKGMGR_DISTRO} container"
echo "============================================================"
docker run --rm \
-v "$(pwd):/src" \
-v "$(pwd):/opt/src/pkgmgr" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
--workdir /src \
--workdir /opt/src/pkgmgr \
-e REINSTALL_PKGMGR=1 \
-e TEST_PATTERN="${TEST_PATTERN}" \
-e NIX_CONFIG="${NIX_CONFIG}" \
"pkgmgr-${PKGMGR_DISTRO}" \
bash -lc '
set -e;
git config --global --add safe.directory /src || true;
git config --global --add safe.directory /opt/src/pkgmgr || true;
nix develop .#default --no-write-lock-file -c \
python3 -m unittest discover \
-s tests/unit \
-t /src \
-t /opt/src/pkgmgr \
-p "$TEST_PATTERN";
'

View File

@@ -25,12 +25,12 @@ __all__ = ["cli"]
def __getattr__(name: str) -> Any:
"""
Lazily expose ``pkgmgr.cli`` as attribute on the top-level package.
"""
Lazily expose ``pkgmgr.cli`` as attribute on the top-level package.
This keeps ``import pkgmgr`` lightweight while still allowing
``from pkgmgr import cli`` in tests and entry points.
"""
if name == "cli":
return import_module("pkgmgr.cli")
raise AttributeError(f"module 'pkgmgr' has no attribute {name!r}")
This keeps ``import pkgmgr`` lightweight while still allowing
``from pkgmgr import cli`` in tests and entry points.
"""
if name == "cli":
return import_module("pkgmgr.cli")
raise AttributeError(f"module 'pkgmgr' has no attribute {name!r}")

View File

@@ -3,4 +3,4 @@ from __future__ import annotations
# expose subpackages for patch() / resolve_name() friendliness
from . import release as release # noqa: F401
__all__ = ["release"]
__all__ = ["release"]

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
from typing import Optional
from pkgmgr.core.git.errors import GitError
from pkgmgr.core.git.errors import GitRunError
from pkgmgr.core.git.queries import get_current_branch
from pkgmgr.core.git.commands import (
GitDeleteRemoteBranchError,
@@ -32,7 +32,7 @@ def close_branch(
if not name:
try:
name = get_current_branch(cwd=cwd)
except GitError as exc:
except GitRunError as exc:
raise RuntimeError(f"Failed to detect current branch: {exc}") from exc
if not name:
@@ -48,14 +48,18 @@ def close_branch(
# Confirmation
if not force:
answer = input(
f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): "
).strip().lower()
answer = (
input(
f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): "
)
.strip()
.lower()
)
if answer != "y":
print("Aborted closing branch.")
return
# Execute workflow (commands raise specific GitError subclasses)
# Execute workflow (commands raise specific GitRunError subclasses)
fetch("origin", cwd=cwd)
checkout(target_base, cwd=cwd)
pull("origin", target_base, cwd=cwd)

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
from typing import Optional
from pkgmgr.core.git.errors import GitError
from pkgmgr.core.git.errors import GitRunError
from pkgmgr.core.git.queries import get_current_branch
from pkgmgr.core.git.commands import (
GitDeleteRemoteBranchError,
@@ -26,7 +26,7 @@ def drop_branch(
if not name:
try:
name = get_current_branch(cwd=cwd)
except GitError as exc:
except GitRunError as exc:
raise RuntimeError(f"Failed to detect current branch: {exc}") from exc
if not name:
@@ -41,15 +41,19 @@ def drop_branch(
# Confirmation
if not force:
answer = input(
f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): "
).strip().lower()
answer = (
input(
f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): "
)
.strip()
.lower()
)
if answer != "y":
print("Aborted dropping branch.")
return
delete_local_branch(name, cwd=cwd, force=False)
# Remote delete (special-case message)
try:
delete_remote_branch("origin", name, cwd=cwd)

View File

@@ -30,7 +30,7 @@ def open_branch(
resolved_base = resolve_base_branch(base_branch, fallback_base, cwd=cwd)
# Workflow (commands raise specific GitError subclasses)
# Workflow (commands raise specific GitBaseError subclasses)
fetch("origin", cwd=cwd)
checkout(resolved_base, cwd=cwd)
pull("origin", resolved_base, cwd=cwd)

View File

@@ -1,15 +1,18 @@
import yaml
import os
from pkgmgr.core.config.save import save_user_config
from pkgmgr.core.config.save import save_user_config
def interactive_add(config,USER_CONFIG_PATH:str):
def interactive_add(config, USER_CONFIG_PATH: str):
"""Interactively prompt the user to add a new repository entry to the user config."""
print("Adding a new repository configuration entry.")
new_entry = {}
new_entry["provider"] = input("Provider (e.g., github.com): ").strip()
new_entry["account"] = input("Account (e.g., yourusername): ").strip()
new_entry["repository"] = input("Repository name (e.g., mytool): ").strip()
new_entry["command"] = input("Command (optional, leave blank to auto-detect): ").strip()
new_entry["command"] = input(
"Command (optional, leave blank to auto-detect): "
).strip()
new_entry["description"] = input("Description (optional): ").strip()
new_entry["replacement"] = input("Replacement (optional): ").strip()
new_entry["alias"] = input("Alias (optional): ").strip()
@@ -25,12 +28,12 @@ def interactive_add(config,USER_CONFIG_PATH:str):
confirm = input("Add this entry to user config? (y/N): ").strip().lower()
if confirm == "y":
if os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, 'r') as f:
with open(USER_CONFIG_PATH, "r") as f:
user_config = yaml.safe_load(f) or {}
else:
user_config = {"repositories": []}
user_config.setdefault("repositories", [])
user_config["repositories"].append(new_entry)
save_user_config(user_config,USER_CONFIG_PATH)
save_user_config(user_config, USER_CONFIG_PATH)
else:
print("Entry not added.")
print("Entry not added.")

View File

@@ -107,11 +107,15 @@ def config_init(
# Already known?
if key in default_keys:
skipped += 1
print(f"[SKIP] (defaults) {provider}/{account}/{repo_name}")
print(
f"[SKIP] (defaults) {provider}/{account}/{repo_name}"
)
continue
if key in existing_keys:
skipped += 1
print(f"[SKIP] (user-config) {provider}/{account}/{repo_name}")
print(
f"[SKIP] (user-config) {provider}/{account}/{repo_name}"
)
continue
print(f"[ADD] {provider}/{account}/{repo_name}")
@@ -121,7 +125,9 @@ def config_init(
if verified_commit:
print(f"[INFO] Latest commit: {verified_commit}")
else:
print("[WARN] Could not read commit (not a git repo or no commits).")
print(
"[WARN] Could not read commit (not a git repo or no commits)."
)
entry: Dict[str, Any] = {
"provider": provider,

View File

@@ -1,6 +1,7 @@
import yaml
from pkgmgr.core.config.load import load_config
def show_config(selected_repos, user_config_path, full_config=False):
"""Display configuration for one or more repositories, or the entire merged config."""
if full_config:
@@ -8,8 +9,10 @@ def show_config(selected_repos, user_config_path, full_config=False):
print(yaml.dump(merged, default_flow_style=False))
else:
for repo in selected_repos:
identifier = f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
identifier = (
f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"
)
print(f"Repository: {identifier}")
for key, value in repo.items():
print(f" {key}: {value}")
print("-" * 40)
print("-" * 40)

View File

@@ -66,10 +66,7 @@ def _ensure_repo_dir(
repo_dir = get_repo_dir(repositories_base_dir, repo)
if not os.path.exists(repo_dir):
print(
f"Repository directory '{repo_dir}' does not exist. "
"Cloning it now..."
)
print(f"Repository directory '{repo_dir}' does not exist. Cloning it now...")
clone_repos(
[repo],
repositories_base_dir,
@@ -79,10 +76,7 @@ def _ensure_repo_dir(
clone_mode,
)
if not os.path.exists(repo_dir):
print(
f"Cloning failed for repository {identifier}. "
"Skipping installation."
)
print(f"Cloning failed for repository {identifier}. Skipping installation.")
return None
return repo_dir
@@ -115,7 +109,9 @@ def _verify_repo(
if silent:
# Non-interactive mode: continue with a warning.
print(f"[Warning] Continuing despite verification failure for {identifier} (--silent).")
print(
f"[Warning] Continuing despite verification failure for {identifier} (--silent)."
)
else:
choice = input("Continue anyway? [y/N]: ").strip().lower()
if choice != "y":
@@ -232,12 +228,16 @@ def install_repos(
code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"installer failed (exit={code})"))
if not quiet:
print(f"[Warning] install: repository {identifier} failed (exit={code}). Continuing...")
print(
f"[Warning] install: repository {identifier} failed (exit={code}). Continuing..."
)
continue
except Exception as exc:
failures.append((identifier, f"unexpected error: {exc}"))
if not quiet:
print(f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing...")
print(
f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing..."
)
continue
if failures and emit_summary and not quiet:

View File

@@ -14,6 +14,10 @@ from pkgmgr.actions.install.installers.python import PythonInstaller # noqa: F4
from pkgmgr.actions.install.installers.makefile import MakefileInstaller # noqa: F401
# OS-specific installers
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller # noqa: F401
from pkgmgr.actions.install.installers.os_packages.debian_control import DebianControlInstaller # noqa: F401
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import (
ArchPkgbuildInstaller as ArchPkgbuildInstaller,
) # noqa: F401
from pkgmgr.actions.install.installers.os_packages.debian_control import (
DebianControlInstaller as DebianControlInstaller,
) # noqa: F401
from pkgmgr.actions.install.installers.os_packages.rpm_spec import RpmSpecInstaller # noqa: F401

View File

@@ -41,7 +41,9 @@ class BaseInstaller(ABC):
return caps
for matcher in CAPABILITY_MATCHERS:
if matcher.applies_to_layer(self.layer) and matcher.is_provided(ctx, self.layer):
if matcher.applies_to_layer(self.layer) and matcher.is_provided(
ctx, self.layer
):
caps.add(matcher.name)
return caps

View File

@@ -16,7 +16,9 @@ class MakefileInstaller(BaseInstaller):
def supports(self, ctx: RepoContext) -> bool:
if os.environ.get("PKGMGR_DISABLE_MAKEFILE_INSTALLER") == "1":
if not ctx.quiet:
print("[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 skipping MakefileInstaller.")
print(
"[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 skipping MakefileInstaller."
)
return False
makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
@@ -46,7 +48,9 @@ class MakefileInstaller(BaseInstaller):
return
if not ctx.quiet:
print(f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)")
print(
f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)"
)
run_command("make install", cwd=ctx.repo_dir, preview=ctx.preview)

View File

@@ -57,7 +57,9 @@ class NixConflictResolver:
# 3) Fallback: output-name based lookup (also covers nix suggesting: `nix profile remove pkgmgr`)
if not tokens:
tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
tokens = self._profile.find_remove_tokens_for_output(
ctx, self._runner, output
)
if tokens:
if not quiet:
@@ -94,7 +96,9 @@ class NixConflictResolver:
continue
if not quiet:
print("[nix] conflict detected but could not resolve profile entries to remove.")
print(
"[nix] conflict detected but could not resolve profile entries to remove."
)
return False
return False

View File

@@ -75,7 +75,9 @@ class NixFlakeInstaller(BaseInstaller):
# Core install path
# ---------------------------------------------------------------------
def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
def _install_only(
self, ctx: "RepoContext", output: str, allow_failure: bool
) -> None:
install_cmd = f"nix profile install {self._installable(ctx, output)}"
if not ctx.quiet:
@@ -96,7 +98,9 @@ class NixFlakeInstaller(BaseInstaller):
output=output,
):
if not ctx.quiet:
print(f"[nix] output '{output}' successfully installed after conflict cleanup.")
print(
f"[nix] output '{output}' successfully installed after conflict cleanup."
)
return
if not ctx.quiet:
@@ -107,20 +111,26 @@ class NixFlakeInstaller(BaseInstaller):
# If indices are supported, try legacy index-upgrade path.
if self._indices_supported is not False:
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
indices = self._profile.find_installed_indices_for_output(
ctx, self._runner, output
)
upgraded = False
for idx in indices:
if self._upgrade_index(ctx, idx):
upgraded = True
if not ctx.quiet:
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
print(
f"[nix] output '{output}' successfully upgraded (index {idx})."
)
if upgraded:
return
if indices and not ctx.quiet:
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
print(
f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
)
for idx in indices:
self._remove_index(ctx, idx)
@@ -139,7 +149,9 @@ class NixFlakeInstaller(BaseInstaller):
print(f"[nix] output '{output}' successfully re-installed.")
return
print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})")
print(
f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})"
)
if not allow_failure:
raise SystemExit(final.returncode)
@@ -149,7 +161,9 @@ class NixFlakeInstaller(BaseInstaller):
# force_update path
# ---------------------------------------------------------------------
def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
def _force_upgrade_output(
self, ctx: "RepoContext", output: str, allow_failure: bool
) -> None:
# Prefer token path if indices unsupported (new nix)
if self._indices_supported is False:
self._remove_tokens_for_output(ctx, output)
@@ -158,14 +172,18 @@ class NixFlakeInstaller(BaseInstaller):
print(f"[nix] output '{output}' successfully upgraded.")
return
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
indices = self._profile.find_installed_indices_for_output(
ctx, self._runner, output
)
upgraded_any = False
for idx in indices:
if self._upgrade_index(ctx, idx):
upgraded_any = True
if not ctx.quiet:
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
print(
f"[nix] output '{output}' successfully upgraded (index {idx})."
)
if upgraded_any:
if not ctx.quiet:
@@ -173,7 +191,9 @@ class NixFlakeInstaller(BaseInstaller):
return
if indices and not ctx.quiet:
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
print(
f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
)
for idx in indices:
self._remove_index(ctx, idx)
@@ -223,7 +243,9 @@ class NixFlakeInstaller(BaseInstaller):
return
if not ctx.quiet:
print(f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}")
print(
f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}"
)
for t in tokens:
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)

View File

@@ -101,7 +101,9 @@ class NixProfileInspector:
data = self.list_json(ctx, runner)
entries = normalize_elements(data)
tokens: List[str] = [out] # critical: matches nix's own suggestion for conflicts
tokens: List[str] = [
out
] # critical: matches nix's own suggestion for conflicts
for e in entries:
if entry_matches_output(e, out):

View File

@@ -48,7 +48,9 @@ class NixProfileListReader:
return uniq
def indices_matching_store_prefixes(self, ctx: "RepoContext", prefixes: List[str]) -> List[int]:
def indices_matching_store_prefixes(
self, ctx: "RepoContext", prefixes: List[str]
) -> List[int]:
prefixes = [self._store_prefix(p) for p in prefixes if p]
prefixes = [p for p in prefixes if p]
if not prefixes:

View File

@@ -11,6 +11,7 @@ if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
from .runner import CommandRunner
@dataclass(frozen=True)
class RetryPolicy:
max_attempts: int = 7
@@ -35,13 +36,19 @@ class GitHubRateLimitRetry:
install_cmd: str,
) -> RunResult:
quiet = bool(getattr(ctx, "quiet", False))
delays = list(self._fibonacci_backoff(self._policy.base_delay_seconds, self._policy.max_attempts))
delays = list(
self._fibonacci_backoff(
self._policy.base_delay_seconds, self._policy.max_attempts
)
)
last: RunResult | None = None
for attempt, base_delay in enumerate(delays, start=1):
if not quiet:
print(f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}")
print(
f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}"
)
res = runner.run(ctx, install_cmd, allow_failure=True)
last = res
@@ -56,7 +63,9 @@ class GitHubRateLimitRetry:
if attempt >= self._policy.max_attempts:
break
jitter = random.randint(self._policy.jitter_seconds_min, self._policy.jitter_seconds_max)
jitter = random.randint(
self._policy.jitter_seconds_min, self._policy.jitter_seconds_max
)
wait_time = base_delay + jitter
if not quiet:
@@ -67,7 +76,11 @@ class GitHubRateLimitRetry:
time.sleep(wait_time)
return last if last is not None else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
return (
last
if last is not None
else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
)
@staticmethod
def _is_github_rate_limit_error(text: str) -> bool:

View File

@@ -9,6 +9,7 @@ from .types import RunResult
if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
class CommandRunner:
"""
Executes commands (shell=True) inside a repository directory (if provided).
@@ -40,7 +41,9 @@ class CommandRunner:
raise
return RunResult(returncode=1, stdout="", stderr=str(e))
res = RunResult(returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or "")
res = RunResult(
returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or ""
)
if res.returncode != 0 and not quiet:
self._print_compact_failure(res)

View File

@@ -20,7 +20,9 @@ class NixConflictTextParser:
tokens: List[str] = []
for m in pat.finditer(text or ""):
t = (m.group(1) or "").strip()
if (t.startswith("'") and t.endswith("'")) or (t.startswith('"') and t.endswith('"')):
if (t.startswith("'") and t.endswith("'")) or (
t.startswith('"') and t.endswith('"')
):
t = t[1:-1]
if t:
tokens.append(t)

View File

@@ -14,7 +14,9 @@ class PythonInstaller(BaseInstaller):
def supports(self, ctx: RepoContext) -> bool:
if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
print("[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER.")
print(
"[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER."
)
return False
return os.path.exists(os.path.join(ctx.repo_dir, "pyproject.toml"))

View File

@@ -132,7 +132,11 @@ class InstallationPipeline:
continue
if not quiet:
if ctx.force_update and state.layer is not None and installer_layer == state.layer:
if (
ctx.force_update
and state.layer is not None
and installer_layer == state.layer
):
print(
f"[pkgmgr] Running installer {installer.__class__.__name__} "
f"for {identifier} in '{repo_dir}' (upgrade requested)..."

View File

@@ -3,7 +3,7 @@ from __future__ import annotations
import os
from typing import Optional, Set
from pkgmgr.core.git.errors import GitError
from pkgmgr.core.git.errors import GitRunError
from pkgmgr.core.git.commands import (
GitAddRemoteError,
GitAddRemotePushUrlError,
@@ -90,7 +90,7 @@ def determine_primary_remote_url(
def has_origin_remote(repo_dir: str) -> bool:
try:
return "origin" in list_remotes(cwd=repo_dir)
except GitError:
except GitRunError:
return False
@@ -122,7 +122,7 @@ def _ensure_additional_push_urls(
try:
existing = get_remote_push_urls("origin", cwd=repo_dir)
except GitError:
except GitRunError:
existing = set()
for url in sorted(desired - existing):

View File

@@ -1,8 +1,9 @@
from __future__ import annotations
import os
from collections.abc import Iterable, Mapping
from typing import Union
from urllib.parse import urlparse
from typing import Mapping
from .types import MirrorMap, Repository
@@ -32,7 +33,7 @@ def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
"""
Supports:
NAME URL
URL auto name = hostname
URL -> auto-generate name from hostname
"""
path = os.path.join(repo_dir, filename)
mirrors: MirrorMap = {}
@@ -52,7 +53,8 @@ def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
# Case 1: "name url"
if len(parts) == 2:
name, url = parts
# Case 2: "url" → auto-generate name
# Case 2: "url" -> auto name
elif len(parts) == 1:
url = parts[0]
parsed = urlparse(url)
@@ -67,21 +69,56 @@ def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
continue
mirrors[name] = url
except OSError as exc:
print(f"[WARN] Could not read MIRRORS file at {path}: {exc}")
return mirrors
MirrorsInput = Union[Mapping[str, str], Iterable[str]]
def write_mirrors_file(
repo_dir: str,
mirrors: Mapping[str, str],
mirrors: MirrorsInput,
filename: str = "MIRRORS",
preview: bool = False,
) -> None:
"""
Write MIRRORS in one of two formats:
1) Mapping[str, str] -> "NAME URL" per line (legacy / compatible)
2) Iterable[str] -> "URL" per line (new preferred)
Strings are treated as a single URL (not iterated character-by-character).
"""
path = os.path.join(repo_dir, filename)
lines = [f"{name} {url}" for name, url in sorted(mirrors.items())]
lines: list[str]
if isinstance(mirrors, Mapping):
items = [
(str(name), str(url))
for name, url in mirrors.items()
if url is not None and str(url).strip()
]
items.sort(key=lambda x: (x[0], x[1]))
lines = [f"{name} {url}" for name, url in items]
else:
if isinstance(mirrors, (str, bytes)):
urls = [str(mirrors).strip()]
else:
urls = [
str(url).strip()
for url in mirrors
if url is not None and str(url).strip()
]
urls = sorted(set(urls))
lines = urls
content = "\n".join(lines) + ("\n" if lines else "")
if preview:
@@ -94,5 +131,6 @@ def write_mirrors_file(
with open(path, "w", encoding="utf-8") as fh:
fh.write(content)
print(f"[INFO] Wrote MIRRORS file at {path}")
except OSError as exc:
print(f"[ERROR] Failed to write MIRRORS file at {path}: {exc}")

View File

@@ -16,6 +16,7 @@ from .types import MirrorMap, Repository
# Helpers
# -----------------------------------------------------------------------------
def _repo_key(repo: Repository) -> Tuple[str, str, str]:
"""
Normalised key for identifying a repository in config files.
@@ -47,6 +48,7 @@ def _load_user_config(path: str) -> Dict[str, object]:
# Main merge command
# -----------------------------------------------------------------------------
def merge_mirrors(
selected_repos: List[Repository],
repositories_base_dir: str,

View File

@@ -66,7 +66,9 @@ def _setup_remote_mirrors_for_repo(
# Probe only git URLs (do not try ls-remote against PyPI etc.)
# If there are no mirrors at all, probe the primary git URL.
git_mirrors = {k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)}
git_mirrors = {
k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)
}
if not git_mirrors:
primary = determine_primary_remote_url(repo, ctx)

View File

@@ -17,7 +17,7 @@ def hostport_from_git_url(url: str) -> Tuple[str, Optional[str]]:
netloc = netloc.split("@", 1)[1]
if netloc.startswith("[") and "]" in netloc:
host = netloc[1:netloc.index("]")]
host = netloc[1 : netloc.index("]")]
rest = netloc[netloc.index("]") + 1 :]
port = rest[1:] if rest.startswith(":") else None
return host.strip(), (port.strip() if port else None)
@@ -43,7 +43,7 @@ def normalize_provider_host(host: str) -> str:
return ""
if host.startswith("[") and "]" in host:
host = host[1:host.index("]")]
host = host[1 : host.index("]")]
if ":" in host and host.count(":") == 1:
host = host.rsplit(":", 1)[0]

View File

@@ -4,7 +4,16 @@ from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.command.run import run_command
import sys
def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir, all_repos, proxy_command: str, extra_args, preview: bool):
def exec_proxy_command(
proxy_prefix: str,
selected_repos,
repositories_base_dir,
all_repos,
proxy_command: str,
extra_args,
preview: bool,
):
"""Execute a given proxy command with extra arguments for each repository."""
error_repos = []
max_exit_code = 0
@@ -22,7 +31,9 @@ def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir,
try:
run_command(full_cmd, cwd=repo_dir, preview=preview)
except SystemExit as e:
print(f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}.")
print(
f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}."
)
error_repos.append((repo_identifier, e.code))
max_exit_code = max(max_exit_code, e.code)
@@ -30,4 +41,4 @@ def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir,
print("\nSummary of failed commands:")
for repo_identifier, exit_code in error_repos:
print(f"- {repo_identifier} failed with exit code {exit_code}")
sys.exit(max_exit_code)
sys.exit(max_exit_code)

View File

@@ -1,519 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
File and metadata update helpers for the release workflow.
Responsibilities:
- Update pyproject.toml with the new version.
- Update flake.nix, PKGBUILD, RPM spec files where present.
- Prepend release entries to CHANGELOG.md.
- Maintain distribution-specific changelog files:
* debian/changelog
* RPM spec %changelog section
including maintainer metadata where applicable.
"""
from __future__ import annotations
import os
import re
import subprocess
import sys
import tempfile
from datetime import date, datetime
from typing import Optional, Tuple
from pkgmgr.core.git.queries import get_config_value
# ---------------------------------------------------------------------------
# Editor helper for interactive changelog messages
# ---------------------------------------------------------------------------
def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
"""
Open $EDITOR (fallback 'nano') so the user can enter a changelog message.
The temporary file is pre-filled with commented instructions and an
optional initial_message. Lines starting with '#' are ignored when the
message is read back.
Returns the final message (may be empty string if user leaves it blank).
"""
editor = os.environ.get("EDITOR", "nano")
with tempfile.NamedTemporaryFile(
mode="w+",
delete=False,
encoding="utf-8",
) as tmp:
tmp_path = tmp.name
tmp.write(
"# Write the changelog entry for this release.\n"
"# Lines starting with '#' will be ignored.\n"
"# Empty result will fall back to a generic message.\n\n"
)
if initial_message:
tmp.write(initial_message.strip() + "\n")
tmp.flush()
try:
subprocess.call([editor, tmp_path])
except FileNotFoundError:
print(
f"[WARN] Editor {editor!r} not found; proceeding without "
"interactive changelog message."
)
try:
with open(tmp_path, "r", encoding="utf-8") as f:
content = f.read()
finally:
try:
os.remove(tmp_path)
except OSError:
pass
lines = [line for line in content.splitlines() if not line.strip().startswith("#")]
return "\n".join(lines).strip()
# ---------------------------------------------------------------------------
# File update helpers (pyproject + extra packaging + changelog)
# ---------------------------------------------------------------------------
def update_pyproject_version(
    pyproject_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Rewrite the ``version = "X.Y.Z"`` assignment in pyproject.toml.

    A missing or unreadable file is skipped with a notice; a file without a
    recognisable version line aborts the release via ``sys.exit(1)``.  In
    preview mode the change is announced but nothing is written.
    """
    if not os.path.exists(pyproject_path):
        print(
            f"[INFO] pyproject.toml not found at: {pyproject_path}, "
            "skipping version update."
        )
        return

    try:
        with open(pyproject_path, "r", encoding="utf-8") as fh:
            original = fh.read()
    except OSError as exc:
        print(
            f"[WARN] Could not read pyproject.toml at {pyproject_path}: {exc}. "
            "Skipping version update."
        )
        return

    # Only lines of the form `version = "..."` anchored at line start.
    version_re = re.compile(r'^(version\s*=\s*")([^"]+)(")', re.MULTILINE)
    updated, hits = version_re.subn(
        lambda m: f"{m.group(1)}{new_version}{m.group(3)}", original
    )
    if hits == 0:
        print("[ERROR] Could not find version line in pyproject.toml")
        sys.exit(1)

    if preview:
        print(f"[PREVIEW] Would update pyproject.toml version to {new_version}")
        return

    with open(pyproject_path, "w", encoding="utf-8") as fh:
        fh.write(updated)
    print(f"Updated pyproject.toml version to {new_version}")
def update_flake_version(
    flake_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Rewrite every ``version = "..."`` assignment found in flake.nix.

    Missing or unreadable files, and files without any version assignment,
    are skipped with a notice.  Preview mode announces but does not write.
    """
    if not os.path.exists(flake_path):
        print("[INFO] flake.nix not found, skipping.")
        return

    try:
        with open(flake_path, "r", encoding="utf-8") as fh:
            original = fh.read()
    except Exception as exc:
        print(f"[WARN] Could not read flake.nix: {exc}")
        return

    # NOTE(review): the pattern is unanchored, so every `version = "..."`
    # in the file is rewritten, not just the package's own.
    updated, hits = re.subn(
        r'(version\s*=\s*")([^"]+)(")',
        lambda m: f"{m.group(1)}{new_version}{m.group(3)}",
        original,
    )
    if hits == 0:
        print("[WARN] No version assignment found in flake.nix, skipping.")
        return

    if preview:
        print(f"[PREVIEW] Would update flake.nix version to {new_version}")
        return

    with open(flake_path, "w", encoding="utf-8") as fh:
        fh.write(updated)
    print(f"Updated flake.nix version to {new_version}")
def update_pkgbuild_version(
    pkgbuild_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Point an Arch PKGBUILD at a new upstream version.

    Sets ``pkgver`` to *new_version* and resets ``pkgrel`` to 1.  A missing
    or unreadable file is skipped; absent assignments only warn.
    """
    if not os.path.exists(pkgbuild_path):
        print("[INFO] PKGBUILD not found, skipping.")
        return

    try:
        with open(pkgbuild_path, "r", encoding="utf-8") as fh:
            text = fh.read()
    except Exception as exc:
        print(f"[WARN] Could not read PKGBUILD: {exc}")
        return

    text, ver_hits = re.subn(
        r"^(pkgver\s*=\s*)(.+)$",
        lambda m: f"{m.group(1)}{new_version}",
        text,
        flags=re.MULTILINE,
    )
    if ver_hits == 0:
        print("[WARN] No pkgver line found in PKGBUILD.")

    # A new upstream version restarts the package release counter at 1.
    text, rel_hits = re.subn(
        r"^(pkgrel\s*=\s*)(.+)$",
        lambda m: f"{m.group(1)}1",
        text,
        flags=re.MULTILINE,
    )
    if rel_hits == 0:
        print("[WARN] No pkgrel line found in PKGBUILD.")

    if preview:
        print(f"[PREVIEW] Would update PKGBUILD to pkgver={new_version}, pkgrel=1")
        return

    with open(pkgbuild_path, "w", encoding="utf-8") as fh:
        fh.write(text)
    print(f"Updated PKGBUILD to pkgver={new_version}, pkgrel=1")
def update_spec_version(
    spec_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Update the version in an RPM spec file, if present.

    Sets `Version:` to *new_version* and resets the numeric part of
    `Release:` to 1, keeping any suffix (e.g. `%{?dist}`).
    """
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping.")
        return
    try:
        with open(spec_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file: {exc}")
        return
    text, ver_hits = re.subn(
        r"^(Version:\s*)(.+)$",
        lambda m: f"{m.group(1)}{new_version}",
        text,
        flags=re.MULTILINE,
    )
    if ver_hits == 0:
        print("[WARN] No 'Version:' line found in spec file.")

    def _reset_release(m):
        # Preserve everything after the leading integer, e.g. "%{?dist}".
        tail = re.match(r"^(\d+)(.*)$", m.group(2).strip())
        suffix = tail.group(2) if tail else ""
        return f"{m.group(1)}1{suffix}"

    text, rel_hits = re.subn(
        r"^(Release:\s*)(.+)$",
        _reset_release,
        text,
        flags=re.MULTILINE,
    )
    if rel_hits == 0:
        print("[WARN] No 'Release:' line found in spec file.")
    if preview:
        print(
            "[PREVIEW] Would update spec file "
            f"{os.path.basename(spec_path)} to Version: {new_version}, Release: 1..."
        )
        return
    with open(spec_path, "w", encoding="utf-8") as handle:
        handle.write(text)
    print(
        f"Updated spec file {os.path.basename(spec_path)} "
        f"to Version: {new_version}, Release: 1..."
    )
def update_changelog(
    changelog_path: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> str:
    """
    Prepend a new release section to CHANGELOG.md with the new version,
    current date, and a message.

    Returns the message that was used (possibly editor-provided or the
    generic fallback).
    """
    today = date.today().isoformat()
    if message is None:
        if preview:
            message = "Automated release."
        else:
            print(
                "\n[INFO] No release message provided, opening editor for "
                "changelog entry...\n"
            )
            # Empty editor result falls back to the generic message.
            message = _open_editor_for_changelog() or "Automated release."
    entry = f"## [{new_version}] - {today}\n\n* {message}\n\n"
    existing = ""
    if os.path.exists(changelog_path):
        try:
            with open(changelog_path, "r", encoding="utf-8") as handle:
                existing = handle.read()
        except Exception as exc:
            print(f"[WARN] Could not read existing CHANGELOG.md: {exc}")
            existing = ""
    combined = entry + "\n" + existing if existing else entry
    print("\n================ CHANGELOG ENTRY ================")
    print(entry.rstrip())
    print("=================================================\n")
    if preview:
        print(f"[PREVIEW] Would prepend new entry for {new_version} to CHANGELOG.md")
        return message
    with open(changelog_path, "w", encoding="utf-8") as handle:
        handle.write(combined)
    print(f"Updated CHANGELOG.md with version {new_version}")
    return message
# ---------------------------------------------------------------------------
# Debian changelog helpers (with Git config fallback for maintainer)
# ---------------------------------------------------------------------------
def _get_debian_author() -> Tuple[str, str]:
"""
Determine the maintainer name/email for debian/changelog entries.
"""
name = os.environ.get("DEBFULLNAME")
email = os.environ.get("DEBEMAIL")
if not name:
name = os.environ.get("GIT_AUTHOR_NAME")
if not email:
email = os.environ.get("GIT_AUTHOR_EMAIL")
if not name:
name = get_config_value("user.name")
if not email:
email = get_config_value("user.email")
if not name:
name = "Unknown Maintainer"
if not email:
email = "unknown@example.com"
return name, email
def update_debian_changelog(
    debian_changelog_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """
    Prepend a new entry to debian/changelog, if it exists.

    The Debian revision is always `-1` for a fresh upstream release.
    """
    if not os.path.exists(debian_changelog_path):
        print("[INFO] debian/changelog not found, skipping.")
        return
    deb_version = f"{new_version}-1"
    stamp = datetime.now().astimezone().strftime("%a, %d %b %Y %H:%M:%S %z")
    name, email = _get_debian_author()
    summary = message.strip() if message else f"Automated release {new_version}."
    stanza = (
        f"{package_name} ({deb_version}) unstable; urgency=medium\n\n"
        f" * {summary}\n\n"
        f" -- {name} <{email}> {stamp}\n\n"
    )
    if preview:
        print(
            "[PREVIEW] Would prepend the following stanza to debian/changelog:\n"
            f"{stanza}"
        )
        return
    existing = ""
    try:
        with open(debian_changelog_path, "r", encoding="utf-8") as handle:
            existing = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read debian/changelog: {exc}")
        existing = ""
    with open(debian_changelog_path, "w", encoding="utf-8") as handle:
        handle.write(stanza + existing)
    print(f"Updated debian/changelog with version {deb_version}")
# ---------------------------------------------------------------------------
# Fedora / RPM spec %changelog helper
# ---------------------------------------------------------------------------
def update_spec_changelog(
    spec_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """
    Prepend a new entry to the %changelog section of an RPM spec file,
    if present.

    Typical RPM-style entry:

        * Tue Dec 09 2025 John Doe <john@example.com> - 0.5.1-1
        - Your changelog message
    """
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping spec changelog update.")
        return
    try:
        with open(spec_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file for changelog update: {exc}")
        return
    release_version = f"{new_version}-1"
    stamp = datetime.now().astimezone().strftime("%a %b %d %Y")
    # Reuse Debian maintainer discovery for author name/email.
    name, email = _get_debian_author()
    summary = message.strip() if message else f"Automated release {new_version}."
    stanza = (
        f"* {stamp} {name} <{email}> - {release_version}\n"
        f"- {summary}\n\n"
    )
    marker = "%changelog"
    pos = text.find(marker)
    if pos == -1:
        # No %changelog section yet: append one at the end.
        updated = text.rstrip() + "\n\n%changelog\n" + stanza
    else:
        # Insert stanza right after the %changelog line.
        head = text[: pos + len(marker)]
        tail = text[pos + len(marker) :]
        updated = head + "\n" + stanza + tail.lstrip("\n")
    if preview:
        print(
            "[PREVIEW] Would update RPM %changelog section with the following "
            "stanza:\n"
            f"{stanza}"
        )
        return
    try:
        with open(spec_path, "w", encoding="utf-8") as handle:
            handle.write(updated)
    except Exception as exc:
        print(f"[WARN] Failed to write updated spec changelog section: {exc}")
        return
    print(
        f"Updated RPM %changelog section in {os.path.basename(spec_path)} "
        f"for {package_name} {release_version}"
    )

View File

@@ -0,0 +1,35 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Backwards-compatible facade for the release file update helpers.
Implementations live in this package:
pkgmgr.actions.release.files.*
Keep this package stable so existing imports continue to work, e.g.:
from pkgmgr.actions.release.files import update_pyproject_version
"""
from __future__ import annotations
from .editor import _open_editor_for_changelog
from .pyproject import update_pyproject_version
from .flake import update_flake_version
from .pkgbuild import update_pkgbuild_version
from .rpm_spec import update_spec_version
from .changelog_md import update_changelog
from .debian import _get_debian_author, update_debian_changelog
from .rpm_changelog import update_spec_changelog
__all__ = [
"_open_editor_for_changelog",
"update_pyproject_version",
"update_flake_version",
"update_pkgbuild_version",
"update_spec_version",
"update_changelog",
"_get_debian_author",
"update_debian_changelog",
"update_spec_changelog",
]

View File

@@ -0,0 +1,62 @@
from __future__ import annotations
import os
from datetime import date
from typing import Optional
from .editor import _open_editor_for_changelog
def update_changelog(
    changelog_path: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> str:
    """
    Prepend a new release section to CHANGELOG.md with the new version,
    current date, and a message.

    Returns the message that was used.
    """
    today = date.today().isoformat()
    if message is None:
        if preview:
            message = "Automated release."
        else:
            print(
                "\n[INFO] No release message provided, opening editor for changelog entry...\n"
            )
            # Empty editor output falls back to the generic message.
            message = _open_editor_for_changelog() or "Automated release."
    entry = f"## [{new_version}] - {today}\n\n* {message}\n\n"
    existing = ""
    if os.path.exists(changelog_path):
        try:
            with open(changelog_path, "r", encoding="utf-8") as handle:
                existing = handle.read()
        except Exception as exc:
            print(f"[WARN] Could not read existing CHANGELOG.md: {exc}")
            existing = ""
    combined = entry + "\n" + existing if existing else entry
    print("\n================ CHANGELOG ENTRY ================")
    print(entry.rstrip())
    print("=================================================\n")
    if preview:
        print(f"[PREVIEW] Would prepend new entry for {new_version} to CHANGELOG.md")
        return message
    with open(changelog_path, "w", encoding="utf-8") as handle:
        handle.write(combined)
    print(f"Updated CHANGELOG.md with version {new_version}")
    return message

View File

@@ -0,0 +1,74 @@
from __future__ import annotations
import os
from datetime import datetime
from typing import Optional, Tuple
from pkgmgr.core.git.queries import get_config_value
def _get_debian_author() -> Tuple[str, str]:
name = os.environ.get("DEBFULLNAME")
email = os.environ.get("DEBEMAIL")
if not name:
name = os.environ.get("GIT_AUTHOR_NAME")
if not email:
email = os.environ.get("GIT_AUTHOR_EMAIL")
if not name:
name = get_config_value("user.name")
if not email:
email = get_config_value("user.email")
if not name:
name = "Unknown Maintainer"
if not email:
email = "unknown@example.com"
return name, email
def update_debian_changelog(
    debian_changelog_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """Prepend a release stanza to debian/changelog, if the file exists."""
    if not os.path.exists(debian_changelog_path):
        print("[INFO] debian/changelog not found, skipping.")
        return
    deb_version = f"{new_version}-1"
    stamp = datetime.now().astimezone().strftime("%a, %d %b %Y %H:%M:%S %z")
    name, email = _get_debian_author()
    summary = message.strip() if message else f"Automated release {new_version}."
    stanza = (
        f"{package_name} ({deb_version}) unstable; urgency=medium\n\n"
        f" * {summary}\n\n"
        f" -- {name} <{email}> {stamp}\n\n"
    )
    if preview:
        print(
            "[PREVIEW] Would prepend the following stanza to debian/changelog:\n"
            f"{stanza}"
        )
        return
    existing = ""
    try:
        with open(debian_changelog_path, "r", encoding="utf-8") as handle:
            existing = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read debian/changelog: {exc}")
        existing = ""
    with open(debian_changelog_path, "w", encoding="utf-8") as handle:
        handle.write(stanza + existing)
    print(f"Updated debian/changelog with version {deb_version}")

View File

@@ -0,0 +1,45 @@
from __future__ import annotations
import os
import subprocess
import tempfile
from typing import Optional
def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
editor = os.environ.get("EDITOR", "nano")
with tempfile.NamedTemporaryFile(
mode="w+",
delete=False,
encoding="utf-8",
) as tmp:
tmp_path = tmp.name
tmp.write(
"# Write the changelog entry for this release.\n"
"# Lines starting with '#' will be ignored.\n"
"# Empty result will fall back to a generic message.\n\n"
)
if initial_message:
tmp.write(initial_message.strip() + "\n")
tmp.flush()
try:
subprocess.call([editor, tmp_path])
except FileNotFoundError:
print(
f"[WARN] Editor {editor!r} not found; proceeding without "
"interactive changelog message."
)
try:
with open(tmp_path, "r", encoding="utf-8") as f:
content = f.read()
finally:
try:
os.remove(tmp_path)
except OSError:
pass
lines = [line for line in content.splitlines() if not line.strip().startswith("#")]
return "\n".join(lines).strip()

View File

@@ -0,0 +1,39 @@
from __future__ import annotations
import os
import re
def update_flake_version(
    flake_path: str, new_version: str, preview: bool = False
) -> None:
    """Rewrite every `version = "..."` assignment in flake.nix to *new_version*."""
    if not os.path.exists(flake_path):
        print("[INFO] flake.nix not found, skipping.")
        return
    try:
        with open(flake_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read flake.nix: {exc}")
        return
    updated, hits = re.subn(
        r'(version\s*=\s*")([^"]+)(")',
        lambda m: f"{m.group(1)}{new_version}{m.group(3)}",
        text,
    )
    if hits == 0:
        print("[WARN] No version found in flake.nix.")
        return
    if preview:
        print(f"[PREVIEW] Would update flake.nix version to {new_version}")
        return
    with open(flake_path, "w", encoding="utf-8") as handle:
        handle.write(updated)
    print(f"Updated flake.nix version to {new_version}")

View File

@@ -0,0 +1,41 @@
from __future__ import annotations
import os
import re
def update_pkgbuild_version(
    pkgbuild_path: str, new_version: str, preview: bool = False
) -> None:
    """Set pkgver to *new_version* and reset pkgrel to 1 in a PKGBUILD."""
    if not os.path.exists(pkgbuild_path):
        print("[INFO] PKGBUILD not found, skipping.")
        return
    try:
        with open(pkgbuild_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read PKGBUILD: {exc}")
        return
    # Apply both rewrites in order; missing lines simply leave text unchanged.
    for pattern, repl in (
        (r"^(pkgver\s*=\s*)(.+)$", lambda m: f"{m.group(1)}{new_version}"),
        (r"^(pkgrel\s*=\s*)(.+)$", lambda m: f"{m.group(1)}1"),
    ):
        text, _ = re.subn(pattern, repl, text, flags=re.MULTILINE)
    if preview:
        print(f"[PREVIEW] Would update PKGBUILD to pkgver={new_version}, pkgrel=1")
        return
    with open(pkgbuild_path, "w", encoding="utf-8") as handle:
        handle.write(text)
    print(f"Updated PKGBUILD to pkgver={new_version}, pkgrel=1")

View File

@@ -0,0 +1,45 @@
from __future__ import annotations
import os
import re
def update_pyproject_version(
    pyproject_path: str, new_version: str, preview: bool = False
) -> None:
    """
    Set ``version`` inside the ``[project]`` table of pyproject.toml.

    Raises:
        RuntimeError: if the [project] table or its version key is missing.
    """
    if not os.path.exists(pyproject_path):
        print(f"[INFO] pyproject.toml not found at: {pyproject_path}, skipping.")
        return
    try:
        with open(pyproject_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except OSError as exc:
        print(f"[WARN] Could not read pyproject.toml: {exc}")
        return
    # Scope the substitution to the [project] table only, so version keys
    # in other tables (e.g. [tool.*]) stay untouched.
    section = re.search(r"(?ms)^\s*\[project\]\s*$.*?(?=^\s*\[|\Z)", text)
    if section is None:
        raise RuntimeError("Missing [project] section in pyproject.toml")
    replaced, hits = re.subn(
        r'(?m)^(\s*version\s*=\s*")([^"]+)(")\s*$',
        lambda m: f"{m.group(1)}{new_version}{m.group(3)}",
        section.group(0),
    )
    if hits == 0:
        raise RuntimeError("Missing version key in [project] section")
    if preview:
        print(f"[PREVIEW] Would update pyproject.toml version to {new_version}")
        return
    updated = text[: section.start()] + replaced + text[section.end() :]
    with open(pyproject_path, "w", encoding="utf-8") as handle:
        handle.write(updated)
    print(f"Updated pyproject.toml version to {new_version}")

View File

@@ -0,0 +1,67 @@
from __future__ import annotations
import os
from datetime import datetime
from typing import Optional
from .debian import _get_debian_author
def update_spec_changelog(
    spec_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """Prepend a release entry to the %changelog section of an RPM spec file."""
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping spec changelog update.")
        return
    try:
        with open(spec_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file for changelog update: {exc}")
        return
    release_version = f"{new_version}-1"
    stamp = datetime.now().astimezone().strftime("%a %b %d %Y")
    # Maintainer identity is shared with the Debian changelog helper.
    name, email = _get_debian_author()
    summary = message.strip() if message else f"Automated release {new_version}."
    stanza = (
        f"* {stamp} {name} <{email}> - {release_version}\n"
        f"- {summary}\n\n"
    )
    marker = "%changelog"
    pos = text.find(marker)
    if pos == -1:
        # No %changelog section yet: create one at the end of the file.
        updated = text.rstrip() + "\n\n%changelog\n" + stanza
    else:
        head = text[: pos + len(marker)]
        tail = text[pos + len(marker) :]
        updated = head + "\n" + stanza + tail.lstrip("\n")
    if preview:
        print(
            "[PREVIEW] Would update RPM %changelog section with the following stanza:\n"
            f"{stanza}"
        )
        return
    try:
        with open(spec_path, "w", encoding="utf-8") as handle:
            handle.write(updated)
    except Exception as exc:
        print(f"[WARN] Failed to write updated spec changelog section: {exc}")
        return
    print(
        f"Updated RPM %changelog section in {os.path.basename(spec_path)} "
        f"for {package_name} {release_version}"
    )

View File

@@ -0,0 +1,66 @@
from __future__ import annotations
import os
import re
def update_spec_version(
    spec_path: str, new_version: str, preview: bool = False
) -> None:
    """
    Update the version in an RPM spec file, if present.

    `Version:` becomes *new_version*; the numeric part of `Release:` resets
    to 1 while any suffix (e.g. `%{?dist}`) is kept.
    """
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping.")
        return
    try:
        with open(spec_path, "r", encoding="utf-8") as handle:
            text = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file: {exc}")
        return
    text, ver_hits = re.subn(
        r"^(Version:\s*)(.+)$",
        lambda m: f"{m.group(1)}{new_version}",
        text,
        flags=re.MULTILINE,
    )
    if ver_hits == 0:
        print("[WARN] No 'Version:' line found in spec file.")

    def _reset_release(m):
        tail = re.match(r"^(\d+)(.*)$", m.group(2).strip())
        suffix = tail.group(2) if tail else ""
        return f"{m.group(1)}1{suffix}"

    text, rel_hits = re.subn(
        r"^(Release:\s*)(.+)$",
        _reset_release,
        text,
        flags=re.MULTILINE,
    )
    if rel_hits == 0:
        print("[WARN] No 'Release:' line found in spec file.")
    if preview:
        print(
            "[PREVIEW] Would update spec file "
            f"{os.path.basename(spec_path)} to Version: {new_version}, Release: 1..."
        )
        return
    with open(spec_path, "w", encoding="utf-8") as handle:
        handle.write(text)
    print(
        f"Updated spec file {os.path.basename(spec_path)} "
        f"to Version: {new_version}, Release: 1..."
    )

View File

@@ -80,7 +80,9 @@ def is_highest_version_tag(tag: str) -> bool:
return True
latest = max(parsed_all)
print(f"[INFO] Latest tag (parsed): v{'.'.join(map(str, latest))}, Current tag: {tag}")
print(
f"[INFO] Latest tag (parsed): v{'.'.join(map(str, latest))}, Current tag: {tag}"
)
return parsed_current >= latest
@@ -93,7 +95,9 @@ def update_latest_tag(new_tag: str, *, preview: bool = False) -> None:
- 'latest' is forced (floating tag), therefore the push uses --force.
"""
target_ref = f"{new_tag}^{{}}"
print(f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})...")
print(
f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})..."
)
tag_force_annotated(
name="latest",

View File

@@ -5,7 +5,7 @@ import sys
from typing import Optional
from pkgmgr.actions.branch import close_branch
from pkgmgr.core.git import GitError
from pkgmgr.core.git import GitRunError
from pkgmgr.core.git.commands import add, commit, push, tag_annotated
from pkgmgr.core.git.queries import get_current_branch
from pkgmgr.core.repository.paths import resolve_repo_paths
@@ -40,7 +40,7 @@ def _release_impl(
# Determine current branch early
try:
branch = get_current_branch() or "main"
except GitError:
except GitRunError:
branch = "main"
print(f"Releasing on branch: {branch}")
@@ -76,7 +76,9 @@ def _release_impl(
if paths.arch_pkgbuild:
update_pkgbuild_version(paths.arch_pkgbuild, new_ver_str, preview=preview)
else:
print("[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping.")
print(
"[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping."
)
if paths.rpm_spec:
update_spec_version(paths.rpm_spec, new_ver_str, preview=preview)
@@ -123,7 +125,9 @@ def _release_impl(
paths.rpm_spec,
paths.debian_changelog,
]
existing_files = [p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)]
existing_files = [
p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)
]
if preview:
add(existing_files, preview=True)
@@ -135,13 +139,17 @@ def _release_impl(
if is_highest_version_tag(new_tag):
update_latest_tag(new_tag, preview=True)
else:
print(f"[PREVIEW] Skipping 'latest' update (tag {new_tag} is not the highest).")
print(
f"[PREVIEW] Skipping 'latest' update (tag {new_tag} is not the highest)."
)
if close and branch not in ("main", "master"):
if force:
print(f"[PREVIEW] Would delete branch {branch} (forced).")
else:
print(f"[PREVIEW] Would ask whether to delete branch {branch} after release.")
print(
f"[PREVIEW] Would ask whether to delete branch {branch} after release."
)
return
add(existing_files, preview=False)
@@ -157,8 +165,10 @@ def _release_impl(
if is_highest_version_tag(new_tag):
update_latest_tag(new_tag, preview=False)
else:
print(f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest).")
except GitError as exc:
print(
f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest)."
)
except GitRunError as exc:
print(f"[WARN] Failed to update floating 'latest' tag for {new_tag}: {exc}")
print("'latest' tag was not updated.")
@@ -166,7 +176,9 @@ def _release_impl(
if close:
if branch in ("main", "master"):
print(f"[INFO] close=True but current branch is {branch}; skipping branch deletion.")
print(
f"[INFO] close=True but current branch is {branch}; skipping branch deletion."
)
return
if not should_delete_branch(force=force):

View File

@@ -55,7 +55,9 @@ def clone_repos(
clone_url = _build_clone_url(repo, clone_mode)
if not clone_url:
print(f"[WARNING] Cannot build clone URL for '{repo_identifier}'. Skipping.")
print(
f"[WARNING] Cannot build clone URL for '{repo_identifier}'. Skipping."
)
continue
shallow = clone_mode == "shallow"
@@ -84,7 +86,11 @@ def clone_repos(
continue
print(f"[WARNING] SSH clone failed for '{repo_identifier}': {exc}")
choice = input("Do you want to attempt HTTPS clone instead? (y/N): ").strip().lower()
choice = (
input("Do you want to attempt HTTPS clone instead? (y/N): ")
.strip()
.lower()
)
if choice != "y":
print(f"[INFO] HTTPS clone not attempted for '{repo_identifier}'.")
continue

View File

@@ -12,8 +12,8 @@ class MirrorBootstrapper:
"""
MIRRORS is the single source of truth.
We write defaults to MIRRORS and then call mirror setup which will
configure git remotes based on MIRRORS content (but only for git URLs).
Defaults are written to MIRRORS and mirror setup derives
git remotes exclusively from that file (git URLs only).
"""
def write_defaults(
@@ -25,10 +25,8 @@ class MirrorBootstrapper:
preview: bool,
) -> None:
mirrors = {
# preferred SSH url is supplied by CreateRepoPlanner.primary_remote
"origin": primary,
# metadata only: must NEVER be configured as a git remote
"pypi": f"https://pypi.org/project/{name}/",
primary,
f"https://pypi.org/project/{name}/",
}
write_mirrors_file(repo_dir, mirrors, preview=preview)
@@ -41,7 +39,8 @@ class MirrorBootstrapper:
preview: bool,
remote: bool,
) -> None:
# IMPORTANT: do NOT set repo["mirrors"] here.
# IMPORTANT:
# Do NOT set repo["mirrors"] here.
# MIRRORS file is the single source of truth.
setup_mirrors(
selected_repos=[repo],

View File

@@ -63,6 +63,4 @@ def _strip_git_suffix(name: str) -> str:
def _ensure_valid_repo_name(name: str) -> None:
if not _NAME_RE.fullmatch(name):
raise ValueError(
"Repository name must match: lowercase a-z, 0-9, '_' and '-'."
)
raise ValueError("Repository name must match: lowercase a-z, 0-9, '_' and '-'.")

View File

@@ -66,9 +66,7 @@ class TemplateRenderer:
for root, _, files in os.walk(self.templates_dir):
for fn in files:
if fn.endswith(".j2"):
rel = os.path.relpath(
os.path.join(root, fn), self.templates_dir
)
rel = os.path.relpath(os.path.join(root, fn), self.templates_dir)
print(f"[Preview] Would render template: {rel} -> {rel[:-3]}")
@staticmethod

View File

@@ -24,9 +24,13 @@ def deinstall_repos(
# Remove alias link/file (interactive)
if os.path.exists(alias_path):
confirm = input(
f"Are you sure you want to delete link '{alias_path}' for {repo_identifier}? [y/N]: "
).strip().lower()
confirm = (
input(
f"Are you sure you want to delete link '{alias_path}' for {repo_identifier}? [y/N]: "
)
.strip()
.lower()
)
if confirm == "y":
if preview:
print(f"[Preview] Would remove link '{alias_path}'.")

View File

@@ -3,22 +3,33 @@ import os
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir
def delete_repos(selected_repos, repositories_base_dir, all_repos, preview=False):
for repo in selected_repos:
repo_identifier = get_repo_identifier(repo, all_repos)
repo_dir = get_repo_dir(repositories_base_dir, repo)
if os.path.exists(repo_dir):
confirm = input(f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: ").strip().lower()
confirm = (
input(
f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: "
)
.strip()
.lower()
)
if confirm == "y":
if preview:
print(f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}.")
print(
f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}."
)
else:
try:
shutil.rmtree(repo_dir)
print(f"Deleted repository directory '{repo_dir}' for {repo_identifier}.")
print(
f"Deleted repository directory '{repo_dir}' for {repo_identifier}."
)
except Exception as e:
print(f"Error deleting '{repo_dir}' for {repo_identifier}: {e}")
else:
print(f"Skipped deletion of '{repo_dir}' for {repo_identifier}.")
else:
print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")
print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")

View File

@@ -233,9 +233,7 @@ def list_repositories(
categories.append(str(repo["category"]))
yaml_tags: List[str] = list(map(str, repo.get("tags", [])))
display_tags: List[str] = sorted(
set(yaml_tags + list(map(str, extra_tags)))
)
display_tags: List[str] = sorted(set(yaml_tags + list(map(str, extra_tags))))
rows.append(
{
@@ -288,13 +286,7 @@ def list_repositories(
status_padded = status.ljust(status_width)
status_colored = _color_status(status_padded)
print(
f"{ident_col} "
f"{status_colored} "
f"{cat_col} "
f"{tag_col} "
f"{dir_col}"
)
print(f"{ident_col} {status_colored} {cat_col} {tag_col} {dir_col}")
# ------------------------------------------------------------------
# Detailed section (alias value red, same status coloring)

View File

@@ -55,12 +55,16 @@ class UpdateManager:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"pull failed (exit={code})"))
if not quiet:
print(f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing...")
print(
f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing..."
)
continue
except Exception as exc:
failures.append((identifier, f"pull failed: {exc}"))
if not quiet:
print(f"[Warning] update: pull failed for {identifier}: {exc}. Continuing...")
print(
f"[Warning] update: pull failed for {identifier}: {exc}. Continuing..."
)
continue
try:
@@ -82,12 +86,16 @@ class UpdateManager:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"install failed (exit={code})"))
if not quiet:
print(f"[Warning] update: install failed for {identifier} (exit={code}). Continuing...")
print(
f"[Warning] update: install failed for {identifier} (exit={code}). Continuing..."
)
continue
except Exception as exc:
failures.append((identifier, f"install failed: {exc}"))
if not quiet:
print(f"[Warning] update: install failed for {identifier}: {exc}. Continuing...")
print(
f"[Warning] update: install failed for {identifier}: {exc}. Continuing..."
)
continue
if failures and not quiet:

View File

@@ -31,6 +31,7 @@ class OSReleaseInfo:
"""
Minimal /etc/os-release representation for distro detection.
"""
id: str = ""
id_like: str = ""
pretty_name: str = ""
@@ -63,4 +64,6 @@ class OSReleaseInfo:
def is_fedora_family(self) -> bool:
ids = self.ids()
return bool(ids.intersection({"fedora", "rhel", "centos", "rocky", "almalinux"}))
return bool(
ids.intersection({"fedora", "rhel", "centos", "rocky", "almalinux"})
)

View File

@@ -58,7 +58,9 @@ class SystemUpdater:
run_command("sudo pacman -Syu --noconfirm", preview=preview)
return
print("[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman).")
print(
"[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman)."
)
def _update_debian(self, *, preview: bool) -> None:
from pkgmgr.core.command.run import run_command
@@ -67,7 +69,9 @@ class SystemUpdater:
apt_get = shutil.which("apt-get")
if not (sudo and apt_get):
print("[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get).")
print(
"[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get)."
)
return
env = "DEBIAN_FRONTEND=noninteractive"

View File

@@ -29,6 +29,7 @@ For details on any command, run:
\033[1mpkgmgr <command> --help\033[0m
"""
def main() -> None:
"""
Entry point for the pkgmgr CLI.
@@ -41,9 +42,7 @@ def main() -> None:
repositories_dir = os.path.expanduser(
directories.get("repositories", "~/Repositories")
)
binaries_dir = os.path.expanduser(
directories.get("binaries", "~/.local/bin")
)
binaries_dir = os.path.expanduser(directories.get("binaries", "~/.local/bin"))
# Ensure the merged config actually contains the resolved directories
config_merged.setdefault("directories", {})

View File

@@ -135,9 +135,7 @@ def handle_changelog(
target_tag=range_arg,
)
if cur_tag is None:
print(
f"[WARN] Tag {range_arg!r} not found or not a SemVer tag."
)
print(f"[WARN] Tag {range_arg!r} not found or not a SemVer tag.")
print("[INFO] Falling back to full history.")
from_ref = None
to_ref = None

View File

@@ -213,9 +213,7 @@ def handle_config(args, ctx: CLIContext) -> None:
)
if key == mod_key:
entry["ignore"] = args.set == "true"
print(
f"Set ignore for {key} to {entry['ignore']}"
)
print(f"Set ignore for {key} to {entry['ignore']}")
save_user_config(user_config, user_config_path)
return

View File

@@ -4,7 +4,12 @@ from __future__ import annotations
import sys
from typing import Any, Dict, List
from pkgmgr.actions.mirror import diff_mirrors, list_mirrors, merge_mirrors, setup_mirrors
from pkgmgr.actions.mirror import (
diff_mirrors,
list_mirrors,
merge_mirrors,
setup_mirrors,
)
from pkgmgr.cli.context import CLIContext
Repository = Dict[str, Any]
@@ -56,11 +61,15 @@ def handle_mirror_command(
preview = getattr(args, "preview", False)
if source == target:
print("[ERROR] For 'mirror merge', source and target must differ (config vs file).")
print(
"[ERROR] For 'mirror merge', source and target must differ (config vs file)."
)
sys.exit(2)
explicit_config_path = getattr(args, "config_path", None)
user_config_path = explicit_config_path or getattr(ctx, "user_config_path", None)
user_config_path = explicit_config_path or getattr(
ctx, "user_config_path", None
)
merge_mirrors(
selected_repos=selected,

View File

@@ -18,7 +18,9 @@ def handle_publish(args, ctx: CLIContext, selected: List[Repository]) -> None:
for repo in selected:
identifier = get_repo_identifier(repo, ctx.all_repositories)
repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
repo_dir = repo.get("directory") or get_repo_dir(
ctx.repositories_base_dir, repo
)
if not os.path.isdir(repo_dir):
print(f"[WARN] Skipping {identifier}: directory missing.")

View File

@@ -36,9 +36,13 @@ def handle_release(
identifier = get_repo_identifier(repo, ctx.all_repositories)
try:
repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
repo_dir = repo.get("directory") or get_repo_dir(
ctx.repositories_base_dir, repo
)
except Exception as exc:
print(f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}")
print(
f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}"
)
continue
if not os.path.isdir(repo_dir):

View File

@@ -32,9 +32,8 @@ def _resolve_repository_directory(repository: Repository, ctx: CLIContext) -> st
if repo_dir:
return repo_dir
base_dir = (
getattr(ctx, "repositories_base_dir", None)
or getattr(ctx, "repositories_dir", None)
base_dir = getattr(ctx, "repositories_base_dir", None) or getattr(
ctx, "repositories_dir", None
)
if not base_dir:
raise RuntimeError(

View File

@@ -1,115 +1,41 @@
from __future__ import annotations
from __future__ import annotations
import json
import os
from typing import Any, Dict, List
from pkgmgr .cli .context import CLIContext
from pkgmgr .core .command .run import run_command
from pkgmgr .core .repository .identifier import get_repo_identifier
from pkgmgr .core .repository .dir import get_repo_dir
from typing import Any, Dict, List
from pkgmgr.cli.context import CLIContext
from pkgmgr.cli.tools import open_vscode_workspace
from pkgmgr.cli.tools.paths import resolve_repository_path
from pkgmgr.core.command.run import run_command
Repository = Dict[str, Any]
def _resolve_repository_path(repository: Repository, ctx: CLIContext) -> str:
"""
Resolve the filesystem path for a repository.
Priority:
1. Use explicit keys if present (directory / path / workspace / workspace_dir).
2. Fallback to get_repo_dir(...) using the repositories base directory
from the CLI context.
"""
# 1) Explicit path-like keys on the repository object
for key in ("directory", "path", "workspace", "workspace_dir"):
value = repository.get(key)
if value:
return value
# 2) Fallback: compute from base dir + repository metadata
base_dir = (
getattr(ctx, "repositories_base_dir", None)
or getattr(ctx, "repositories_dir", None)
)
if not base_dir:
raise RuntimeError(
"Cannot resolve repositories base directory from context; "
"expected ctx.repositories_base_dir or ctx.repositories_dir."
)
return get_repo_dir(base_dir, repository)
def handle_tools_command(
args,
ctx: CLIContext,
selected: List[Repository],
) -> None:
# ------------------------------------------------------------------
# nautilus "explore" command
# ------------------------------------------------------------------
if args.command == "explore":
for repository in selected:
repo_path = _resolve_repository_path(repository, ctx)
run_command(
f'nautilus "{repo_path}" & disown'
)
return
repo_path = resolve_repository_path(repository, ctx)
run_command(f'nautilus "{repo_path}" & disown')
return
# ------------------------------------------------------------------
# GNOME terminal command
# ------------------------------------------------------------------
if args.command == "terminal":
for repository in selected:
repo_path = _resolve_repository_path(repository, ctx)
run_command(
f'gnome-terminal --tab --working-directory="{repo_path}"'
)
return
repo_path = resolve_repository_path(repository, ctx)
run_command(f'gnome-terminal --tab --working-directory="{repo_path}"')
return
# ------------------------------------------------------------------
# VS Code workspace command
# ------------------------------------------------------------------
if args.command == "code":
if not selected:
print("No repositories selected.")
return
identifiers = [
get_repo_identifier(repo, ctx.all_repositories)
for repo in selected
]
sorted_identifiers = sorted(identifiers)
workspace_name = "_".join(sorted_identifiers) + ".code-workspace"
directories_cfg = ctx.config_merged.get("directories") or {}
workspaces_dir = os.path.expanduser(
directories_cfg.get("workspaces", "~/Workspaces")
)
os.makedirs(workspaces_dir, exist_ok=True)
workspace_file = os.path.join(workspaces_dir, workspace_name)
folders = [
{"path": _resolve_repository_path(repository, ctx)}
for repository in selected
]
workspace_data = {
"folders": folders,
"settings": {},
}
if not os.path.exists(workspace_file):
with open(workspace_file, "w", encoding="utf-8") as f:
json.dump(workspace_data, f, indent=4)
print(f"Created workspace file: {workspace_file}")
else:
print(f"Using existing workspace file: {workspace_file}")
run_command(f'code "{workspace_file}"')
open_vscode_workspace(ctx, selected)
return

View File

@@ -38,9 +38,9 @@ def _print_pkgmgr_self_version() -> None:
# Common distribution/module naming variants.
python_candidates = [
"package-manager", # PyPI dist name in your project
"package_manager", # module-ish variant
"pkgmgr", # console/alias-ish
"package-manager", # PyPI dist name in your project
"package_manager", # module-ish variant
"pkgmgr", # console/alias-ish
]
nix_candidates = [
"pkgmgr",

View File

@@ -33,8 +33,7 @@ def add_branch_subparsers(
"name",
nargs="?",
help=(
"Name of the new branch (optional; will be asked interactively "
"if omitted)"
"Name of the new branch (optional; will be asked interactively if omitted)"
),
)
branch_open.add_argument(
@@ -54,8 +53,7 @@ def add_branch_subparsers(
"name",
nargs="?",
help=(
"Name of the branch to close (optional; current branch is used "
"if omitted)"
"Name of the branch to close (optional; current branch is used if omitted)"
),
)
branch_close.add_argument(
@@ -84,8 +82,7 @@ def add_branch_subparsers(
"name",
nargs="?",
help=(
"Name of the branch to drop (optional; current branch is used "
"if omitted)"
"Name of the branch to drop (optional; current branch is used if omitted)"
),
)
branch_drop.add_argument(

View File

@@ -20,7 +20,9 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
required=True,
)
mirror_list = mirror_subparsers.add_parser("list", help="List configured mirrors for repositories")
mirror_list = mirror_subparsers.add_parser(
"list", help="List configured mirrors for repositories"
)
add_identifier_arguments(mirror_list)
mirror_list.add_argument(
"--source",
@@ -29,15 +31,21 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
help="Which mirror source to show.",
)
mirror_diff = mirror_subparsers.add_parser("diff", help="Show differences between config mirrors and MIRRORS file")
mirror_diff = mirror_subparsers.add_parser(
"diff", help="Show differences between config mirrors and MIRRORS file"
)
add_identifier_arguments(mirror_diff)
mirror_merge = mirror_subparsers.add_parser(
"merge",
help="Merge mirrors between config and MIRRORS file (example: pkgmgr mirror merge config file --all)",
)
mirror_merge.add_argument("source", choices=["config", "file"], help="Source of mirrors.")
mirror_merge.add_argument("target", choices=["config", "file"], help="Target of mirrors.")
mirror_merge.add_argument(
"source", choices=["config", "file"], help="Source of mirrors."
)
mirror_merge.add_argument(
"target", choices=["config", "file"], help="Target of mirrors."
)
add_identifier_arguments(mirror_merge)
mirror_merge.add_argument(
"--config-path",

View File

@@ -48,9 +48,6 @@ def add_navigation_subparsers(
"--command",
nargs=argparse.REMAINDER,
dest="shell_command",
help=(
"The shell command (and its arguments) to execute in each "
"repository"
),
help=("The shell command (and its arguments) to execute in each repository"),
default=[],
)

View File

@@ -53,10 +53,7 @@ def _add_proxy_identifier_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"identifiers",
nargs="*",
help=(
"Identifier(s) for repositories. "
"Default: Repository of current folder."
),
help=("Identifier(s) for repositories. Default: Repository of current folder."),
)
parser.add_argument(
"--all",
@@ -118,12 +115,7 @@ def _proxy_has_explicit_selection(args: argparse.Namespace) -> bool:
string_filter = getattr(args, "string", "") or ""
# Proxy commands currently do not support --tag, so it is not checked here.
return bool(
use_all
or identifiers
or categories
or string_filter
)
return bool(use_all or identifiers or categories or string_filter)
def _select_repo_for_current_directory(
@@ -204,9 +196,7 @@ def maybe_handle_proxy(args: argparse.Namespace, ctx: CLIContext) -> bool:
If the top-level command is one of the proxy subcommands
(git / docker / docker compose), handle it here and return True.
"""
all_proxy_subcommands = {
sub for subs in PROXY_COMMANDS.values() for sub in subs
}
all_proxy_subcommands = {sub for subs in PROXY_COMMANDS.values() for sub in subs}
if args.command not in all_proxy_subcommands:
return False

View File

@@ -0,0 +1,5 @@
from __future__ import annotations
from .vscode import open_vscode_workspace
__all__ = ["open_vscode_workspace"]

View File

@@ -0,0 +1,34 @@
from __future__ import annotations
from typing import Any, Dict
from pkgmgr.cli.context import CLIContext
from pkgmgr.core.repository.dir import get_repo_dir
Repository = Dict[str, Any]
def resolve_repository_path(repository: Repository, ctx: CLIContext) -> str:
    """
    Determine the filesystem location of *repository*.

    Explicit path-like keys on the repository mapping win; otherwise the
    path is derived via ``get_repo_dir`` from the repositories base
    directory exposed by the CLI context.

    Raises:
        RuntimeError: if no explicit key is set and the context provides
            neither ``repositories_base_dir`` nor ``repositories_dir``.
    """
    # Explicit keys take precedence, in this fixed order.
    for candidate in ("directory", "path", "workspace", "workspace_dir"):
        configured = repository.get(candidate)
        if configured:
            return configured

    # Fall back to the base directory configured on the context.
    base_dir = getattr(ctx, "repositories_base_dir", None)
    if not base_dir:
        base_dir = getattr(ctx, "repositories_dir", None)
    if not base_dir:
        raise RuntimeError(
            "Cannot resolve repositories base directory from context; "
            "expected ctx.repositories_base_dir or ctx.repositories_dir."
        )
    return get_repo_dir(base_dir, repository)

View File

@@ -0,0 +1,104 @@
from __future__ import annotations
import json
import os
import shutil
from typing import Any, Dict, List
from pkgmgr.cli.context import CLIContext
from pkgmgr.cli.tools.paths import resolve_repository_path
from pkgmgr.core.command.run import run_command
from pkgmgr.core.repository.identifier import get_repo_identifier
Repository = Dict[str, Any]
def _ensure_vscode_cli_available() -> None:
    """
    Abort with a descriptive RuntimeError when the VS Code CLI ('code')
    cannot be located on PATH.
    """
    if shutil.which("code") is not None:
        return
    raise RuntimeError(
        "VS Code CLI ('code') not found in PATH.\n\n"
        "Hint:\n"
        " Install Visual Studio Code and ensure the 'code' command is available.\n"
        " VS Code → Command Palette → 'Shell Command: Install code command in PATH'\n"
    )
def _ensure_identifiers_are_filename_safe(identifiers: List[str]) -> None:
    """
    Reject identifiers that cannot be embedded in a workspace filename.

    An identifier containing '/' (or the platform path separator) means the
    repository has not been given a short identifier yet.
    """
    offenders = []
    for identifier in identifiers:
        if "/" in identifier or os.sep in identifier:
            offenders.append(identifier)
    if not offenders:
        return
    listing = "\n".join(f" - {i}" for i in offenders)
    raise RuntimeError(
        "Cannot create VS Code workspace.\n\n"
        "The following repositories are not yet identified "
        "(identifier contains '/'): \n"
        + listing
        + "\n\n"
        "Hint:\n"
        " The repository has no short identifier yet.\n"
        " Add an explicit identifier in your configuration before using `pkgmgr tools code`.\n"
    )
def _resolve_workspaces_dir(ctx: CLIContext) -> str:
    """Return the (user-expanded) workspaces directory from the merged config."""
    # Missing or null "directories" section falls back to the default location.
    configured = (ctx.config_merged.get("directories") or {}).get(
        "workspaces", "~/Workspaces"
    )
    return os.path.expanduser(configured)
def _build_workspace_filename(identifiers: List[str]) -> str:
    """Derive a deterministic workspace filename from the sorted identifiers."""
    return "_".join(sorted(identifiers)) + ".code-workspace"
def _build_workspace_data(
    selected: List[Repository], ctx: CLIContext
) -> Dict[str, Any]:
    """Build the JSON-serializable payload for a .code-workspace file."""
    workspace_folders = []
    for repository in selected:
        workspace_folders.append({"path": resolve_repository_path(repository, ctx)})
    return {"folders": workspace_folders, "settings": {}}
def open_vscode_workspace(ctx: CLIContext, selected: List[Repository]) -> None:
    """
    Create (if missing) and open a VS Code workspace for the selected repositories.

    Policy:
    - Fail with a clear error if VS Code CLI is missing.
    - Fail with a clear error if any repository identifier contains '/', because that
      indicates the repo has not been explicitly identified (no short identifier).
    - Do NOT auto-sanitize identifiers and do NOT create subfolders under workspaces.
    """
    if not selected:
        print("No repositories selected.")
        return

    # Fail fast before touching the filesystem.
    _ensure_vscode_cli_available()

    repo_identifiers = [
        get_repo_identifier(repo, ctx.all_repositories) for repo in selected
    ]
    _ensure_identifiers_are_filename_safe(repo_identifiers)

    target_dir = _resolve_workspaces_dir(ctx)
    os.makedirs(target_dir, exist_ok=True)
    workspace_file = os.path.join(
        target_dir, _build_workspace_filename(repo_identifiers)
    )

    payload = _build_workspace_data(selected, ctx)
    if os.path.exists(workspace_file):
        # Existing files are reused untouched; only new ones are written.
        print(f"Using existing workspace file: {workspace_file}")
    else:
        with open(workspace_file, "w", encoding="utf-8") as handle:
            json.dump(payload, handle, indent=4)
        print(f"Created workspace file: {workspace_file}")

    run_command(f'code "{workspace_file}"')

View File

@@ -2,10 +2,11 @@ import os
import hashlib
import re
def generate_alias(repo, bin_dir, existing_aliases):
"""
Generate an alias for a repository based on its repository name.
Steps:
1. Keep only consonants from the repository name (letters from BCDFGHJKLMNPQRSTVWXYZ).
2. Collapse consecutive identical consonants.
@@ -39,4 +40,4 @@ def generate_alias(repo, bin_dir, existing_aliases):
while conflict(candidate3):
candidate3 += "x"
candidate3 = candidate3[:12]
return candidate3
return candidate3

View File

@@ -98,8 +98,7 @@ def create_ink(
if alias_name == repo_identifier:
if not quiet:
print(
f"Alias '{alias_name}' equals identifier. "
"Skipping alias creation."
f"Alias '{alias_name}' equals identifier. Skipping alias creation."
)
return

View File

@@ -8,6 +8,7 @@ class CliLayer(str, Enum):
"""
CLI layer precedence (lower number = stronger layer).
"""
OS_PACKAGES = "os-packages"
NIX = "nix"
PYTHON = "python"

View File

@@ -34,11 +34,7 @@ def _nix_binary_candidates(home: str, names: List[str]) -> List[str]:
"""
Build possible Nix profile binary paths for a list of candidate names.
"""
return [
os.path.join(home, ".nix-profile", "bin", name)
for name in names
if name
]
return [os.path.join(home, ".nix-profile", "bin", name) for name in names if name]
def _path_binary_candidates(names: List[str]) -> List[str]:
@@ -148,7 +144,8 @@ def resolve_command_for_repo(
# c) Nix profile binaries
nix_binaries = [
path for path in _nix_binary_candidates(home, candidate_names)
path
for path in _nix_binary_candidates(home, candidate_names)
if _is_executable(path)
]
nix_binary = nix_binaries[0] if nix_binaries else None

View File

@@ -51,6 +51,7 @@ Repo = Dict[str, Any]
# Hilfsfunktionen
# ---------------------------------------------------------------------------
def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
"""
Recursively merge two dictionaries.
@@ -58,11 +59,7 @@ def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any
Values from `override` win over values in `base`.
"""
for key, value in override.items():
if (
key in base
and isinstance(base[key], dict)
and isinstance(value, dict)
):
if key in base and isinstance(base[key], dict) and isinstance(value, dict):
_deep_merge(base[key], value)
else:
base[key] = value
@@ -93,9 +90,7 @@ def _merge_repo_lists(
- Wenn category_name gesetzt ist, wird dieser in
repo["category_files"] eingetragen.
"""
index: Dict[Tuple[str, str, str], Repo] = {
_repo_key(r): r for r in base_list
}
index: Dict[Tuple[str, str, str], Repo] = {_repo_key(r): r for r in base_list}
for src in new_list:
key = _repo_key(src)
@@ -233,10 +228,12 @@ def _load_defaults_from_package_or_project() -> Dict[str, Any]:
return {"directories": {}, "repositories": []}
# ---------------------------------------------------------------------------
# Hauptfunktion
# ---------------------------------------------------------------------------
def load_config(user_config_path: str) -> Dict[str, Any]:
"""
Load and merge configuration for pkgmgr.
@@ -289,8 +286,12 @@ def load_config(user_config_path: str) -> Dict[str, Any]:
# repositories
merged["repositories"] = []
_merge_repo_lists(merged["repositories"], defaults["repositories"], category_name=None)
_merge_repo_lists(merged["repositories"], user_cfg["repositories"], category_name=None)
_merge_repo_lists(
merged["repositories"], defaults["repositories"], category_name=None
)
_merge_repo_lists(
merged["repositories"], user_cfg["repositories"], category_name=None
)
# andere Top-Level-Keys (falls vorhanden)
other_keys = (set(defaults.keys()) | set(user_cfg.keys())) - {

View File

@@ -1,9 +1,10 @@
import yaml
import os
def save_user_config(user_config,USER_CONFIG_PATH:str):
def save_user_config(user_config, USER_CONFIG_PATH: str):
"""Save the user configuration to USER_CONFIG_PATH."""
os.makedirs(os.path.dirname(USER_CONFIG_PATH), exist_ok=True)
with open(USER_CONFIG_PATH, 'w') as f:
with open(USER_CONFIG_PATH, "w") as f:
yaml.dump(user_config, f)
print(f"User configuration updated in {USER_CONFIG_PATH}.")
print(f"User configuration updated in {USER_CONFIG_PATH}.")

View File

@@ -16,7 +16,9 @@ class EnvTokenProvider:
source_name: str = "env"
def get(self, request: TokenRequest) -> Optional[TokenResult]:
for key in env_var_candidates(request.provider_kind, request.host, request.owner):
for key in env_var_candidates(
request.provider_kind, request.host, request.owner
):
val = os.environ.get(key)
if val:
return TokenResult(token=val.strip(), source=self.source_name)

View File

@@ -15,6 +15,7 @@ class GhTokenProvider:
This does NOT persist anything; it only reads what `gh` already knows.
"""
source_name: str = "gh"
def get(self, request: TokenRequest) -> Optional[TokenResult]:

View File

@@ -21,9 +21,7 @@ def _import_keyring():
try:
import keyring # type: ignore
except Exception as exc: # noqa: BLE001
raise KeyringUnavailableError(
"python-keyring is not installed."
) from exc
raise KeyringUnavailableError("python-keyring is not installed.") from exc
# Some environments have keyring installed but no usable backend.
# We do a lightweight "backend sanity check" by attempting to read the backend.

View File

@@ -9,7 +9,12 @@ from .providers.env import EnvTokenProvider
from .providers.gh import GhTokenProvider
from .providers.keyring import KeyringTokenProvider
from .providers.prompt import PromptTokenProvider
from .types import KeyringUnavailableError, NoCredentialsError, TokenRequest, TokenResult
from .types import (
KeyringUnavailableError,
NoCredentialsError,
TokenRequest,
TokenResult,
)
from .validate import validate_token
@@ -55,7 +60,10 @@ class TokenResolver:
print(f" {msg}", file=sys.stderr)
print(" Tokens will NOT be persisted securely.", file=sys.stderr)
print("", file=sys.stderr)
print(" To enable secure token storage, install python-keyring:", file=sys.stderr)
print(
" To enable secure token storage, install python-keyring:",
file=sys.stderr,
)
print(" pip install keyring", file=sys.stderr)
print("", file=sys.stderr)
print(" Or install via system packages:", file=sys.stderr)

View File

@@ -13,7 +13,9 @@ class KeyringKey:
username: str
def build_keyring_key(provider_kind: str, host: str, owner: Optional[str]) -> KeyringKey:
def build_keyring_key(
provider_kind: str, host: str, owner: Optional[str]
) -> KeyringKey:
"""Build a stable keyring key.
- service: "pkgmgr:<provider>"
@@ -21,11 +23,15 @@ def build_keyring_key(provider_kind: str, host: str, owner: Optional[str]) -> Ke
"""
provider_kind = str(provider_kind).strip().lower()
host = str(host).strip()
owner_part = (str(owner).strip() if owner else "-")
return KeyringKey(service=f"pkgmgr:{provider_kind}", username=f"{host}|{owner_part}")
owner_part = str(owner).strip() if owner else "-"
return KeyringKey(
service=f"pkgmgr:{provider_kind}", username=f"{host}|{owner_part}"
)
def env_var_candidates(provider_kind: str, host: str, owner: Optional[str]) -> list[str]:
def env_var_candidates(
provider_kind: str, host: str, owner: Optional[str]
) -> list[str]:
"""Return a list of environment variable names to try.
Order is from most specific to most generic.
@@ -44,7 +50,7 @@ def env_var_candidates(provider_kind: str, host: str, owner: Optional[str]) -> l
candidates.append(f"PKGMGR_{kind}_TOKEN")
candidates.append(f"PKGMGR_TOKEN_{kind}")
candidates.append("PKGMGR_TOKEN")
return candidates

View File

@@ -1,6 +1,6 @@
from __future__ import annotations
from .errors import GitError
from .errors import GitRunError
from .run import run
"""
@@ -12,6 +12,6 @@ details of subprocess handling.
"""
__all__ = [
"GitError",
"GitRunError",
"run",
]

View File

@@ -16,7 +16,7 @@ from .fetch import GitFetchError, fetch
from .init import GitInitError, init
from .merge_no_ff import GitMergeError, merge_no_ff
from .pull import GitPullError, pull
from .pull_args import GitPullArgsError, pull_args # <-- add
from .pull_args import GitPullArgsError, pull_args
from .pull_ff_only import GitPullFfOnlyError, pull_ff_only
from .push import GitPushError, push
from .push_upstream import GitPushUpstreamError, push_upstream
@@ -30,7 +30,7 @@ __all__ = [
"fetch",
"checkout",
"pull",
"pull_args", # <-- add
"pull_args",
"pull_ff_only",
"merge_no_ff",
"push",
@@ -52,7 +52,7 @@ __all__ = [
"GitFetchError",
"GitCheckoutError",
"GitPullError",
"GitPullArgsError", # <-- add
"GitPullArgsError",
"GitPullFfOnlyError",
"GitMergeError",
"GitPushError",

Some files were not shown because too many files have changed in this diff Show More