Compare commits

...

37 Commits

Author SHA1 Message Date
Kevin Veen-Birkenbach
65903e740b Release version 1.7.0
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 21:10:06 +01:00
Kevin Veen-Birkenbach
aa80a2ddb4 Added correct e2e test and pypi mirror
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 21:08:23 +01:00
Kevin Veen-Birkenbach
9456ad4475 feat(publish): add PyPI publish workflow, CLI command, parser integration, and tests
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Introduce publish action with PyPI target detection via MIRRORS
* Resolve version from SemVer git tags on HEAD
* Support preview mode and non-interactive CI usage
* Build and upload artifacts using build + twine with token resolution
* Add CLI wiring (dispatch, command handler, parser)
* Add E2E publish help tests for pkgmgr and nix run
* Add integration tests for publish preview and mirror handling
* Add unit tests for git tag parsing, PyPI URL parsing, workflow preview, and CLI handler
* Clean up dispatch and parser structure while integrating publish

https://chatgpt.com/share/693f0f00-af68-800f-8846-193dca69bd2e
2025-12-14 20:24:01 +01:00
Kevin Veen-Birkenbach
3d7d7e9c09 Release version 1.6.4
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 19:33:07 +01:00
Kevin Veen-Birkenbach
328203ccd7 **test(nix): add comprehensive unittest coverage for nix installer helpers**
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Add reusable fakes for runner and retry logic
* Cover conflict resolution paths (store-prefix, output-token, textual fallback)
* Add unit tests for profile parsing, normalization, matching, and text parsing
* Verify installer core behavior for success, mandatory failure, and optional failure
* Keep tests Nix-free using pure unittest + mocks

https://chatgpt.com/share/693efe80-d928-800f-98b7-0aaafee1d32a
2025-12-14 19:27:26 +01:00
Kevin Veen-Birkenbach
ac16378807 Deleted deprecated unit tests:
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
https://chatgpt.com/share/693efe80-d928-800f-98b7-0aaafee1d32a
2025-12-14 19:14:42 +01:00
Kevin Veen-Birkenbach
f7a86bc353 fix(launcher): avoid calling missing retry helper in packaged installs
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Load GitHub 403 retry helper only when available
- Fallback to plain `nix run` if retry function is not defined
- Prevent exit 127 when pkgmgr launcher is installed without retry script
- Fix E2E failure for `pkgmgr update pkgmgr --system`

https://chatgpt.com/share/693efd23-8b60-800f-adbb-9dfffc33f1f7
2025-12-14 19:08:32 +01:00
Kevin Veen-Birkenbach
06a6a77a48 *fix(nix): resolve nix profile conflicts without numeric indices and fix update pkgmgr system test*
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Switch conflict handling from index-based removal to token-based removal (*nix profile remove <name>*) for newer nix versions
* Add robust parsing of *nix profile list --json* with normalization and heuristics for output/name matching
* Detect at runtime whether numeric profile indices are supported and fall back automatically when they are not
* Ensure *pkgmgr* / *package-manager* flake outputs are correctly identified and cleaned up during reinstall
* Fix failing E2E test *test_update_pkgmgr_shallow_pkgmgr_with_system* by reliably removing conflicting profile entries before reinstall

https://chatgpt.com/share/693efae5-b8bc-800f-94e3-28c93b74ed7b
2025-12-14 18:58:29 +01:00
Kevin Veen-Birkenbach
4883e40812 fix(ci): skip container publish when no version tag exists
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Remove unsupported `fetch-tags` input from checkout step
* Detect missing `v*` tag on workflow_run SHA and exit successfully
* Gate Buildx, GHCR login, and publish steps behind `should_publish` flag

https://chatgpt.com/share/693ee7f1-ed80-800f-bb03-369a1cc659e3
2025-12-14 17:38:06 +01:00
Kevin Veen-Birkenbach
031ae5ac69 test(integration): fix mirror tests by removing non-existent check_cmd patches
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Remove patches referencing pkgmgr.actions.mirror.check_cmd (module does not exist)
- Patch actual mirror probe/remote helpers used at runtime
- Make mirror integration tests deterministic and CI-safe

https://chatgpt.com/share/693ee657-b260-800f-a69a-8b0680e6baa5
2025-12-14 17:31:05 +01:00
Kevin Veen-Birkenbach
1c4fc531fa fix(shellcheck): correct source path hint for retry_403 helper
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Align ShellCheck source hint with repository layout
- Fix SC1091 without disabling checks
- Runtime sourcing via ${RETRY_LIB} remains unchanged

https://chatgpt.com/share/693ee308-6c48-800f-b14f-7d6081e14eb4
2025-12-14 17:16:35 +01:00
Kevin Veen-Birkenbach
33dfbf3a4d test(env-virtual): execute pkgmgr from Python venv instead of system launcher
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
The virtual environment test no longer invokes the distro-installed pkgmgr launcher (Nix-based).
Instead, it explicitly installs and activates the Python venv via make setup-venv and runs pkgmgr from there.

This aligns the test with its actual purpose (venv validation), avoids accidental execution of the Nix launcher, and fixes the failure caused by the missing run_with_github_403_retry helper in the venv workflow.

https://chatgpt.com/share/693ee224-e838-800f-8fa0-45295b2f5e20
2025-12-14 17:12:48 +01:00
Kevin Veen-Birkenbach
a3aa7b6394 git commit -am "fix(shellcheck): point source hint to repo-local retry_403.sh
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Fix SC1091 by updating ShellCheck source hint to repo path
- Keep runtime sourcing from /usr/lib/package-manager unchanged
- CI-safe without disabling ShellCheck rules"

https://chatgpt.com/share/693edae1-6d84-800f-8556-0e54dd15b944
2025-12-14 16:42:22 +01:00
Kevin Veen-Birkenbach
724c262a4a fix(test): import mirror submodules before patching in integration tests
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
Ensure pkgmgr.actions.mirror.* submodules are imported before unittest.mock.patch
to avoid AttributeError when patching dotted paths (e.g. check_cmd).
Stabilizes mirror CLI integration tests in CI.

https://chatgpt.com/share/693ed9f5-9918-800f-a880-d1238b3da1c9
2025-12-14 16:38:24 +01:00
Kevin Veen-Birkenbach
dcbe16c5f0 feat(launcher): enforce GitHub 403 retry for nix run
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Require retry_403.sh to exist and fail hard if missing
- Source retry helper unconditionally
- Run nix flake execution via run_with_github_403_retry
- Prevent transient GitHub API rate-limit failures during nix run

https://chatgpt.com/share/693ed83e-a2e8-800f-8c1b-d5d5afeaa6ad
2025-12-14 16:31:02 +01:00
Kevin Veen-Birkenbach
f63b0a9f08 chore(ci): rename codesniffer workflows to linter
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Rename ShellCheck workflow to linter-shell
- Rename Ruff workflow to linter-python
- Update workflow calls and dependencies accordingly

https://chatgpt.com/share/693ed61a-7490-800f-aef1-fce845e717a2
2025-12-14 16:21:57 +01:00
Kevin Veen-Birkenbach
822c418503 Added missing import
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 16:16:37 +01:00
Kevin Veen-Birkenbach
562a6da291 test(integration): move mirror CLI tests from e2e to integration and patch side effects
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
https://chatgpt.com/share/693ed188-eb80-800f-8541-356e3fbd98c5
2025-12-14 16:14:17 +01:00
Kevin Veen-Birkenbach
e61b30d9af feat(tests): add unit tests for mirror context, io, commands, and remote helpers
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
https://chatgpt.com/share/693ed188-eb80-800f-8541-356e3fbd98c5
2025-12-14 16:02:11 +01:00
Kevin Veen-Birkenbach
27c0c7c01f **fix(mirror): derive remote repository owner and name from URL**
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Parse host, owner, and repository name directly from Git remote URLs
* Prevent provisioning under incorrect repository names
* Make Git URL the single source of truth for remote provisioning
* Improve diagnostics when URL parsing fails
2025-12-14 14:54:19 +01:00
Kevin Veen-Birkenbach
0d652d995e **feat(mirror,credentials): improve remote provisioning UX and token handling**
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Split mirror logic into atomic modules (remote check, provisioning, URL utils)
* Normalize Git remote URLs and provider host detection
* Add provider-specific token help URLs (GitHub, Gitea/Forgejo, GitLab)
* Improve keyring handling with clear warnings and install hints
* Gracefully fall back to prompt when keyring is unavailable
* Fix provider hint override logic during remote provisioning
2025-12-14 14:48:05 +01:00
Kevin Veen-Birkenbach
0e03fbbee2 Changed Mirror Name
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 14:01:19 +01:00
Kevin Veen-Birkenbach
7cfd7e8d5c Release version 1.6.3
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 13:39:52 +01:00
Kevin Veen-Birkenbach
84b6c71748 test(integration): add unittest-based repository layout contract test
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Add integration test using unittest to verify canonical repository paths
- Assert pkgmgr repository satisfies template layout (packaging, changelog, metadata)
- Use real filesystem without mocks or pytest dependencies

https://chatgpt.com/share/693eaa75-98f0-800f-adca-439555f84154
2025-12-14 13:26:18 +01:00
Kevin Veen-Birkenbach
db9aaf920e refactor(release,version): centralize repository path resolution and validate template layout
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Introduce RepoPaths resolver as single source of truth for repository file locations
- Update release workflow to use resolved packaging and changelog paths
- Update version readers to rely on the shared path resolver
- Add integration test asserting pkgmgr repository satisfies canonical template layout

https://chatgpt.com/share/693eaa75-98f0-800f-adca-439555f84154
2025-12-14 13:15:41 +01:00
Kevin Veen-Birkenbach
69d28a461d Release version 1.6.2
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 12:58:35 +01:00
Kevin Veen-Birkenbach
03e414cc9f fix(version): add tomli fallback for Python < 3.11
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Add conditional runtime dependency on tomli for Python < 3.11
- Fix crash on CentOS / Python 3.9 when reading pyproject.toml
- Ensure version command works consistently across distros

https://chatgpt.com/share/693ea1cb-41a0-800f-b4dc-4ff507eb60c6
2025-12-14 12:38:43 +01:00
Kevin Veen-Birkenbach
7674762c9a feat(version): show installed pkgmgr version when no repo is selected
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Add installed version detection for Python environments and Nix profiles
- Display pkgmgr’s own installed version when run outside a repository
- Improve version command output to include installed vs source versions
- Prefer editable venv setup as default in Makefile setup target

https://chatgpt.com/share/693e9f02-9b34-800f-8eeb-c7c776b3faa7
2025-12-14 12:26:50 +01:00
Kevin Veen-Birkenbach
a47de15e42 Release version 1.6.1
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 12:01:52 +01:00
Kevin Veen-Birkenbach
37f3057d31 fix(nix): resolve Ruff F821 via TYPE_CHECKING and stabilize NixFlakeInstaller tests
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
CI / test-unit (push) Has been cancelled
CI / test-integration (push) Has been cancelled
CI / test-env-virtual (push) Has been cancelled
CI / test-env-nix (push) Has been cancelled
CI / test-e2e (push) Has been cancelled
CI / test-virgin-user (push) Has been cancelled
CI / test-virgin-root (push) Has been cancelled
CI / codesniffer-shellcheck (push) Has been cancelled
CI / codesniffer-ruff (push) Has been cancelled
- Add TYPE_CHECKING imports for RepoContext and CommandRunner to avoid runtime deps
- Fix Ruff F821 undefined-name errors in nix installer modules
- Refactor legacy NixFlakeInstaller unit tests to mock subprocess.run directly
- Remove obsolete run_cmd_mock usage and assert install calls via subprocess calls
- Ensure tests run without realtime waits or external nix dependencies

https://chatgpt.com/share/693e925d-a79c-800f-b0b6-92b8ba260b11
2025-12-14 11:43:33 +01:00
Kevin Veen-Birkenbach
d55c8d3726 refactor(nix): split NixFlakeInstaller into atomic modules and add GitHub 403 retry handling
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Move Nix flake installer into installers/nix/ with atomic components
  (installer, runner, profile, retry, types)
- Preserve legacy behavior and semantics of NixFlakeInstaller
- Add GitHub API 403 rate-limit retry with Fibonacci backoff + jitter
- Update all imports to new nix module path
- Rename legacy unit tests and adapt patches to new structure
- Add unit test for simulated GitHub 403 retry without realtime sleeping

https://chatgpt.com/share/693e925d-a79c-800f-b0b6-92b8ba260b11
2025-12-14 11:32:48 +01:00
Kevin Veen-Birkenbach
3990560cd7 Release version 1.6.0
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 10:51:40 +01:00
Kevin Veen-Birkenbach
d1e5a71f77 Merge branch 'feature/mirror-provision'
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 10:45:51 +01:00
Kevin Veen-Birkenbach
d59dc8ad53 fix(cli): route update exclusively through UpdateManager
Some checks failed
CI / test-unit (push) Has been cancelled
CI / test-integration (push) Has been cancelled
CI / test-env-virtual (push) Has been cancelled
CI / test-env-nix (push) Has been cancelled
CI / test-e2e (push) Has been cancelled
CI / test-virgin-user (push) Has been cancelled
CI / test-virgin-root (push) Has been cancelled
CI / codesniffer-shellcheck (push) Has been cancelled
CI / codesniffer-ruff (push) Has been cancelled
* Remove `update` from repos command dispatch
* Prevent update from being handled by `handle_repos_command`
* Ensure top-level `update` always uses UpdateManager
* Fix "Unknown repos command: update" error after refactor

https://chatgpt.com/share/693e7ee9-2658-800f-985f-293ed0c8efbc
2025-12-14 10:09:46 +01:00
Kevin Veen-Birkenbach
55f4a1e941 refactor(update): move update logic to unified UpdateManager and extend system support
Some checks failed
CI / test-unit (push) Has been cancelled
CI / test-integration (push) Has been cancelled
CI / test-env-virtual (push) Has been cancelled
CI / test-env-nix (push) Has been cancelled
CI / test-e2e (push) Has been cancelled
CI / test-virgin-user (push) Has been cancelled
CI / test-virgin-root (push) Has been cancelled
CI / codesniffer-shellcheck (push) Has been cancelled
CI / codesniffer-ruff (push) Has been cancelled
- Move update orchestration from repository scope to actions/update
- Introduce UpdateManager and SystemUpdater with distro detection
- Add Arch, Debian/Ubuntu, and Fedora/RHEL system update handling
- Rename CLI flag from --system-update to --system
- Route update as a top-level command in CLI dispatch
- Remove legacy update_repos implementation
- Add E2E tests for:
  - update all without system updates
  - update single repo (pkgmgr) with system updates

https://chatgpt.com/share/693e76ec-5ee4-800f-9623-3983f56d5430
2025-12-14 09:35:52 +01:00
Kevin Veen-Birkenbach
2a4ec18532 Changed argument order
Some checks failed
CI / test-unit (push) Has been cancelled
CI / test-integration (push) Has been cancelled
CI / test-env-virtual (push) Has been cancelled
CI / test-env-nix (push) Has been cancelled
CI / test-e2e (push) Has been cancelled
CI / test-virgin-user (push) Has been cancelled
CI / test-virgin-root (push) Has been cancelled
CI / codesniffer-shellcheck (push) Has been cancelled
CI / codesniffer-ruff (push) Has been cancelled
2025-12-14 08:51:37 +01:00
Kevin Veen-Birkenbach
2debdbee09 * **Split mirror responsibilities into clear subcommands**
Some checks failed
CI / test-unit (push) Has been cancelled
CI / test-integration (push) Has been cancelled
CI / test-env-virtual (push) Has been cancelled
CI / test-env-nix (push) Has been cancelled
CI / test-e2e (push) Has been cancelled
CI / test-virgin-user (push) Has been cancelled
CI / test-virgin-root (push) Has been cancelled
CI / codesniffer-shellcheck (push) Has been cancelled
CI / codesniffer-ruff (push) Has been cancelled
Setup configures local Git state, check validates remote reachability in a read-only way, and provision explicitly creates missing remote repositories. Destructive behavior is never implicit.

* **Introduce a remote provisioning layer**
  pkgmgr can now ensure that repositories exist on remote providers. If a repository is missing, it can be created automatically on supported platforms when explicitly requested.

* **Add a provider registry for extensibility**
  Providers are resolved based on the remote host, with optional hints to force a specific backend. This makes it straightforward to add further providers later without changing the core logic.

* **Use a lightweight, dependency-free HTTP client**
  All API communication is handled via a small stdlib-based client. HTTP errors are mapped to meaningful domain errors, improving diagnostics and error handling consistency.

* **Centralize credential resolution**
  API tokens are resolved in a strict order: environment variables first, then the system keyring, and finally an interactive prompt if allowed. This works well for both CI and interactive use.

* **Keep keyring integration optional**
  Secure token storage via the OS keyring is provided as an optional dependency. If unavailable, pkgmgr still works using environment variables or one-off interactive tokens.

* **Improve CLI parser safety and clarity**
  Shared argument helpers now guard against duplicate definitions, making composed subcommands more robust and easier to maintain.

* **Expand end-to-end test coverage**
  All mirror-related workflows are exercised through real CLI invocations in preview mode, ensuring full wiring correctness while remaining safe for automated test environments.

https://chatgpt.com/share/693df441-a780-800f-bcf7-96e06cc9e421
2025-12-14 00:16:54 +01:00
119 changed files with 5586 additions and 1367 deletions

View File

@@ -28,8 +28,8 @@ jobs:
test-virgin-root:
uses: ./.github/workflows/test-virgin-root.yml
codesniffer-shellcheck:
uses: ./.github/workflows/codesniffer-shellcheck.yml
linter-shell:
uses: ./.github/workflows/linter-shell.yml
codesniffer-ruff:
uses: ./.github/workflows/codesniffer-ruff.yml
linter-python:
uses: ./.github/workflows/linter-python.yml

View File

@@ -4,7 +4,7 @@ on:
workflow_call:
jobs:
codesniffer-ruff:
linter-python:
runs-on: ubuntu-latest
steps:

View File

@@ -4,7 +4,7 @@ on:
workflow_call:
jobs:
codesniffer-shellcheck:
linter-shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

View File

@@ -29,16 +29,16 @@ jobs:
test-virgin-root:
uses: ./.github/workflows/test-virgin-root.yml
codesniffer-shellcheck:
uses: ./.github/workflows/codesniffer-shellcheck.yml
linter-shell:
uses: ./.github/workflows/linter-shell.yml
codesniffer-ruff:
uses: ./.github/workflows/codesniffer-ruff.yml
linter-python:
uses: ./.github/workflows/linter-python.yml
mark-stable:
needs:
- codesniffer-shellcheck
- codesniffer-ruff
- linter-shell
- linter-python
- test-unit
- test-integration
- test-env-nix

View File

@@ -19,7 +19,6 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
fetch-tags: true
- name: Checkout workflow_run commit and refresh tags
run: |
@@ -35,22 +34,30 @@ jobs:
SHA="$(git rev-parse HEAD)"
V_TAG="$(git tag --points-at "${SHA}" --list 'v*' | sort -V | tail -n1)"
[[ -n "$V_TAG" ]] || { echo "No version tag found"; exit 1; }
if [[ -z "${V_TAG}" ]]; then
echo "No version tag found for ${SHA}. Skipping publish."
echo "should_publish=false" >> "$GITHUB_OUTPUT"
exit 0
fi
VERSION="${V_TAG#v}"
STABLE_SHA="$(git rev-parse -q --verify refs/tags/stable^{commit} 2>/dev/null || true)"
IS_STABLE=false
[[ -n "${STABLE_SHA}" && "${STABLE_SHA}" == "${SHA}" ]] && IS_STABLE=true
echo "should_publish=true" >> "$GITHUB_OUTPUT"
echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
echo "is_stable=${IS_STABLE}" >> "$GITHUB_OUTPUT"
- name: Set up Docker Buildx
if: ${{ steps.info.outputs.should_publish == 'true' }}
uses: docker/setup-buildx-action@v3
with:
use: true
- name: Login to GHCR
if: ${{ steps.info.outputs.should_publish == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
@@ -58,6 +65,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Publish all images
if: ${{ steps.info.outputs.should_publish == 'true' }}
run: |
set -euo pipefail
OWNER="${{ github.repository_owner }}" \

View File

@@ -1,3 +1,54 @@
## [1.7.0] - 2025-12-14
* * New *pkgmgr publish* command to publish repository artifacts to PyPI based on the *MIRRORS* file.
* Automatically selects the current repository when no explicit selection is given.
* Publishes only when a semantic version tag is present on *HEAD*; otherwise skips with a clear info message.
* Supports non-interactive mode for CI environments via *--non-interactive*.
## [1.6.4] - 2025-12-14
* * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
* More stable launcher behavior in packaged and virtual-env setups.
* Enhanced mirror and remote handling: repository owner/name are derived from URLs, with smoother provisioning and clearer credential handling.
* More reliable releases and artifacts due to safer CI behavior when no version tag is present.
## [1.6.3] - 2025-12-14
* ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations.
## [1.6.2] - 2025-12-14
* **pkgmgr version** now also shows the installed pkgmgr version when run outside a repository.
## [1.6.1] - 2025-12-14
* * Added automatic retry handling for GitHub 403 / rate-limit errors during Nix flake installs (Fibonacci backoff with jitter).
## [1.6.0] - 2025-12-14
* *** Changed ***
- Unified update handling via a single top-level `pkgmgr update` command, removing ambiguous update paths.
- Improved update reliability by routing all update logic through a central UpdateManager.
- Renamed system update flag from `--system-update` to `--system` for clarity and consistency.
- Made mirror handling explicit and safer by separating setup, check, and provision responsibilities.
- Improved credential resolution for remote providers (environment → keyring → interactive).
*** Added ***
- Optional system updates via `pkgmgr update --system` (Arch, Debian/Ubuntu, Fedora/RHEL).
- `pkgmgr install --update` to force re-running installers and refresh existing installations.
- Remote repository provisioning for mirrors on supported providers.
- Extended end-to-end test coverage for update and mirror workflows.
*** Fixed ***
- Resolved “Unknown repos command: update” errors after CLI refactoring.
- Improved Nix update stability and reduced CI failures caused by transient rate limits.
## [1.5.0] - 2025-12-13
* - Commands now show live output while running, making long operations easier to follow

View File

@@ -1,3 +1,4 @@
git@github.com:kevinveenbirkenbach/package-manager.git
ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git
ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git
ssh://git@code.infinito.nexus:2201/kevinveenbirkenbach/pkgmgr.git
https://pypi.org/project/kpmx/

View File

@@ -44,7 +44,7 @@ install:
# ------------------------------------------------------------
# Default: keep current auto-detection behavior
setup: setup-nix setup-venv
setup: setup-venv
# Explicit: developer setup (Python venv + shell RC + install)
setup-venv: setup-nix

View File

@@ -32,7 +32,7 @@
rec {
pkgmgr = pyPkgs.buildPythonApplication {
pname = "package-manager";
version = "1.5.0";
version = "1.7.0";
# Use the git repo as source
src = ./.;

View File

@@ -1,7 +1,7 @@
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>
pkgname=package-manager
pkgver=0.9.1
pkgver=1.7.0
pkgrel=1
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
arch=('any')

View File

@@ -1,3 +1,27 @@
package-manager (1.7.0-1) unstable; urgency=medium
* * New *pkgmgr publish* command to publish repository artifacts to PyPI based on the *MIRRORS* file.
* Automatically selects the current repository when no explicit selection is given.
* Publishes only when a semantic version tag is present on *HEAD*; otherwise skips with a clear info message.
* Supports non-interactive mode for CI environments via *--non-interactive*.
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 21:10:06 +0100
package-manager (1.6.4-1) unstable; urgency=medium
* * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
* More stable launcher behavior in packaged and virtual-env setups.
* Enhanced mirror and remote handling: repository owner/name are derived from URLs, with smoother provisioning and clearer credential handling.
* More reliable releases and artifacts due to safer CI behavior when no version tag is present.
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 19:33:07 +0100
package-manager (1.6.3-1) unstable; urgency=medium
* ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations.
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 13:39:52 +0100
package-manager (0.9.1-1) unstable; urgency=medium
* * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.

View File

@@ -1,5 +1,5 @@
Name: package-manager
Version: 0.9.1
Version: 1.7.0
Release: 1%{?dist}
Summary: Wrapper that runs Kevin's package-manager via Nix flake
@@ -74,6 +74,21 @@ echo ">>> package-manager removed. Nix itself was not removed."
/usr/lib/package-manager/
%changelog
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.7.0-1
- * New *pkgmgr publish* command to publish repository artifacts to PyPI based on the *MIRRORS* file.
* Automatically selects the current repository when no explicit selection is given.
* Publishes only when a semantic version tag is present on *HEAD*; otherwise skips with a clear info message.
* Supports non-interactive mode for CI environments via *--non-interactive*.
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.6.4-1
- * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
* More stable launcher behavior in packaged and virtual-env setups.
* Enhanced mirror and remote handling: repository owner/name are derived from URLs, with smoother provisioning and clearer credential handling.
* More reliable releases and artifacts due to safer CI behavior when no version tag is present.
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.6.3-1
- ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations.
* Wed Dec 10 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.9.1-1
- * Refactored installer: new `venv-create.sh`, cleaner root/user setup flow, updated README with architecture map.
* Split virgin tests into root/user workflows; stabilized Nix installer across distros; improved test scripts with dynamic distro selection and isolated Nix stores.

View File

@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "package-manager"
version = "1.5.0"
version = "1.7.0"
description = "Kevin's package-manager tool (pkgmgr)"
readme = "README.md"
requires-python = ">=3.9"
@@ -19,7 +19,8 @@ authors = [
# Base runtime dependencies
dependencies = [
"PyYAML>=6.0"
"PyYAML>=6.0",
"tomli; python_version < \"3.11\"",
]
[project.urls]
@@ -27,8 +28,8 @@ Homepage = "https://s.veen.world/pkgmgr"
Source = "https://github.com/kevinveenbirkenbach/package-manager"
[project.optional-dependencies]
keyring = ["keyring>=24.0.0"]
dev = [
"pytest",
"mypy"
]

View File

@@ -2,6 +2,16 @@
set -euo pipefail
FLAKE_DIR="/usr/lib/package-manager"
NIX_LIB_DIR="${FLAKE_DIR}/nix/lib"
RETRY_LIB="${NIX_LIB_DIR}/retry_403.sh"
# ---------------------------------------------------------------------------
# Hard requirement: retry helper must exist (fail if missing)
# ---------------------------------------------------------------------------
if [[ ! -f "${RETRY_LIB}" ]]; then
echo "[launcher] ERROR: Required retry helper not found: ${RETRY_LIB}" >&2
exit 1
fi
# ---------------------------------------------------------------------------
# Try to ensure that "nix" is on PATH (common locations + container user)
@@ -32,9 +42,13 @@ if ! command -v nix >/dev/null 2>&1; then
fi
# ---------------------------------------------------------------------------
# Primary path: use Nix flake if available
# Primary path: use Nix flake if available (with GitHub 403 retry)
# ---------------------------------------------------------------------------
if command -v nix >/dev/null 2>&1; then
if declare -F run_with_github_403_retry >/dev/null; then
# shellcheck source=./scripts/nix/lib/retry_403.sh
source "${RETRY_LIB}"
exec run_with_github_403_retry nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
else
exec nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
fi

View File

@@ -1,32 +1,49 @@
#!/usr/bin/env bash
set -euo pipefail
IMAGE="pkgmgr-$PKGMGR_DISTRO"
IMAGE="pkgmgr-${PKGMGR_DISTRO}"
echo
echo "------------------------------------------------------------"
echo ">>> Testing VENV: $IMAGE"
echo ">>> Testing VENV: ${IMAGE}"
echo "------------------------------------------------------------"
echo "[test-env-virtual] Inspect image metadata:"
docker image inspect "$IMAGE" | sed -n '1,40p'
echo "[test-env-virtual] Running: docker run --rm --entrypoint pkgmgr $IMAGE --help"
docker image inspect "${IMAGE}" | sed -n '1,40p'
echo
# Run the command and capture the output
# ------------------------------------------------------------
# Run VENV-based pkgmgr test inside container
# ------------------------------------------------------------
if OUTPUT=$(docker run --rm \
-e REINSTALL_PKGMGR=1 \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
-v "$(pwd):/src" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
"$IMAGE" 2>&1); then
-e REINSTALL_PKGMGR=1 \
-v "$(pwd):/src" \
-w /src \
"${IMAGE}" \
bash -lc '
set -euo pipefail
echo "[test-env-virtual] Installing pkgmgr (distro package)..."
make install
echo "[test-env-virtual] Setting up Python venv..."
make setup-venv
echo "[test-env-virtual] Activating venv..."
. "$HOME/.venvs/pkgmgr/bin/activate"
echo "[test-env-virtual] Using pkgmgr from:"
command -v pkgmgr
pkgmgr --help
' 2>&1); then
echo "$OUTPUT"
echo
echo "[test-env-virtual] SUCCESS: $IMAGE responded to 'pkgmgr --help'"
echo "[test-env-virtual] SUCCESS: venv-based pkgmgr works in ${IMAGE}"
else
echo "$OUTPUT"
echo
echo "[test-env-virtual] ERROR: $IMAGE failed to run 'pkgmgr --help'"
echo "[test-env-virtual] ERROR: venv-based pkgmgr failed in ${IMAGE}"
exit 1
fi
fi

View File

@@ -28,7 +28,7 @@ from pkgmgr.actions.install.installers.os_packages import (
DebianControlInstaller,
RpmSpecInstaller,
)
from pkgmgr.actions.install.installers.nix_flake import (
from pkgmgr.actions.install.installers.nix import (
NixFlakeInstaller,
)
from pkgmgr.actions.install.installers.python import PythonInstaller

View File

@@ -9,7 +9,7 @@ pkgmgr.actions.install.installers.
"""
from pkgmgr.actions.install.installers.base import BaseInstaller # noqa: F401
from pkgmgr.actions.install.installers.nix_flake import NixFlakeInstaller # noqa: F401
from pkgmgr.actions.install.installers.nix import NixFlakeInstaller # noqa: F401
from pkgmgr.actions.install.installers.python import PythonInstaller # noqa: F401
from pkgmgr.actions.install.installers.makefile import MakefileInstaller # noqa: F401

View File

@@ -0,0 +1,4 @@
from .installer import NixFlakeInstaller
from .retry import RetryPolicy
__all__ = ["NixFlakeInstaller", "RetryPolicy"]

View File

@@ -0,0 +1,100 @@
from __future__ import annotations
from typing import TYPE_CHECKING, List
from .profile import NixProfileInspector
from .retry import GitHubRateLimitRetry
from .runner import CommandRunner
from .textparse import NixConflictTextParser
if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
class NixConflictResolver:
    """
    Resolves nix profile file conflicts by:

    1. Parsing conflicting store paths from stderr
    2. Mapping them to profile remove tokens via `nix profile list --json`
    3. Removing those tokens deterministically
    4. Retrying install
    """

    def __init__(
        self,
        runner: CommandRunner,
        retry: GitHubRateLimitRetry,
        profile: NixProfileInspector,
    ) -> None:
        # Collaborators are injected so the installer can share one runner,
        # retry policy and profile inspector across components.
        self._runner = runner
        self._retry = retry
        self._profile = profile
        # Stateless parser for nix's textual conflict error output.
        self._parser = NixConflictTextParser()

    def resolve(
        self,
        ctx: "RepoContext",
        install_cmd: str,
        stdout: str,
        stderr: str,
        *,
        output: str,
        max_rounds: int = 10,
    ) -> bool:
        """Try to clear profile conflicts and re-run *install_cmd*.

        Args:
            ctx: Repository context; only ``quiet`` is read here (via getattr,
                so a missing attribute defaults to non-quiet).
            install_cmd: The exact install command to retry after cleanup.
            stdout: Captured stdout of the failed install attempt.
            stderr: Captured stderr of the failed install attempt.
            output: Flake output name, used for name-based token lookup.
            max_rounds: Upper bound on remove-and-retry cycles.

        Returns:
            True once a retried install exits 0; False if the conflict could
            not be resolved within *max_rounds*.
        """
        quiet = bool(getattr(ctx, "quiet", False))
        # Nix may report the conflict on either stream; scan both together.
        combined = f"{stdout}\n{stderr}"
        for _ in range(max_rounds):
            # 1) Extract conflicting store prefixes from nix error output
            store_prefixes = self._parser.existing_store_prefixes(combined)
            # 2) Resolve them to concrete remove tokens
            tokens: List[str] = self._profile.find_remove_tokens_for_store_prefixes(
                ctx,
                self._runner,
                store_prefixes,
            )
            # 3) Fallback: output-name based lookup (also covers nix suggesting: `nix profile remove pkgmgr`)
            if not tokens:
                tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
            if tokens:
                if not quiet:
                    print(
                        "[nix] conflict detected; removing existing profile entries: "
                        + ", ".join(tokens)
                    )
                for t in tokens:
                    # tokens may contain things like "pkgmgr" or "pkgmgr-1" or quoted tokens (we keep raw)
                    self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
                res = self._retry.run_with_retry(ctx, self._runner, install_cmd)
                if res.returncode == 0:
                    return True
                # Still failing: feed the fresh output into the next round.
                combined = f"{res.stdout}\n{res.stderr}"
                continue
            # 4) Last-resort fallback: use textual remove tokens from stderr (“nix profile remove X”)
            tokens = self._parser.remove_tokens(combined)
            if tokens:
                if not quiet:
                    print("[nix] fallback remove tokens: " + ", ".join(tokens))
                for t in tokens:
                    self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
                res = self._retry.run_with_retry(ctx, self._runner, install_cmd)
                if res.returncode == 0:
                    return True
                combined = f"{res.stdout}\n{res.stderr}"
                continue
            # No tokens found by any strategy: give up rather than loop idly.
            if not quiet:
                print("[nix] conflict detected but could not resolve profile entries to remove.")
            return False
        return False

View File

@@ -0,0 +1,229 @@
from __future__ import annotations
import os
import shutil
from typing import TYPE_CHECKING, List, Tuple
from pkgmgr.actions.install.installers.base import BaseInstaller
from .conflicts import NixConflictResolver
from .profile import NixProfileInspector
from .retry import GitHubRateLimitRetry, RetryPolicy
from .runner import CommandRunner
if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
class NixFlakeInstaller(BaseInstaller):
    """Installs flake outputs into the user's nix profile.

    Strategy: plain ``nix profile install`` first, then escalating fallbacks
    (conflict cleanup, legacy index upgrade/remove, token-based removal).
    Whether the local nix accepts numeric profile indices is learned at
    runtime and cached in ``_indices_supported``.
    """

    layer = "nix"
    FLAKE_FILE = "flake.nix"

    def __init__(self, policy: RetryPolicy | None = None) -> None:
        self._runner = CommandRunner()
        self._retry = GitHubRateLimitRetry(policy=policy)
        self._profile = NixProfileInspector()
        self._conflicts = NixConflictResolver(self._runner, self._retry, self._profile)
        # Newer nix rejects numeric indices; we learn this at runtime and cache the decision.
        self._indices_supported: bool | None = None

    def supports(self, ctx: "RepoContext") -> bool:
        """Return True when nix is available and the repo ships a flake.nix.

        The PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 escape hatch disables this
        installer entirely (used e.g. in constrained test environments).
        """
        if os.environ.get("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER") == "1":
            if not ctx.quiet:
                print(
                    "[INFO] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 "
                    "skipping NixFlakeInstaller."
                )
            return False
        if shutil.which("nix") is None:
            return False
        return os.path.exists(os.path.join(ctx.repo_dir, self.FLAKE_FILE))

    def _profile_outputs(self, ctx: "RepoContext") -> List[Tuple[str, bool]]:
        """Return the flake outputs to install as (output_name, allow_failure)."""
        # (output_name, allow_failure)
        if ctx.identifier in {"pkgmgr", "package-manager"}:
            # pkgmgr itself: the named output is mandatory, "default" is best-effort.
            return [("pkgmgr", False), ("default", True)]
        return [("default", False)]

    def run(self, ctx: "RepoContext") -> None:
        """Ensure every relevant flake output is installed (or upgraded)."""
        if not self.supports(ctx):
            return
        outputs = self._profile_outputs(ctx)
        if not ctx.quiet:
            msg = (
                "[nix] flake detected in "
                f"{ctx.identifier}, ensuring outputs: "
                + ", ".join(name for name, _ in outputs)
            )
            print(msg)
        for output, allow_failure in outputs:
            if ctx.force_update:
                self._force_upgrade_output(ctx, output, allow_failure)
            else:
                self._install_only(ctx, output, allow_failure)

    def _installable(self, ctx: "RepoContext", output: str) -> str:
        """Return the flake installable reference, e.g. ``/path/repo#pkgmgr``."""
        return f"{ctx.repo_dir}#{output}"

    # ---------------------------------------------------------------------
    # Core install path
    # ---------------------------------------------------------------------
    def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
        """Install one output, escalating through fallbacks on failure.

        Order: retry-wrapped install -> conflict resolver -> legacy index
        upgrade/remove (if indices still seem supported) -> token removal ->
        one final plain install. Raises SystemExit on a mandatory output's
        final failure; warns and continues for optional outputs.
        """
        install_cmd = f"nix profile install {self._installable(ctx, output)}"
        if not ctx.quiet:
            print(f"[nix] install: {install_cmd}")
        res = self._retry.run_with_retry(ctx, self._runner, install_cmd)
        if res.returncode == 0:
            if not ctx.quiet:
                print(f"[nix] output '{output}' successfully installed.")
            return
        # Conflict resolver first (handles the common “existing package already provides file” case)
        if self._conflicts.resolve(
            ctx,
            install_cmd,
            res.stdout,
            res.stderr,
            output=output,
        ):
            if not ctx.quiet:
                print(f"[nix] output '{output}' successfully installed after conflict cleanup.")
            return
        if not ctx.quiet:
            print(
                f"[nix] install failed for '{output}' (exit {res.returncode}), "
                "trying upgrade/remove+install..."
            )
        # If indices are supported, try legacy index-upgrade path.
        if self._indices_supported is not False:
            indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
            upgraded = False
            for idx in indices:
                if self._upgrade_index(ctx, idx):
                    upgraded = True
                    if not ctx.quiet:
                        print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
            if upgraded:
                return
            if indices and not ctx.quiet:
                print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
            for idx in indices:
                self._remove_index(ctx, idx)
            # If we learned indices are unsupported, immediately fall back below
            if self._indices_supported is False:
                self._remove_tokens_for_output(ctx, output)
        else:
            # indices explicitly unsupported
            self._remove_tokens_for_output(ctx, output)
        final = self._runner.run(ctx, install_cmd, allow_failure=True)
        if final.returncode == 0:
            if not ctx.quiet:
                print(f"[nix] output '{output}' successfully re-installed.")
            return
        print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})")
        if not allow_failure:
            raise SystemExit(final.returncode)
        print(f"[WARNING] Continuing despite failure of optional output '{output}'.")

    # ---------------------------------------------------------------------
    # force_update path
    # ---------------------------------------------------------------------
    def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
        """Forcefully refresh one output (used when ctx.force_update is set)."""
        # Prefer token path if indices unsupported (new nix)
        if self._indices_supported is False:
            self._remove_tokens_for_output(ctx, output)
            self._install_only(ctx, output, allow_failure)
            if not ctx.quiet:
                print(f"[nix] output '{output}' successfully upgraded.")
            return
        indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
        upgraded_any = False
        for idx in indices:
            if self._upgrade_index(ctx, idx):
                upgraded_any = True
                if not ctx.quiet:
                    print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
        if upgraded_any:
            if not ctx.quiet:
                print(f"[nix] output '{output}' successfully upgraded.")
            return
        if indices and not ctx.quiet:
            print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
        for idx in indices:
            self._remove_index(ctx, idx)
        # If we learned indices are unsupported, also remove by token to actually clear conflicts
        if self._indices_supported is False:
            self._remove_tokens_for_output(ctx, output)
        self._install_only(ctx, output, allow_failure)
        if not ctx.quiet:
            print(f"[nix] output '{output}' successfully upgraded.")

    # ---------------------------------------------------------------------
    # Helpers
    # ---------------------------------------------------------------------
    def _stderr_says_indices_unsupported(self, stderr: str) -> bool:
        """Detect the newer-nix error text rejecting numeric profile indices."""
        s = (stderr or "").lower()
        return "no longer supports indices" in s or "does not support indices" in s

    def _upgrade_index(self, ctx: "RepoContext", idx: int) -> bool:
        """Run ``nix profile upgrade`` for one index; learn index support."""
        cmd = f"nix profile upgrade --refresh {idx}"
        res = self._runner.run(ctx, cmd, allow_failure=True)
        if self._stderr_says_indices_unsupported(getattr(res, "stderr", "")):
            self._indices_supported = False
            return False
        if self._indices_supported is None:
            # First index command that wasn't rejected: remember indices work.
            self._indices_supported = True
        return res.returncode == 0

    def _remove_index(self, ctx: "RepoContext", idx: int) -> None:
        """Best-effort ``nix profile remove`` by index; learn index support."""
        res = self._runner.run(ctx, f"nix profile remove {idx}", allow_failure=True)
        if self._stderr_says_indices_unsupported(getattr(res, "stderr", "")):
            self._indices_supported = False
        if self._indices_supported is None:
            self._indices_supported = True

    def _remove_tokens_for_output(self, ctx: "RepoContext", output: str) -> None:
        """Remove profile entries for *output* by name/key token (new-nix path)."""
        tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
        if not tokens:
            return
        if not ctx.quiet:
            print(f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}")
        for t in tokens:
            self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)

View File

@@ -0,0 +1,4 @@
from .inspector import NixProfileInspector
from .models import NixProfileEntry
__all__ = ["NixProfileInspector", "NixProfileEntry"]

View File

@@ -0,0 +1,162 @@
from __future__ import annotations
from typing import Any, List, TYPE_CHECKING
from .matcher import (
entry_matches_output,
entry_matches_store_path,
stable_unique_ints,
)
from .normalizer import normalize_elements
from .parser import parse_profile_list_json
from .result import extract_stdout_text
if TYPE_CHECKING:
# Keep these as TYPE_CHECKING-only to avoid runtime import cycles.
from pkgmgr.actions.install.context import RepoContext
from pkgmgr.core.command.runner import CommandRunner
class NixProfileInspector:
"""
Reads and inspects the user's Nix profile list (JSON).
Public API:
- list_json()
- find_installed_indices_for_output() (legacy; may not work on newer nix)
- find_indices_by_store_path() (legacy; may not work on newer nix)
- find_remove_tokens_for_output()
- find_remove_tokens_for_store_prefixes()
"""
def list_json(self, ctx: "RepoContext", runner: "CommandRunner") -> dict[str, Any]:
res = runner.run(ctx, "nix profile list --json", allow_failure=False)
raw = extract_stdout_text(res)
return parse_profile_list_json(raw)
# ---------------------------------------------------------------------
# Legacy index helpers (still useful on older nix; newer nix may reject indices)
# ---------------------------------------------------------------------
def find_installed_indices_for_output(
self,
ctx: "RepoContext",
runner: "CommandRunner",
output: str,
) -> List[int]:
data = self.list_json(ctx, runner)
entries = normalize_elements(data)
hits: List[int] = []
for e in entries:
if e.index is None:
continue
if entry_matches_output(e, output):
hits.append(e.index)
return stable_unique_ints(hits)
def find_indices_by_store_path(
self,
ctx: "RepoContext",
runner: "CommandRunner",
store_path: str,
) -> List[int]:
needle = (store_path or "").strip()
if not needle:
return []
data = self.list_json(ctx, runner)
entries = normalize_elements(data)
hits: List[int] = []
for e in entries:
if e.index is None:
continue
if entry_matches_store_path(e, needle):
hits.append(e.index)
return stable_unique_ints(hits)
# ---------------------------------------------------------------------
# New token-based helpers (works with newer nix where indices are rejected)
# ---------------------------------------------------------------------
def find_remove_tokens_for_output(
self,
ctx: "RepoContext",
runner: "CommandRunner",
output: str,
) -> List[str]:
"""
Returns profile remove tokens to remove entries matching a given output.
We always include the raw output token first because nix itself suggests:
nix profile remove pkgmgr
"""
out = (output or "").strip()
if not out:
return []
data = self.list_json(ctx, runner)
entries = normalize_elements(data)
tokens: List[str] = [out] # critical: matches nix's own suggestion for conflicts
for e in entries:
if entry_matches_output(e, out):
# Prefer removing by key/name (non-index) when possible.
# New nix rejects numeric indices; these tokens are safer.
k = (e.key or "").strip()
n = (e.name or "").strip()
if k and not k.isdigit():
tokens.append(k)
elif n and not n.isdigit():
tokens.append(n)
# stable unique preserving order
seen: set[str] = set()
uniq: List[str] = []
for t in tokens:
if t and t not in seen:
uniq.append(t)
seen.add(t)
return uniq
def find_remove_tokens_for_store_prefixes(
    self,
    ctx: "RepoContext",
    runner: "CommandRunner",
    prefixes: List[str],
) -> List[str]:
    """
    Returns remove tokens for entries whose store path matches any prefix.
    """
    wanted = [(p or "").strip() for p in (prefixes or []) if p]
    wanted = [p for p in wanted if p]
    if not wanted:
        return []
    elements = normalize_elements(self.list_json(ctx, runner))
    tokens: List[str] = []
    for el in elements:
        # NOTE(review): matching is exact equality against the given
        # "prefixes", mirroring the original behavior.
        if not el.store_paths or not any(sp in wanted for sp in el.store_paths):
            continue
        key = (el.key or "").strip()
        name = (el.name or "").strip()
        if key and not key.isdigit():
            tokens.append(key)
        elif name and not name.isdigit():
            tokens.append(name)
    return list(dict.fromkeys(t for t in tokens if t))

View File

@@ -0,0 +1,62 @@
from __future__ import annotations
from typing import List
from .models import NixProfileEntry
def entry_matches_output(entry: NixProfileEntry, output: str) -> bool:
    """
    Heuristic matcher: *output* is typically a flake output name (e.g.
    "pkgmgr"); it is matched against the entry's name and attrPath.
    """
    wanted = (output or "").strip()
    if not wanted:
        return False
    for candidate in (entry.name, entry.attr_path):
        text = (candidate or "").strip()
        if not text:
            continue
        if (
            text == wanted                      # direct match
            or f"#{wanted}" in text             # attrPath contains "#<output>"
            or text.endswith(f".{wanted}")      # attrPath ends with ".<output>"
            or text.startswith(f"{wanted}-")    # name pattern "<output>-<n>"
        ):
            return True
        # Historical special case: repo "package-manager", output "pkgmgr".
        if wanted == "pkgmgr" and text.startswith("package-manager-"):
            return True
    return False
def entry_matches_store_path(entry: NixProfileEntry, store_path: str) -> bool:
    """True when the (stripped) *store_path* equals one of the entry's paths."""
    wanted = (store_path or "").strip()
    if not wanted:
        return False
    return wanted in [(p or "") for p in entry.store_paths]
def stable_unique_ints(values: List[int]) -> List[int]:
    """Deduplicate *values* while preserving first-seen order."""
    # dict preserves insertion order, so this is the seen-set loop in one step.
    return list(dict.fromkeys(values))

View File

@@ -0,0 +1,17 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import List, Optional
@dataclass(frozen=True)
class NixProfileEntry:
    """
    Minimal normalized representation of one nix profile element entry.
    """
    # Raw key of the element in the profile JSON ("0", "1", ... or a name
    # like "pkgmgr-1", depending on the nix version).
    key: str
    # Numeric profile index when derivable from the key/entry, else None.
    index: Optional[int]
    # Package name as reported by nix; may be empty.
    name: str
    # Flake attribute path as reported by nix; may be empty.
    attr_path: str
    # All store paths associated with this entry (normalized, non-empty).
    store_paths: List[str]

View File

@@ -0,0 +1,128 @@
from __future__ import annotations
import re
from typing import Any, Dict, Iterable, List, Optional
from .models import NixProfileEntry
def coerce_index(key: str, entry: Dict[str, Any]) -> Optional[int]:
    """
    Derive a numeric profile index from a JSON element.

    Nix JSON schema varies: element keys might be "0", "1", ... or names
    like "pkgmgr-1", and some versions carry an explicit index field.
    Safe options are tried in order; None when nothing applies.
    """
    text = (key or "").strip()
    # 1) Classic schema: the key itself is a number.
    if text.isdigit():
        try:
            return int(text)
        except Exception:
            # isdigit() accepts some digit characters int() rejects.
            return None
    # 2) Explicit index fields (schema-dependent).
    for field_name in ("index", "id", "position"):
        value = entry.get(field_name)
        if isinstance(value, int):
            return value
        if isinstance(value, str):
            digits = value.strip()
            if digits.isdigit():
                try:
                    return int(digits)
                except Exception:
                    pass
    # 3) Last resort: trailing number in a "<name>-<n>" style key.
    tail = re.match(r"^.+-(\d+)$", text)
    if tail is None:
        return None
    try:
        return int(tail.group(1))
    except Exception:
        return None
def iter_store_paths(entry: Dict[str, Any]) -> Iterable[str]:
    """
    Yield all store paths found in a nix profile JSON entry.

    Supports the common schema variants:
      - "storePaths": list of strings (or, rarely, a single string)
      - "storePath": single string
      - nested "outputs" dicts carrying a "storePath" each (best-effort)
    """
    if not isinstance(entry, dict):
        return
    many = entry.get("storePaths")
    if isinstance(many, list):
        for item in many:
            if isinstance(item, str):
                yield item
    elif isinstance(many, str):
        yield many
    single = entry.get("storePath")
    if isinstance(single, str):
        yield single
    outputs = entry.get("outputs")
    if isinstance(outputs, dict):
        for out_value in outputs.values():
            if isinstance(out_value, dict):
                nested = out_value.get("storePath")
                if isinstance(nested, str):
                    yield nested
def normalize_store_path(store_path: str) -> str:
    """
    Normalize a store path for matching.
    Currently only strips whitespace; hook for future normalization.
    """
    if not store_path:
        return ""
    return store_path.strip()
def normalize_elements(data: Dict[str, Any]) -> List[NixProfileEntry]:
    """
    Convert `nix profile list --json` data into normalized entries.

    Observed "elements" layouts:
      - keyed by index: {"elements": {"0": {...}, "1": {...}}}
      - keyed by name:  {"elements": {"pkgmgr-1": {...}, "pkgmgr-2": {...}}}
    """
    elements = data.get("elements")
    if not isinstance(elements, dict):
        return []
    result: List[NixProfileEntry] = []
    for raw_key, raw_entry in elements.items():
        # Non-dict entries (legacy string schema) carry nothing usable here.
        if not isinstance(raw_entry, dict):
            continue
        key = str(raw_key)
        paths = [
            cleaned
            for cleaned in (normalize_store_path(p) for p in iter_store_paths(raw_entry))
            if cleaned
        ]
        result.append(
            NixProfileEntry(
                key=key,
                index=coerce_index(key, raw_entry),
                name=str(raw_entry.get("name", "") or ""),
                attr_path=str(raw_entry.get("attrPath", "") or ""),
                store_paths=paths,
            )
        )
    return result

View File

@@ -0,0 +1,19 @@
from __future__ import annotations
import json
from typing import Any, Dict
def parse_profile_list_json(raw: str) -> Dict[str, Any]:
    """
    Parse JSON output from `nix profile list --json`.
    Raises SystemExit with a helpful excerpt on parse failure.
    """
    try:
        parsed = json.loads(raw)
    except json.JSONDecodeError as err:
        # Cap the excerpt so a huge malformed payload does not flood the log.
        snippet = (raw or "")[:5000]
        raise SystemExit(
            f"[nix] Failed to parse `nix profile list --json`: {err}\n{snippet}"
        ) from err
    return parsed

View File

@@ -0,0 +1,28 @@
from __future__ import annotations
from typing import Any
def extract_stdout_text(result: Any) -> str:
    """
    Normalize different runner return types to a stdout string.

    Supported patterns:
      - str -> returned as-is
      - bytes/bytearray -> decoded UTF-8 (replacement on errors)
      - object with `.stdout` (str or bytes) -> that attribute
      - anything else -> str(result)
    """
    if isinstance(result, (bytes, bytearray)):
        return bytes(result).decode("utf-8", errors="replace")
    if isinstance(result, str):
        return result
    captured = getattr(result, "stdout", None)
    if isinstance(captured, (bytes, bytearray)):
        return bytes(captured).decode("utf-8", errors="replace")
    if isinstance(captured, str):
        return captured
    return str(result)

View File

@@ -0,0 +1,69 @@
from __future__ import annotations
import re
from typing import TYPE_CHECKING, List, Tuple
from .runner import CommandRunner
if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
class NixProfileListReader:
    """
    Reads `nix profile list` plain-text output and extracts
    (index, store-path-prefix) pairs for matching.
    """

    def __init__(self, runner: CommandRunner) -> None:
        # Runner used to invoke nix inside the repository context.
        self._runner = runner

    @staticmethod
    def _store_prefix(path: str) -> str:
        """Reduce a store path to its /nix/store/<hash>-<name> prefix."""
        candidate = (path or "").strip()
        match = re.match(r"^(/nix/store/[0-9a-z]{32}-[^/ \t]+)", candidate)
        if match:
            return match.group(1)
        return candidate

    def entries(self, ctx: "RepoContext") -> List[Tuple[int, str]]:
        """Return unique (index, store-prefix) pairs; [] when nix fails."""
        result = self._runner.run(ctx, "nix profile list", allow_failure=True)
        if result.returncode != 0:
            return []
        line_re = re.compile(
            r"^\s*(\d+)\s+.*?(/nix/store/[0-9a-z]{32}-[^/ \t]+)",
            re.MULTILINE,
        )
        collected: List[Tuple[int, str]] = []
        for found in line_re.finditer(result.stdout or ""):
            try:
                index = int(found.group(1))
            except Exception:
                continue
            collected.append((index, self._store_prefix(found.group(2))))
        # Keep the first occurrence of each index.
        known: set[int] = set()
        unique: List[Tuple[int, str]] = []
        for index, prefix in collected:
            if index in known:
                continue
            known.add(index)
            unique.append((index, prefix))
        return unique

    def indices_matching_store_prefixes(self, ctx: "RepoContext", prefixes: List[str]) -> List[int]:
        """Return profile indices whose store prefix equals any given prefix."""
        wanted = [self._store_prefix(p) for p in prefixes if p]
        wanted = [p for p in wanted if p]
        if not wanted:
            return []
        matched = [idx for idx, prefix in self.entries(ctx) if prefix in wanted]
        known: set[int] = set()
        unique: List[int] = []
        for idx in matched:
            if idx not in known:
                known.add(idx)
                unique.append(idx)
        return unique

View File

@@ -0,0 +1,87 @@
from __future__ import annotations
import random
import time
from dataclasses import dataclass
from typing import Iterable, TYPE_CHECKING
from .types import RunResult
if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
from .runner import CommandRunner
@dataclass(frozen=True)
class RetryPolicy:
    """Tuning knobs for the GitHub rate-limit retry backoff."""
    # Maximum number of install attempts before giving up.
    max_attempts: int = 7
    # Seed for the Fibonacci backoff (first two delays equal this value).
    base_delay_seconds: int = 30
    # Inclusive bounds of the random jitter added to each delay.
    jitter_seconds_min: int = 0
    jitter_seconds_max: int = 60
class GitHubRateLimitRetry:
    """
    Retries nix install commands only when the error looks like a GitHub API
    rate limit (HTTP 403). Backoff: Fibonacci(base, base, ...) + random jitter.
    """

    def __init__(self, policy: RetryPolicy | None = None) -> None:
        self._policy = policy or RetryPolicy()

    def run_with_retry(
        self,
        ctx: "RepoContext",
        runner: "CommandRunner",
        install_cmd: str,
    ) -> RunResult:
        """Run *install_cmd*, retrying per policy on rate-limit failures."""
        policy = self._policy
        quiet = bool(getattr(ctx, "quiet", False))
        schedule = list(self._fibonacci_backoff(policy.base_delay_seconds, policy.max_attempts))
        outcome: RunResult | None = None
        for attempt, delay in enumerate(schedule, start=1):
            if not quiet:
                print(f"[nix] attempt {attempt}/{policy.max_attempts}: {install_cmd}")
            outcome = runner.run(ctx, install_cmd, allow_failure=True)
            if outcome.returncode == 0:
                return outcome
            # Any non-rate-limit failure is returned immediately (no retry).
            if not self._is_github_rate_limit_error(f"{outcome.stdout}\n{outcome.stderr}"):
                return outcome
            if attempt >= policy.max_attempts:
                break
            jitter = random.randint(policy.jitter_seconds_min, policy.jitter_seconds_max)
            pause = delay + jitter
            if not quiet:
                print(
                    "[nix] GitHub rate limit detected (403). "
                    f"Retrying in {pause}s (base={delay}s, jitter={jitter}s)..."
                )
            time.sleep(pause)
        if outcome is not None:
            return outcome
        return RunResult(returncode=1, stdout="", stderr="nix install retry failed")

    @staticmethod
    def _is_github_rate_limit_error(text: str) -> bool:
        """True when the output contains a known GitHub 403/rate-limit marker."""
        lowered = (text or "").lower()
        markers = (
            "http error 403",
            "rate limit exceeded",
            "github api rate limit",
            "api rate limit exceeded",
        )
        return any(marker in lowered for marker in markers)

    @staticmethod
    def _fibonacci_backoff(base: int, attempts: int) -> Iterable[int]:
        """Yield `attempts` delays: base, base, 2*base, 3*base, ..."""
        current, following = base, base
        for _ in range(max(1, attempts)):
            yield current
            current, following = following, current + following

View File

@@ -0,0 +1,64 @@
from __future__ import annotations
import subprocess
from typing import TYPE_CHECKING
from .types import RunResult
if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
class CommandRunner:
    """
    Executes commands (shell=True) inside a repository directory (if provided).
    Supports preview mode and compact failure output logging.
    """

    def run(self, ctx: "RepoContext", cmd: str, allow_failure: bool) -> RunResult:
        """Run *cmd*; raise SystemExit on failure unless *allow_failure*."""
        workdir = getattr(ctx, "repo_dir", None) or getattr(ctx, "repo_path", None)
        quiet = bool(getattr(ctx, "quiet", False))
        if bool(getattr(ctx, "preview", False)):
            # Preview mode: report the command, execute nothing.
            if not quiet:
                print(f"[preview] {cmd}")
            return RunResult(returncode=0, stdout="", stderr="")
        try:
            proc = subprocess.run(
                cmd,
                shell=True,
                cwd=workdir,
                check=False,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
            )
        except Exception as exc:
            # Spawn failure (not a non-zero exit): surface or swallow.
            if not allow_failure:
                raise
            return RunResult(returncode=1, stdout="", stderr=str(exc))
        outcome = RunResult(
            returncode=proc.returncode,
            stdout=proc.stdout or "",
            stderr=proc.stderr or "",
        )
        if outcome.returncode != 0:
            if not quiet:
                self._print_compact_failure(outcome)
            if not allow_failure:
                raise SystemExit(outcome.returncode)
        return outcome

    @staticmethod
    def _print_compact_failure(res: RunResult) -> None:
        """Print only the tail of stdout/stderr to keep failure logs short."""
        stdout_tail = (res.stdout or "").strip()
        stderr_tail = (res.stderr or "").strip()
        if stdout_tail:
            print("[nix] stdout (last lines):")
            print("\n".join(stdout_tail.splitlines()[-20:]))
        if stderr_tail:
            print("[nix] stderr (last lines):")
            print("\n".join(stderr_tail.splitlines()[-40:]))

View File

@@ -0,0 +1,76 @@
from __future__ import annotations
import re
from typing import List
class NixConflictTextParser:
    """
    Extracts actionable data from nix conflict error text: suggested
    `nix profile remove <token>` tokens and the store-path prefixes of
    the already-installed (existing) packages.
    """

    @staticmethod
    def _store_prefix(path: str) -> str:
        """Reduce a store path to its /nix/store/<hash>-<name> prefix."""
        candidate = (path or "").strip()
        match = re.match(r"^(/nix/store/[0-9a-z]{32}-[^/ \t]+)", candidate)
        return match.group(1) if match else candidate

    def remove_tokens(self, text: str) -> List[str]:
        """Collect unique tokens from suggested `nix profile remove` lines."""
        suggestion_re = re.compile(
            r"^\s*nix profile remove\s+([^\s'\"`]+|'[^']+'|\"[^\"]+\")\s*$",
            re.MULTILINE,
        )
        found: List[str] = []
        for match in suggestion_re.finditer(text or ""):
            token = (match.group(1) or "").strip()
            single_quoted = token.startswith("'") and token.endswith("'")
            double_quoted = token.startswith('"') and token.endswith('"')
            if single_quoted or double_quoted:
                token = token[1:-1]
            if token:
                found.append(token)
        return list(dict.fromkeys(found))

    def existing_store_prefixes(self, text: str) -> List[str]:
        """Collect store prefixes listed under the 'existing package' section."""
        store_re = re.compile(r"^\s*(/nix/store/[0-9a-z]{32}-[^ \t]+)")
        collected: List[str] = []
        in_existing_section = False
        for raw_line in (text or "").splitlines():
            stripped = raw_line.strip()
            if "An existing package already provides the following file" in stripped:
                in_existing_section = True
                continue
            if "This is the conflicting file from the new package" in stripped:
                # Paths after this marker belong to the new package: ignore them.
                in_existing_section = False
                continue
            if in_existing_section:
                match = store_re.match(raw_line)
                if match:
                    collected.append(match.group(1))
        prefixes = [self._store_prefix(p) for p in collected if p]
        return list(dict.fromkeys(p for p in prefixes if p))

View File

@@ -0,0 +1,10 @@
from __future__ import annotations
from dataclasses import dataclass
@dataclass(frozen=True)
class RunResult:
    """Immutable result of one executed command."""
    # Process exit status (0 means success).
    returncode: int
    # Captured standard output; empty string when none.
    stdout: str
    # Captured standard error; empty string when none.
    stderr: str

View File

@@ -1,238 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import json
import os
import shutil
import subprocess
from typing import TYPE_CHECKING, List, Tuple
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command
if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
class NixFlakeInstaller(BaseInstaller):
    """
    Installs flake outputs of a repository into the user's Nix profile.

    Strategy per output: plain `nix profile install`; on failure, fall back
    to index-based `nix profile upgrade` / `nix profile remove` + reinstall.
    NOTE(review): relies on numeric profile indices, which newer nix
    versions reject — presumably why this file was replaced.
    """
    # Installer layer identifier used by the install pipeline.
    layer = "nix"
    # Marker file whose presence enables this installer.
    FLAKE_FILE = "flake.nix"

    def supports(self, ctx: "RepoContext") -> bool:
        # Escape hatch (used by tests/CI) to disable this installer entirely.
        if os.environ.get("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER") == "1":
            if not ctx.quiet:
                print("[INFO] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 skipping NixFlakeInstaller.")
            return False
        # Requires the nix binary and a flake.nix in the repository root.
        if shutil.which("nix") is None:
            return False
        return os.path.exists(os.path.join(ctx.repo_dir, self.FLAKE_FILE))

    def _profile_outputs(self, ctx: "RepoContext") -> List[Tuple[str, bool]]:
        # (output_name, allow_failure)
        # The package-manager repo itself must install "pkgmgr"; its
        # "default" output is best-effort only.
        if ctx.identifier in {"pkgmgr", "package-manager"}:
            return [("pkgmgr", False), ("default", True)]
        return [("default", False)]

    def _installable(self, ctx: "RepoContext", output: str) -> str:
        # Flake installable reference: "<repo_dir>#<output>".
        return f"{ctx.repo_dir}#{output}"

    def _run(self, ctx: "RepoContext", cmd: str, allow_failure: bool = True):
        # Thin wrapper that threads repo dir / preview flags into run_command.
        return run_command(
            cmd,
            cwd=ctx.repo_dir,
            preview=ctx.preview,
            allow_failure=allow_failure,
        )

    def _profile_list_json(self, ctx: "RepoContext") -> dict:
        """
        Read current Nix profile entries as JSON (best-effort).
        NOTE: Nix versions differ:
        - Newer: {"elements": [ { "index": 0, "attrPath": "...", ... }, ... ]}
        - Older: {"elements": [ "nixpkgs#hello", ... ]} (strings)
        We return {} on failure or in preview mode.
        """
        if ctx.preview:
            return {}
        proc = subprocess.run(
            ["nix", "profile", "list", "--json"],
            check=False,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
            env=os.environ.copy(),
        )
        if proc.returncode != 0:
            return {}
        try:
            return json.loads(proc.stdout or "{}")
        except json.JSONDecodeError:
            return {}

    def _find_installed_indices_for_output(self, ctx: "RepoContext", output: str) -> List[int]:
        """
        Find installed profile indices for a given output.
        Works across Nix JSON variants:
        - If elements are dicts: we can extract indices.
        - If elements are strings: we cannot extract indices -> return [].
        """
        data = self._profile_list_json(ctx)
        elements = data.get("elements", []) or []
        matches: List[int] = []
        for el in elements:
            # Legacy JSON format: plain strings -> no index information
            if not isinstance(el, dict):
                continue
            idx = el.get("index")
            if idx is None:
                continue
            attr_path = el.get("attrPath") or el.get("attr_path") or ""
            pname = el.get("pname") or ""
            name = el.get("name") or ""
            # Match by exact attrPath, then pname/name, then ".<output>" suffix.
            if attr_path == output:
                matches.append(int(idx))
                continue
            if pname == output or name == output:
                matches.append(int(idx))
                continue
            if isinstance(attr_path, str) and attr_path.endswith(f".{output}"):
                matches.append(int(idx))
                continue
        return matches

    def _upgrade_index(self, ctx: "RepoContext", index: int) -> bool:
        # Returns True when the index-based upgrade succeeded.
        cmd = f"nix profile upgrade --refresh {index}"
        if not ctx.quiet:
            print(f"[nix] upgrade: {cmd}")
        res = self._run(ctx, cmd, allow_failure=True)
        return res.returncode == 0

    def _remove_index(self, ctx: "RepoContext", index: int) -> None:
        # Best-effort removal; failures are tolerated.
        cmd = f"nix profile remove {index}"
        if not ctx.quiet:
            print(f"[nix] remove: {cmd}")
        self._run(ctx, cmd, allow_failure=True)

    def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
        """
        Install output; on failure, try index-based upgrade/remove+install if possible.
        """
        installable = self._installable(ctx, output)
        install_cmd = f"nix profile install {installable}"
        if not ctx.quiet:
            print(f"[nix] install: {install_cmd}")
        res = self._run(ctx, install_cmd, allow_failure=True)
        if res.returncode == 0:
            if not ctx.quiet:
                print(f"[nix] output '{output}' successfully installed.")
            return
        if not ctx.quiet:
            print(
                f"[nix] install failed for '{output}' (exit {res.returncode}), "
                "trying index-based upgrade/remove+install..."
            )
        indices = self._find_installed_indices_for_output(ctx, output)
        # 1) Try upgrading existing indices (only possible on newer JSON format)
        upgraded = False
        for idx in indices:
            if self._upgrade_index(ctx, idx):
                upgraded = True
                if not ctx.quiet:
                    print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
        if upgraded:
            return
        # 2) Remove matching indices and retry install
        if indices and not ctx.quiet:
            print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
        for idx in indices:
            self._remove_index(ctx, idx)
        final = self._run(ctx, install_cmd, allow_failure=True)
        if final.returncode == 0:
            if not ctx.quiet:
                print(f"[nix] output '{output}' successfully re-installed.")
            return
        msg = f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})"
        print(msg)
        if not allow_failure:
            raise SystemExit(final.returncode)
        print(f"[WARNING] Continuing despite failure of optional output '{output}'.")

    def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
        """
        force_update path:
        - Prefer upgrading existing entries via indices (if we can discover them).
        - If no indices (legacy JSON) or upgrade fails, fall back to install-only logic.
        """
        indices = self._find_installed_indices_for_output(ctx, output)
        upgraded_any = False
        for idx in indices:
            if self._upgrade_index(ctx, idx):
                upgraded_any = True
                if not ctx.quiet:
                    print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
        if upgraded_any:
            # Make upgrades visible to tests
            print(f"[nix] output '{output}' successfully upgraded.")
            return
        if indices and not ctx.quiet:
            print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
        for idx in indices:
            self._remove_index(ctx, idx)
        # Ensure installed (includes its own fallback logic)
        self._install_only(ctx, output, allow_failure)
        # Make upgrades visible to tests (semantic: update requested)
        print(f"[nix] output '{output}' successfully upgraded.")

    def run(self, ctx: "RepoContext") -> None:
        # Entry point: install (or force-upgrade) every configured output.
        if not self.supports(ctx):
            return
        outputs = self._profile_outputs(ctx)
        if not ctx.quiet:
            print(
                "[nix] flake detected in "
                f"{ctx.identifier}, ensuring outputs: "
                + ", ".join(name for name, _ in outputs)
            )
        for output, allow_failure in outputs:
            if ctx.force_update:
                self._force_upgrade_output(ctx, output, allow_failure)
            else:
                self._install_only(ctx, output, allow_failure)

View File

@@ -0,0 +1,21 @@
# src/pkgmgr/actions/mirror/remote_check.py
from __future__ import annotations
from typing import Tuple
from pkgmgr.core.git import GitError, run_git
def probe_mirror(url: str, repo_dir: str) -> Tuple[bool, str]:
    """
    Probe a remote mirror URL using `git ls-remote`.
    Returns:
        (True, "") on success,
        (False, error_message) on failure.
    """
    try:
        # Only the exit status matters; output is ignored.
        run_git(["ls-remote", url], cwd=repo_dir)
    except GitError as exc:
        return False, str(exc)
    return True, ""

View File

@@ -0,0 +1,70 @@
# src/pkgmgr/actions/mirror/remote_provision.py
from __future__ import annotations
from typing import List
from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, ensure_remote_repo
from pkgmgr.core.remote_provisioning.ensure import EnsureOptions
from .context import build_context
from .git_remote import determine_primary_remote_url
from .types import Repository
from .url_utils import normalize_provider_host, parse_repo_from_git_url
def ensure_remote_repository(
    repo: Repository,
    repositories_base_dir: str,
    all_repos: List[Repository],
    preview: bool,
) -> None:
    """
    Ensure the repository exists on its primary remote provider.

    Derives host/owner/name from the primary remote URL, then delegates
    creation to ensure_remote_repo(). Progress and failures are printed;
    provisioning exceptions are caught and reported, never re-raised.
    """
    ctx = build_context(repo, repositories_base_dir, all_repos)
    resolved_mirrors = ctx.resolved_mirrors
    primary_url = determine_primary_remote_url(repo, resolved_mirrors)
    if not primary_url:
        print("[INFO] No remote URL could be derived; skipping remote provisioning.")
        return
    # Parse provider coordinates out of the git URL; all three are required.
    host_raw, owner_from_url, name_from_url = parse_repo_from_git_url(primary_url)
    host = normalize_provider_host(host_raw)
    if not host or not owner_from_url or not name_from_url:
        print("[WARN] Could not derive host/owner/repository from URL; cannot ensure remote repo.")
        print(f" url={primary_url!r}")
        print(f" host={host!r}, owner={owner_from_url!r}, repository={name_from_url!r}")
        return
    print("------------------------------------------------------------")
    print(f"[REMOTE ENSURE] {ctx.identifier}")
    print(f"[REMOTE ENSURE] host: {host}")
    print("------------------------------------------------------------")
    spec = RepoSpec(
        host=str(host),
        owner=str(owner_from_url),
        name=str(name_from_url),
        # Repositories default to private when not configured otherwise.
        private=bool(repo.get("private", True)),
        description=str(repo.get("description", "")),
    )
    # Optional explicit provider kind from config (e.g. "github", "gitea").
    provider_kind = str(repo.get("provider", "")).strip().lower() or None
    try:
        result = ensure_remote_repo(
            spec,
            provider_hint=ProviderHint(kind=provider_kind),
            options=EnsureOptions(
                preview=preview,
                interactive=True,
                allow_prompt=True,
                save_prompt_token_to_keyring=True,
            ),
        )
        print(f"[REMOTE ENSURE] {result.status.upper()}: {result.message}")
        if result.url:
            print(f"[REMOTE ENSURE] URL: {result.url}")
    except Exception as exc:  # noqa: BLE001
        # Provisioning is best-effort: report and continue with other repos.
        print(f"[ERROR] Remote provisioning failed: {exc}")
    print()

View File

@@ -1,23 +1,20 @@
# src/pkgmgr/actions/mirror/setup_cmd.py
from __future__ import annotations
from typing import List, Tuple
from pkgmgr.core.git import run_git, GitError
from typing import List
from .context import build_context
from .git_remote import determine_primary_remote_url, ensure_origin_remote
from .git_remote import ensure_origin_remote, determine_primary_remote_url
from .remote_check import probe_mirror
from .remote_provision import ensure_remote_repository
from .types import Repository
def _setup_local_mirrors_for_repo(
repo: Repository,
repositories_base_dir: str,
all_repos: List[Repository],
preview: bool,
) -> None:
"""
Ensure local Git state is sane (currently: 'origin' remote).
"""
ctx = build_context(repo, repositories_base_dir, all_repos)
print("------------------------------------------------------------")
@@ -29,103 +26,57 @@ def _setup_local_mirrors_for_repo(
print()
def _probe_mirror(url: str, repo_dir: str) -> Tuple[bool, str]:
"""
Probe a remote mirror by running `git ls-remote <url>`.
Returns:
(True, "") on success,
(False, error_message) on failure.
Wichtig:
- Wir werten ausschließlich den Exit-Code aus.
- STDERR kann Hinweise/Warnings enthalten und ist NICHT automatisch ein Fehler.
"""
try:
# Wir ignorieren stdout komplett; wichtig ist nur, dass der Befehl ohne
# GitError (also Exit-Code 0) durchläuft.
run_git(["ls-remote", url], cwd=repo_dir)
return True, ""
except GitError as exc:
return False, str(exc)
def _setup_remote_mirrors_for_repo(
repo: Repository,
repositories_base_dir: str,
all_repos: List[Repository],
preview: bool,
ensure_remote: bool,
) -> None:
"""
Remote-side setup / validation.
Aktuell werden nur **nicht-destruktive Checks** gemacht:
- Für jeden Mirror (aus config + MIRRORS-Datei, file gewinnt):
* `git ls-remote <url>` wird ausgeführt.
* Bei Exit-Code 0 → [OK]
* Bei Fehler → [WARN] + Details aus der GitError-Exception
Es werden **keine** Provider-APIs aufgerufen und keine Repos angelegt.
"""
ctx = build_context(repo, repositories_base_dir, all_repos)
resolved_m = ctx.resolved_mirrors
resolved_mirrors = ctx.resolved_mirrors
print("------------------------------------------------------------")
print(f"[MIRROR SETUP:REMOTE] {ctx.identifier}")
print(f"[MIRROR SETUP:REMOTE] dir: {ctx.repo_dir}")
print("------------------------------------------------------------")
if not resolved_m:
# Optional: Fallback auf eine heuristisch bestimmte URL, falls wir
# irgendwann "automatisch anlegen" implementieren wollen.
primary_url = determine_primary_remote_url(repo, resolved_m)
if ensure_remote:
ensure_remote_repository(
repo,
repositories_base_dir=repositories_base_dir,
all_repos=all_repos,
preview=preview,
)
if not resolved_mirrors:
primary_url = determine_primary_remote_url(repo, resolved_mirrors)
if not primary_url:
print(
"[INFO] No mirrors configured (config or MIRRORS file), and no "
"primary URL could be derived from provider/account/repository."
)
print("[INFO] No mirrors configured and no primary URL available.")
print()
return
ok, error_message = _probe_mirror(primary_url, ctx.repo_dir)
ok, error_message = probe_mirror(primary_url, ctx.repo_dir)
if ok:
print(f"[OK] Remote mirror (primary) is reachable: {primary_url}")
print(f"[OK] primary: {primary_url}")
else:
print("[WARN] Primary remote URL is NOT reachable:")
print(f" {primary_url}")
if error_message:
print(" Details:")
for line in error_message.splitlines():
print(f" {line}")
print(f"[WARN] primary: {primary_url}")
for line in error_message.splitlines():
print(f" {line}")
print()
print(
"[INFO] Remote checks are non-destructive and only use `git ls-remote` "
"to probe mirror URLs."
)
print()
return
# Normaler Fall: wir haben benannte Mirrors aus config/MIRRORS
for name, url in sorted(resolved_m.items()):
ok, error_message = _probe_mirror(url, ctx.repo_dir)
for name, url in sorted(resolved_mirrors.items()):
ok, error_message = probe_mirror(url, ctx.repo_dir)
if ok:
print(f"[OK] Remote mirror '{name}' is reachable: {url}")
print(f"[OK] {name}: {url}")
else:
print(f"[WARN] Remote mirror '{name}' is NOT reachable:")
print(f" {url}")
if error_message:
print(" Details:")
for line in error_message.splitlines():
print(f" {line}")
print(f"[WARN] {name}: {url}")
for line in error_message.splitlines():
print(f" {line}")
print()
print(
"[INFO] Remote checks are non-destructive and only use `git ls-remote` "
"to probe mirror URLs."
)
print()
def setup_mirrors(
@@ -135,22 +86,12 @@ def setup_mirrors(
preview: bool = False,
local: bool = True,
remote: bool = True,
ensure_remote: bool = False,
) -> None:
"""
Setup mirrors for the selected repositories.
local:
- Configure local Git remotes (currently: ensure 'origin' is present and
points to a reasonable URL).
remote:
- Non-destructive remote checks using `git ls-remote` for each mirror URL.
Es werden keine Repositories auf dem Provider angelegt.
"""
for repo in selected_repos:
if local:
_setup_local_mirrors_for_repo(
repo,
repo=repo,
repositories_base_dir=repositories_base_dir,
all_repos=all_repos,
preview=preview,
@@ -158,8 +99,9 @@ def setup_mirrors(
if remote:
_setup_remote_mirrors_for_repo(
repo,
repo=repo,
repositories_base_dir=repositories_base_dir,
all_repos=all_repos,
preview=preview,
ensure_remote=ensure_remote,
)

View File

@@ -0,0 +1,111 @@
# src/pkgmgr/actions/mirror/url_utils.py
from __future__ import annotations
from urllib.parse import urlparse
from typing import Optional, Tuple
def hostport_from_git_url(url: str) -> Tuple[str, Optional[str]]:
    """
    Extract (host, port) from a git remote URL.

    Handles scheme URLs (ssh://, https://), scp-like "git@host:path"
    (no explicit port), bracketed IPv6 literals, and bare "host/path"
    strings. Port is None when absent.
    """
    raw = (url or "").strip()
    if not raw:
        return "", None
    if "://" in raw:
        netloc = (urlparse(raw).netloc or "").strip()
        # Drop userinfo ("git@...").
        if "@" in netloc:
            netloc = netloc.split("@", 1)[1]
        # Bracketed IPv6 literal, optionally followed by ":port".
        if netloc.startswith("[") and "]" in netloc:
            close = netloc.index("]")
            host = netloc[1:close]
            remainder = netloc[close + 1 :]
            port = remainder[1:] if remainder.startswith(":") else None
            return host.strip(), (port.strip() if port else None)
        if ":" in netloc:
            host, port = netloc.rsplit(":", 1)
            return host.strip(), (port.strip() or None)
        return netloc.strip(), None
    # SCP-like syntax: user@host:path — the colon separates path, not port.
    if "@" in raw and ":" in raw:
        host = raw.split("@", 1)[1].split(":", 1)[0].strip()
        return host, None
    # Bare "host/path" fallback.
    return raw.split("/", 1)[0].strip(), None
def normalize_provider_host(host: str) -> str:
    """Lower-case a host, unwrapping brackets and stripping a single :port."""
    cleaned = (host or "").strip()
    if not cleaned:
        return ""
    if cleaned.startswith("[") and "]" in cleaned:
        cleaned = cleaned[1:cleaned.index("]")]
    # Exactly one colon means "host:port"; more means an IPv6 literal.
    if cleaned.count(":") == 1:
        cleaned = cleaned.rsplit(":", 1)[0]
    return cleaned.strip().lower()
def _strip_dot_git(name: str) -> str:
n = (name or "").strip()
if n.lower().endswith(".git"):
return n[:-4]
return n
def parse_repo_from_git_url(url: str) -> Tuple[str, Optional[str], Optional[str]]:
    """
    Parse (host, owner, repo_name) from common Git remote URLs.
    Supports:
    - ssh://git@host:2201/owner/repo.git
    - https://host/owner/repo.git
    - git@host:owner/repo.git
    - host/owner/repo(.git) (best-effort)
    Returns:
    (host, owner, repo_name) with owner/repo possibly None if not derivable.
    """
    raw = (url or "").strip()
    if not raw:
        return "", None, None

    def _owner_repo(path: str) -> Tuple[Optional[str], Optional[str]]:
        # Take the first two path segments as owner and repo name.
        segments = [s for s in path.strip("/").split("/") if s]
        if len(segments) < 2:
            return None, None
        repo_name = segments[1].strip()
        if repo_name.lower().endswith(".git"):
            repo_name = repo_name[:-4]
        return segments[0], repo_name

    # URL-style (ssh://, https://, http://)
    if "://" in raw:
        parsed = urlparse(raw)
        host = (parsed.hostname or "").strip()
        owner, repo_name = _owner_repo(parsed.path or "")
        return host, owner, repo_name
    # SCP-like: git@host:owner/repo.git
    if "@" in raw and ":" in raw:
        tail = raw.split("@", 1)[1]
        host = tail.split(":", 1)[0].strip()
        owner, repo_name = _owner_repo(tail.split(":", 1)[1])
        return host, owner, repo_name
    # Fallback: host/owner/repo(.git)
    host = raw.split("/", 1)[0].strip()
    remainder = raw.split("/", 1)[1] if "/" in raw else ""
    owner, repo_name = _owner_repo(remainder)
    return host, owner, repo_name

View File

@@ -0,0 +1,5 @@
from __future__ import annotations
from .workflow import publish
__all__ = ["publish"]

View File

@@ -0,0 +1,17 @@
from __future__ import annotations
from pkgmgr.core.git import run_git
from pkgmgr.core.version.semver import SemVer, is_semver_tag
def head_semver_tags(cwd: str = ".") -> list[str]:
    """Return v-prefixed semver tags pointing at HEAD, sorted ascending."""
    raw = run_git(["tag", "--points-at", "HEAD"], cwd=cwd)
    if not raw:
        return []
    candidates = (line.strip() for line in raw.splitlines())
    matching = [t for t in candidates if t and t.startswith("v") and is_semver_tag(t)]
    if not matching:
        return []
    return sorted(matching, key=SemVer.parse)

View File

@@ -0,0 +1,24 @@
from __future__ import annotations
from urllib.parse import urlparse
from .types import PyPITarget
def parse_pypi_project_url(url: str) -> PyPITarget | None:
    """Return a PyPITarget for pypi.org/test.pypi.org project URLs, else None."""
    candidate = (url or "").strip()
    if not candidate:
        return None
    parsed = urlparse(candidate)
    host = (parsed.netloc or "").lower()
    # Only the two official PyPI hosts count as publish targets.
    if host not in ("pypi.org", "test.pypi.org"):
        return None
    segments = [s for s in (parsed.path or "").strip("/").split("/") if s]
    if len(segments) < 2 or segments[0] != "project":
        return None
    return PyPITarget(host=host, project=segments[1])

View File

@@ -0,0 +1,9 @@
from __future__ import annotations
from dataclasses import dataclass
@dataclass(frozen=True)
class PyPITarget:
    """A (host, project) pair identifying a PyPI publish target."""
    # "pypi.org" or "test.pypi.org" (as parsed from the project URL).
    host: str
    # Project name from the /project/<name>/ URL segment.
    project: str

View File

@@ -0,0 +1,109 @@
from __future__ import annotations
import glob
import os
import shutil
import subprocess
import sys

from pkgmgr.actions.mirror.io import read_mirrors_file
from pkgmgr.actions.mirror.types import Repository
from pkgmgr.core.credentials.resolver import ResolutionOptions, TokenResolver
from pkgmgr.core.version.semver import SemVer

from .git_tags import head_semver_tags
from .pypi_url import parse_pypi_project_url
def _require_tool(module: str) -> None:
try:
subprocess.run(
["python", "-m", module, "--help"],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
check=True,
)
except Exception as exc:
raise RuntimeError(
f"Required Python module '{module}' is not available. "
f"Install it via: pip install {module}"
) from exc
def publish(
    repo: Repository,
    repo_dir: str,
    *,
    preview: bool = False,
    interactive: bool = True,
    allow_prompt: bool = True,
) -> None:
    """Build and upload a repository to PyPI when it is configured for it.

    Preconditions (failing one either skips silently or aborts):
      - exactly one mirror URL in the MIRRORS file points at a
        pypi.org / test.pypi.org project page (zero → skip, multiple → error)
      - HEAD carries at least one ``v``-prefixed SemVer tag

    Args:
        repo: Repository mapping (currently unused beyond the signature;
            mirrors are read from *repo_dir*).
        repo_dir: Local checkout directory to build and upload from.
        preview: If True, stop after announcing what would happen.
        interactive: Forwarded to the credential resolver.
        allow_prompt: Whether the resolver may prompt the user for a token.

    Raises:
        RuntimeError: on multiple PyPI mirrors, missing build/twine modules,
            or an empty dist/ after building.
    """
    mirrors = read_mirrors_file(repo_dir)
    targets = [
        target
        for target in (parse_pypi_project_url(url) for url in mirrors.values())
        if target
    ]
    if not targets:
        print("[INFO] No PyPI mirror found. Skipping publish.")
        return
    if len(targets) > 1:
        raise RuntimeError("Multiple PyPI mirrors found; refusing to publish.")

    tags = head_semver_tags(cwd=repo_dir)
    if not tags:
        print("[INFO] No version tag on HEAD. Skipping publish.")
        return
    tag = max(tags, key=SemVer.parse)

    target = targets[0]
    print(f"[INFO] Publishing {target.project} for tag {tag}")
    if preview:
        print("[PREVIEW] Would build and upload to PyPI.")
        return

    _require_tool("build")
    _require_tool("twine")

    # Always build from a clean dist/ so stale artifacts are never uploaded.
    dist_dir = os.path.join(repo_dir, "dist")
    if os.path.isdir(dist_dir):
        shutil.rmtree(dist_dir, ignore_errors=True)

    # Invoke build/twine through the running interpreter, not a bare
    # "python" from PATH, so the tools checked above are the ones used.
    subprocess.run(
        [sys.executable, "-m", "build"],
        cwd=repo_dir,
        check=True,
    )
    artifacts = sorted(glob.glob(os.path.join(dist_dir, "*")))
    if not artifacts:
        raise RuntimeError("No build artifacts found in dist/.")

    resolver = TokenResolver()
    token = resolver.get_token(
        provider_kind="pypi",
        host=target.host,
        owner=target.project,
        options=ResolutionOptions(
            interactive=interactive,
            allow_prompt=allow_prompt,
            save_prompt_token_to_keyring=True,
        ),
    ).token

    # twine reads credentials from the environment; API-token auth uses the
    # reserved "__token__" username.
    env = dict(os.environ)
    env["TWINE_USERNAME"] = "__token__"
    env["TWINE_PASSWORD"] = token
    subprocess.run(
        [sys.executable, "-m", "twine", "upload", *artifacts],
        cwd=repo_dir,
        env=env,
        check=True,
    )
    print("[INFO] Publish completed.")

View File

@@ -1,10 +1,13 @@
# src/pkgmgr/actions/release/workflow.py
from __future__ import annotations
from typing import Optional
import os
import sys
from typing import Optional
from pkgmgr.actions.branch import close_branch
from pkgmgr.core.git import get_current_branch, GitError
from pkgmgr.core.repository.paths import resolve_repo_paths
from .files import (
update_changelog,
@@ -55,8 +58,12 @@ def _release_impl(
print(f"New version: {new_ver_str} ({release_type})")
repo_root = os.path.dirname(os.path.abspath(pyproject_path))
paths = resolve_repo_paths(repo_root)
# --- Update versioned files ------------------------------------------------
update_pyproject_version(pyproject_path, new_ver_str, preview=preview)
changelog_message = update_changelog(
changelog_path,
new_ver_str,
@@ -64,38 +71,46 @@ def _release_impl(
preview=preview,
)
flake_path = os.path.join(repo_root, "flake.nix")
update_flake_version(flake_path, new_ver_str, preview=preview)
update_flake_version(paths.flake_nix, new_ver_str, preview=preview)
pkgbuild_path = os.path.join(repo_root, "PKGBUILD")
update_pkgbuild_version(pkgbuild_path, new_ver_str, preview=preview)
if paths.arch_pkgbuild:
update_pkgbuild_version(paths.arch_pkgbuild, new_ver_str, preview=preview)
else:
print("[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping.")
spec_path = os.path.join(repo_root, "package-manager.spec")
update_spec_version(spec_path, new_ver_str, preview=preview)
if paths.rpm_spec:
update_spec_version(paths.rpm_spec, new_ver_str, preview=preview)
else:
print("[INFO] No RPM spec file found. Skipping spec version update.")
effective_message: Optional[str] = message
if effective_message is None and isinstance(changelog_message, str):
if changelog_message.strip():
effective_message = changelog_message.strip()
debian_changelog_path = os.path.join(repo_root, "debian", "changelog")
package_name = os.path.basename(repo_root) or "package-manager"
update_debian_changelog(
debian_changelog_path,
package_name=package_name,
new_version=new_ver_str,
message=effective_message,
preview=preview,
)
if paths.debian_changelog:
update_debian_changelog(
paths.debian_changelog,
package_name=package_name,
new_version=new_ver_str,
message=effective_message,
preview=preview,
)
else:
print("[INFO] No debian changelog found. Skipping debian/changelog update.")
update_spec_changelog(
spec_path=spec_path,
package_name=package_name,
new_version=new_ver_str,
message=effective_message,
preview=preview,
)
if paths.rpm_spec:
update_spec_changelog(
spec_path=paths.rpm_spec,
package_name=package_name,
new_version=new_ver_str,
message=effective_message,
preview=preview,
)
# --- Git commit / tag / push ----------------------------------------------
commit_msg = f"Release version {new_ver_str}"
tag_msg = effective_message or commit_msg
@@ -103,12 +118,12 @@ def _release_impl(
files_to_add = [
pyproject_path,
changelog_path,
flake_path,
pkgbuild_path,
spec_path,
debian_changelog_path,
paths.flake_nix,
paths.arch_pkgbuild,
paths.rpm_spec,
paths.debian_changelog,
]
existing_files = [p for p in files_to_add if p and os.path.exists(p)]
existing_files = [p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)]
if preview:
for path in existing_files:

View File

@@ -1,58 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import shutil
from pkgmgr.actions.install import install_repos
from pkgmgr.actions.repository.pull import pull_with_verification
def update_repos(
    selected_repos,
    repositories_base_dir,
    bin_dir,
    all_repos,
    no_verification,
    system_update,
    preview: bool,
    quiet: bool,
    update_dependencies: bool,
    clone_mode: str,
    force_update: bool = True,
) -> None:
    """
    Update repositories by pulling latest changes and installing them.

    Args:
        selected_repos: Repositories to pull and reinstall.
        repositories_base_dir: Base directory containing local clones.
        bin_dir: Directory for installed executables/links.
        all_repos: Full repository configuration set.
        no_verification: Skip verification during pull/install.
        system_update: If True, additionally run system package updates.
        preview: If True, commands are previewed rather than executed.
        quiet: Reduce install output.
        update_dependencies: Also update dependency repositories.
        clone_mode: Clone mode forwarded to the installer.
        force_update: Forwarded to install_repos (defaults to True).
    """
    pull_with_verification(
        selected_repos,
        repositories_base_dir,
        all_repos,
        [],
        no_verification,
        preview,
    )
    install_repos(
        selected_repos,
        repositories_base_dir,
        bin_dir,
        all_repos,
        no_verification,
        preview,
        quiet,
        clone_mode,
        update_dependencies,
        force_update=force_update,
    )
    if system_update:
        # Imported here rather than at module level.
        from pkgmgr.core.command.run import run_command
        # Nix profile upgrade only when nix is on PATH.
        if shutil.which("nix") is not None:
            try:
                run_command("nix profile upgrade '.*'", preview=preview)
            except SystemExit as e:
                print(f"[Warning] 'nix profile upgrade' failed: {e}")
        # NOTE(review): yay and pacman are invoked unconditionally (no
        # shutil.which check) and back to back, which performs two update
        # passes and is Arch-specific — fails on non-Arch systems. Confirm
        # whether this is intended before reuse.
        run_command("sudo -u aur_builder yay -Syu --noconfirm", preview=preview)
        run_command("sudo pacman -Syyu --noconfirm", preview=preview)

View File

@@ -0,0 +1,10 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
from pkgmgr.actions.update.manager import UpdateManager
__all__ = [
"UpdateManager",
]

View File

@@ -0,0 +1,61 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
from typing import Any, Iterable
from pkgmgr.actions.update.system_updater import SystemUpdater
class UpdateManager:
    """
    Coordinates repository updates: pull + install for the selected
    repositories, followed by an optional system-wide package update.
    """

    def __init__(self) -> None:
        self._system_updater = SystemUpdater()

    def run(
        self,
        selected_repos: Iterable[Any],
        repositories_base_dir: str,
        bin_dir: str,
        all_repos: Any,
        no_verification: bool,
        system_update: bool,
        preview: bool,
        quiet: bool,
        update_dependencies: bool,
        clone_mode: str,
        force_update: bool = True,
    ) -> None:
        """Pull and reinstall the selected repositories, then optionally
        run the distro-specific system update."""
        # Imported inside the method (presumably to avoid import cycles
        # at module load time — TODO confirm).
        from pkgmgr.actions.install import install_repos
        from pkgmgr.actions.repository.pull import pull_with_verification

        pull_with_verification(
            selected_repos,
            repositories_base_dir,
            all_repos,
            [],
            no_verification,
            preview,
        )
        install_repos(
            selected_repos,
            repositories_base_dir,
            bin_dir,
            all_repos,
            no_verification,
            preview,
            quiet,
            clone_mode,
            update_dependencies,
            force_update=force_update,
        )

        if not system_update:
            return
        self._system_updater.run(preview=preview)

View File

@@ -0,0 +1,66 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import os
from dataclasses import dataclass
from typing import Dict
def read_os_release(path: str = "/etc/os-release") -> Dict[str, str]:
    """
    Parse an os-release(5) style file into a dict.

    Returns an empty dict if the file is missing or unreadable (avoids a
    TOCTOU race between an existence check and the open). Values may be
    wrapped in double OR single quotes per the os-release spec; both are
    stripped (the previous version only handled double quotes).
    """
    result: Dict[str, str] = {}
    try:
        with open(path, "r", encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                # Skip blanks, comments, and lines without KEY=VALUE shape.
                if not line or line.startswith("#") or "=" not in line:
                    continue
                key, value = line.split("=", 1)
                result[key.strip()] = value.strip().strip("\"'")
    except OSError:
        return {}
    return result
@dataclass(frozen=True)
class OSReleaseInfo:
    """
    Minimal /etc/os-release representation for distro-family detection.
    """

    # ID field, lowercased by load() (e.g. "arch", "ubuntu").
    id: str = ""
    # ID_LIKE field, lowercased by load(); whitespace-separated list.
    id_like: str = ""
    # PRETTY_NAME field, verbatim.
    pretty_name: str = ""

    @staticmethod
    def load() -> "OSReleaseInfo":
        """Build an instance from the host's /etc/os-release (ids lowercased)."""
        data = read_os_release()
        return OSReleaseInfo(
            id=(data.get("ID") or "").lower(),
            id_like=(data.get("ID_LIKE") or "").lower(),
            pretty_name=(data.get("PRETTY_NAME") or ""),
        )

    def ids(self) -> set[str]:
        """All distro identifiers: ID plus each whitespace-separated ID_LIKE token."""
        return {token for token in [self.id, *self.id_like.split()] if token}

    def is_arch_family(self) -> bool:
        """True for Arch Linux and derivatives."""
        return not self.ids().isdisjoint({"arch", "archlinux"})

    def is_debian_family(self) -> bool:
        """True for Debian/Ubuntu and derivatives."""
        return not self.ids().isdisjoint({"debian", "ubuntu"})

    def is_fedora_family(self) -> bool:
        """True for Fedora/RHEL-like distributions."""
        return not self.ids().isdisjoint(
            {"fedora", "rhel", "centos", "rocky", "almalinux"}
        )

View File

@@ -0,0 +1,96 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import platform
import shutil
from pkgmgr.actions.update.os_release import OSReleaseInfo
class SystemUpdater:
    """
    Runs the host's package-manager upgrade, dispatching on the detected
    distro family, plus a Nix profile upgrade when Nix is installed.
    """

    def run(self, *, preview: bool) -> None:
        """Upgrade Nix profiles (if present), then the native package manager."""
        from pkgmgr.core.command.run import run_command

        # Distro-agnostic: Nix profile upgrades (if Nix is present).
        if shutil.which("nix") is not None:
            try:
                run_command("nix profile upgrade '.*'", preview=preview)
            except SystemExit as e:
                print(f"[Warning] 'nix profile upgrade' failed: {e}")

        osr = OSReleaseInfo.load()
        dispatch = (
            (osr.is_arch_family, self._update_arch),
            (osr.is_debian_family, self._update_debian),
            (osr.is_fedora_family, self._update_fedora),
        )
        for matches, handler in dispatch:
            if matches():
                handler(preview=preview)
                return

        distro = osr.pretty_name or platform.platform()
        print(f"[Warning] Unsupported distribution for system update: {distro}")

    def _update_arch(self, *, preview: bool) -> None:
        """Upgrade an Arch-family system, preferring yay over plain pacman."""
        from pkgmgr.core.command.run import run_command

        has_sudo = shutil.which("sudo") is not None
        # yay covers official repos and the AUR in one pass; running pacman
        # afterwards would only repeat the same update.
        if has_sudo and shutil.which("yay"):
            run_command("sudo -u aur_builder yay -Syu --noconfirm", preview=preview)
            return
        if has_sudo and shutil.which("pacman"):
            run_command("sudo pacman -Syu --noconfirm", preview=preview)
            return
        print("[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman).")

    def _update_debian(self, *, preview: bool) -> None:
        """Upgrade a Debian/Ubuntu system non-interactively via apt-get."""
        from pkgmgr.core.command.run import run_command

        if not (shutil.which("sudo") and shutil.which("apt-get")):
            print("[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get).")
            return
        env = "DEBIAN_FRONTEND=noninteractive"
        run_command(f"sudo {env} apt-get update -y", preview=preview)
        run_command(f"sudo {env} apt-get -y dist-upgrade", preview=preview)

    def _update_fedora(self, *, preview: bool) -> None:
        """Upgrade a Fedora/RHEL-like system via dnf, falling back to microdnf."""
        from pkgmgr.core.command.run import run_command

        if shutil.which("sudo") is None:
            print("[Warning] Cannot update Fedora/RHEL-like system: missing sudo.")
            return
        for tool, command in (
            ("dnf", "sudo dnf -y upgrade"),
            ("microdnf", "sudo microdnf -y upgrade"),
        ):
            if shutil.which(tool):
                run_command(command, preview=preview)
                return
        print("[Warning] Cannot update Fedora/RHEL-like system: missing dnf/microdnf.")

View File

@@ -2,6 +2,7 @@ from .repos import handle_repos_command
from .config import handle_config
from .tools import handle_tools_command
from .release import handle_release
from .publish import handle_publish
from .version import handle_version
from .make import handle_make
from .changelog import handle_changelog
@@ -13,6 +14,7 @@ __all__ = [
"handle_config",
"handle_tools_command",
"handle_release",
"handle_publish",
"handle_version",
"handle_make",
"handle_changelog",

View File

@@ -1,32 +1,30 @@
# src/pkgmgr/cli/commands/mirror.py
from __future__ import annotations
import sys
from typing import Any, Dict, List
from pkgmgr.actions.mirror import (
diff_mirrors,
list_mirrors,
merge_mirrors,
setup_mirrors,
)
from pkgmgr.actions.mirror import diff_mirrors, list_mirrors, merge_mirrors, setup_mirrors
from pkgmgr.cli.context import CLIContext
Repository = Dict[str, Any]
def handle_mirror_command(
args,
ctx: CLIContext,
args: Any,
selected: List[Repository],
) -> None:
"""
Entry point for 'pkgmgr mirror' subcommands.
Subcommands:
- mirror list → list configured mirrors
- mirror diff → compare config vs MIRRORS file
- mirror merge → merge mirrors between config and MIRRORS file
- mirror setup → configure local Git + remote placeholders
- mirror list
- mirror diff
- mirror merge
- mirror setup
- mirror check
- mirror provision
"""
if not selected:
print("[INFO] No repositories selected for 'mirror' command.")
@@ -34,9 +32,6 @@ def handle_mirror_command(
subcommand = getattr(args, "subcommand", None)
# ------------------------------------------------------------
# mirror list
# ------------------------------------------------------------
if subcommand == "list":
source = getattr(args, "source", "all")
list_mirrors(
@@ -47,9 +42,6 @@ def handle_mirror_command(
)
return
# ------------------------------------------------------------
# mirror diff
# ------------------------------------------------------------
if subcommand == "diff":
diff_mirrors(
selected_repos=selected,
@@ -58,27 +50,17 @@ def handle_mirror_command(
)
return
# ------------------------------------------------------------
# mirror merge
# ------------------------------------------------------------
if subcommand == "merge":
source = getattr(args, "source", None)
target = getattr(args, "target", None)
preview = getattr(args, "preview", False)
if source == target:
print(
"[ERROR] For 'mirror merge', source and target "
"must differ (one of: config, file)."
)
print("[ERROR] For 'mirror merge', source and target must differ (config vs file).")
sys.exit(2)
# Config file path can be passed explicitly via --config-path.
# If not given, fall back to the global context (if available).
explicit_config_path = getattr(args, "config_path", None)
user_config_path = explicit_config_path or getattr(
ctx, "user_config_path", None
)
user_config_path = explicit_config_path or getattr(ctx, "user_config_path", None)
merge_mirrors(
selected_repos=selected,
@@ -91,26 +73,42 @@ def handle_mirror_command(
)
return
# ------------------------------------------------------------
# mirror setup
# ------------------------------------------------------------
if subcommand == "setup":
local = getattr(args, "local", False)
remote = getattr(args, "remote", False)
preview = getattr(args, "preview", False)
# If neither flag is set → default to both.
if not local and not remote:
local = True
remote = True
setup_mirrors(
selected_repos=selected,
repositories_base_dir=ctx.repositories_base_dir,
all_repos=ctx.all_repositories,
preview=preview,
local=local,
remote=remote,
local=True,
remote=False,
ensure_remote=False,
)
return
if subcommand == "check":
preview = getattr(args, "preview", False)
setup_mirrors(
selected_repos=selected,
repositories_base_dir=ctx.repositories_base_dir,
all_repos=ctx.all_repositories,
preview=preview,
local=False,
remote=True,
ensure_remote=False,
)
return
if subcommand == "provision":
preview = getattr(args, "preview", False)
setup_mirrors(
selected_repos=selected,
repositories_base_dir=ctx.repositories_base_dir,
all_repos=ctx.all_repositories,
preview=preview,
local=False,
remote=True,
ensure_remote=True,
)
return

View File

@@ -0,0 +1,34 @@
from __future__ import annotations
import os
from typing import Any, Dict, List
from pkgmgr.actions.publish import publish
from pkgmgr.cli.context import CLIContext
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier
Repository = Dict[str, Any]
def handle_publish(args, ctx: CLIContext, selected: List[Repository]) -> None:
    """Run the publish workflow for every selected repository with a local clone."""
    if not selected:
        print("[pkgmgr] No repositories selected for publish.")
        return

    # These flags are identical for every repository; resolve them once.
    preview = getattr(args, "preview", False)
    non_interactive = getattr(args, "non_interactive", False)

    for repo in selected:
        identifier = get_repo_identifier(repo, ctx.all_repositories)
        repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
        if not os.path.isdir(repo_dir):
            print(f"[WARN] Skipping {identifier}: directory missing.")
            continue
        print(f"[pkgmgr] Publishing repository {identifier}...")
        publish(
            repo=repo,
            repo_dir=repo_dir,
            preview=preview,
            interactive=not non_interactive,
            allow_prompt=not non_interactive,
        )

View File

@@ -8,7 +8,6 @@ from typing import Any, Dict, List
from pkgmgr.cli.context import CLIContext
from pkgmgr.actions.install import install_repos
from pkgmgr.actions.repository.update import update_repos
from pkgmgr.actions.repository.deinstall import deinstall_repos
from pkgmgr.actions.repository.delete import delete_repos
from pkgmgr.actions.repository.status import status_repos
@@ -72,25 +71,6 @@ def handle_repos_command(
)
return
# ------------------------------------------------------------
# update
# ------------------------------------------------------------
if args.command == "update":
update_repos(
selected,
ctx.repositories_base_dir,
ctx.binaries_dir,
ctx.all_repositories,
args.no_verification,
args.system_update,
args.preview,
args.quiet,
args.dependencies,
args.clone_mode,
force_update=True,
)
return
# ------------------------------------------------------------
# deinstall
# ------------------------------------------------------------

View File

@@ -9,8 +9,13 @@ from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.git import get_tags
from pkgmgr.core.version.semver import SemVer, find_latest_version
from pkgmgr.core.version.installed import (
get_installed_python_version,
get_installed_nix_profile_version,
)
from pkgmgr.core.version.source import (
read_pyproject_version,
read_pyproject_project_name,
read_flake_version,
read_pkgbuild_version,
read_debian_changelog_version,
@@ -18,10 +23,54 @@ from pkgmgr.core.version.source import (
read_ansible_galaxy_version,
)
Repository = Dict[str, Any]
def _print_pkgmgr_self_version() -> None:
    """
    Print version information for pkgmgr itself (installed env + nix profile),
    used when no repository is selected (e.g. user is not inside a repo).
    """
    print("pkgmgr version info")
    print("====================")
    print("\nRepository: <pkgmgr self>")
    print("----------------------------------------")

    # Distribution/module naming variants under which pkgmgr may be installed.
    dist_names = ("package-manager", "package_manager", "pkgmgr")
    profile_names = ("pkgmgr", "package-manager")

    py_info = get_installed_python_version(*dist_names)
    nix_info = get_installed_nix_profile_version(*profile_names)

    if py_info:
        print(f"Installed (Python env): {py_info.version} (dist: {py_info.name})")
    else:
        print("Installed (Python env): <not installed>")

    if nix_info:
        print(f"Installed (Nix profile): {nix_info.version} (match: {nix_info.name})")
    else:
        print("Installed (Nix profile): <not installed>")

    # Context for debugging why reported versions might differ.
    print(f"Python executable: {sys.executable}")
    print(f"Python prefix: {sys.prefix}")
def handle_version(
args,
ctx: CLIContext,
@@ -30,20 +79,39 @@ def handle_version(
"""
Handle the 'version' command.
Shows version information from various sources (git tags, pyproject,
flake.nix, PKGBUILD, debian, spec, Ansible Galaxy).
"""
Shows version information from:
- Git tags
- packaging metadata
- installed Python environment
- installed Nix profile
repo_list = selected
if not repo_list:
print("No repositories selected for version.")
sys.exit(1)
Special case:
- If no repositories are selected (e.g. not in a repo and no identifiers),
print pkgmgr's own installed versions instead of exiting with an error.
"""
if not selected:
_print_pkgmgr_self_version()
return
print("pkgmgr version info")
print("====================")
for repo in repo_list:
# Resolve repository directory
for repo in selected:
identifier = get_repo_identifier(repo, ctx.all_repositories)
python_candidates: list[str] = []
nix_candidates: list[str] = [identifier]
for key in ("pypi", "pip", "python_package", "distribution", "package"):
val = repo.get(key)
if isinstance(val, str) and val.strip():
python_candidates.append(val.strip())
python_candidates.append(identifier)
installed_python = get_installed_python_version(*python_candidates)
installed_nix = get_installed_nix_profile_version(*nix_candidates)
repo_dir = repo.get("directory")
if not repo_dir:
try:
@@ -51,51 +119,79 @@ def handle_version(
except Exception:
repo_dir = None
# If no local clone exists, skip gracefully with info message
if not repo_dir or not os.path.isdir(repo_dir):
identifier = get_repo_identifier(repo, ctx.all_repositories)
print(f"\nRepository: {identifier}")
print("----------------------------------------")
print(
"[INFO] Skipped: repository directory does not exist "
"locally, version detection is not possible."
"[INFO] Skipped: repository directory does not exist locally, "
"version detection is not possible."
)
if installed_python:
print(
f"Installed (Python env): {installed_python.version} "
f"(dist: {installed_python.name})"
)
else:
print("Installed (Python env): <not installed>")
if installed_nix:
print(
f"Installed (Nix profile): {installed_nix.version} "
f"(match: {installed_nix.name})"
)
else:
print("Installed (Nix profile): <not installed>")
continue
print(f"\nRepository: {repo_dir}")
print("----------------------------------------")
# 1) Git tags (SemVer)
try:
tags = get_tags(cwd=repo_dir)
except Exception as exc:
print(f"[ERROR] Could not read git tags: {exc}")
tags = []
latest_tag_info: Optional[Tuple[str, SemVer]]
latest_tag_info = find_latest_version(tags) if tags else None
latest_tag_info: Optional[Tuple[str, SemVer]] = (
find_latest_version(tags) if tags else None
)
if latest_tag_info is None:
latest_tag_str = None
latest_ver = None
if latest_tag_info:
tag, ver = latest_tag_info
print(f"Git (latest SemVer tag): {tag} (parsed: {ver})")
else:
latest_tag_str, latest_ver = latest_tag_info
print("Git (latest SemVer tag): <none found>")
# 2) Packaging / metadata sources
pyproject_version = read_pyproject_version(repo_dir)
pyproject_name = read_pyproject_project_name(repo_dir)
flake_version = read_flake_version(repo_dir)
pkgbuild_version = read_pkgbuild_version(repo_dir)
debian_version = read_debian_changelog_version(repo_dir)
spec_version = read_spec_version(repo_dir)
ansible_version = read_ansible_galaxy_version(repo_dir)
# 3) Print version summary
if latest_ver is not None:
if pyproject_name:
installed_python = get_installed_python_version(
pyproject_name, *python_candidates
)
if installed_python:
print(
f"Git (latest SemVer tag): {latest_tag_str} (parsed: {latest_ver})"
f"Installed (Python env): {installed_python.version} "
f"(dist: {installed_python.name})"
)
else:
print("Git (latest SemVer tag): <none found>")
print("Installed (Python env): <not installed>")
if installed_nix:
print(
f"Installed (Nix profile): {installed_nix.version} "
f"(match: {installed_nix.name})"
)
else:
print("Installed (Nix profile): <not installed>")
print(f"pyproject.toml: {pyproject_version or '<not found>'}")
print(f"flake.nix: {flake_version or '<not found>'}")
@@ -104,15 +200,16 @@ def handle_version(
print(f"package-manager.spec: {spec_version or '<not found>'}")
print(f"Ansible Galaxy meta: {ansible_version or '<not found>'}")
# 4) Consistency hint (Git tag vs. pyproject)
if latest_ver is not None and pyproject_version is not None:
if latest_tag_info and pyproject_version:
try:
file_ver = SemVer.parse(pyproject_version)
if file_ver != latest_ver:
if file_ver != latest_tag_info[1]:
print(
f"[WARN] Version mismatch: Git={latest_ver}, pyproject={file_ver}"
f"[WARN] Version mismatch: "
f"Git={latest_tag_info[1]}, pyproject={file_ver}"
)
except ValueError:
print(
f"[WARN] pyproject version {pyproject_version!r} is not valid SemVer."
f"[WARN] pyproject version {pyproject_version!r} "
f"is not valid SemVer."
)

View File

@@ -1,6 +1,3 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import os
@@ -16,6 +13,7 @@ from pkgmgr.cli.commands import (
handle_repos_command,
handle_tools_command,
handle_release,
handle_publish,
handle_version,
handle_config,
handle_make,
@@ -24,40 +22,20 @@ from pkgmgr.cli.commands import (
handle_mirror_command,
)
def _has_explicit_selection(args) -> bool:
"""
Return True if the user explicitly selected repositories via
identifiers / --all / --category / --tag / --string.
"""
identifiers = getattr(args, "identifiers", []) or []
use_all = getattr(args, "all", False)
categories = getattr(args, "category", []) or []
tags = getattr(args, "tag", []) or []
string_filter = getattr(args, "string", "") or ""
def _has_explicit_selection(args) -> bool:
return bool(
use_all
or identifiers
or categories
or tags
or string_filter
getattr(args, "all", False)
or getattr(args, "identifiers", [])
or getattr(args, "category", [])
or getattr(args, "tag", [])
or getattr(args, "string", "")
)
def _select_repo_for_current_directory(
ctx: CLIContext,
) -> List[Dict[str, Any]]:
"""
Heuristic: find the repository whose local directory matches the
current working directory or is the closest parent.
Example:
- Repo directory: /home/kevin/Repositories/foo
- CWD: /home/kevin/Repositories/foo/subdir
'foo' is selected.
"""
def _select_repo_for_current_directory(ctx: CLIContext) -> List[Dict[str, Any]]:
cwd = os.path.abspath(os.getcwd())
candidates: List[tuple[str, Dict[str, Any]]] = []
matches = []
for repo in ctx.all_repositories:
repo_dir = repo.get("directory")
@@ -65,33 +43,24 @@ def _select_repo_for_current_directory(
try:
repo_dir = get_repo_dir(ctx.repositories_base_dir, repo)
except Exception:
repo_dir = None
if not repo_dir:
continue
continue
repo_dir_abs = os.path.abspath(os.path.expanduser(repo_dir))
if cwd == repo_dir_abs or cwd.startswith(repo_dir_abs + os.sep):
candidates.append((repo_dir_abs, repo))
repo_dir = os.path.abspath(os.path.expanduser(repo_dir))
if cwd == repo_dir or cwd.startswith(repo_dir + os.sep):
matches.append((repo_dir, repo))
if not candidates:
if not matches:
return []
# Pick the repo with the longest (most specific) path.
candidates.sort(key=lambda item: len(item[0]), reverse=True)
return [candidates[0][1]]
matches.sort(key=lambda x: len(x[0]), reverse=True)
return [matches[0][1]]
def dispatch_command(args, ctx: CLIContext) -> None:
"""
Dispatch the parsed arguments to the appropriate command handler.
"""
# First: proxy commands (git / docker / docker compose / make wrapper etc.)
if maybe_handle_proxy(args, ctx):
return
# Commands that operate on repository selections
commands_with_selection: List[str] = [
commands_with_selection = {
"install",
"update",
"deinstall",
@@ -103,33 +72,26 @@ def dispatch_command(args, ctx: CLIContext) -> None:
"list",
"make",
"release",
"publish",
"version",
"changelog",
"explore",
"terminal",
"code",
"mirror",
]
}
if getattr(args, "command", None) in commands_with_selection:
if _has_explicit_selection(args):
# Classic selection logic (identifiers / --all / filters)
selected = get_selected_repos(args, ctx.all_repositories)
else:
# Default per help text: repository of current folder.
selected = _select_repo_for_current_directory(ctx)
# If none is found, leave 'selected' empty.
# Individual handlers will then emit a clear message instead
# of silently picking an unrelated repository.
if args.command in commands_with_selection:
selected = (
get_selected_repos(args, ctx.all_repositories)
if _has_explicit_selection(args)
else _select_repo_for_current_directory(ctx)
)
else:
selected = []
# ------------------------------------------------------------------ #
# Repos-related commands
# ------------------------------------------------------------------ #
if args.command in (
if args.command in {
"install",
"update",
"deinstall",
"delete",
"status",
@@ -137,24 +99,39 @@ def dispatch_command(args, ctx: CLIContext) -> None:
"shell",
"create",
"list",
):
}:
handle_repos_command(args, ctx, selected)
return
# ------------------------------------------------------------------ #
# Tools (explore / terminal / code)
# ------------------------------------------------------------------ #
if args.command == "update":
from pkgmgr.actions.update import UpdateManager
UpdateManager().run(
selected_repos=selected,
repositories_base_dir=ctx.repositories_base_dir,
bin_dir=ctx.binaries_dir,
all_repos=ctx.all_repositories,
no_verification=args.no_verification,
system_update=args.system,
preview=args.preview,
quiet=args.quiet,
update_dependencies=args.dependencies,
clone_mode=args.clone_mode,
force_update=True,
)
return
if args.command in ("explore", "terminal", "code"):
handle_tools_command(args, ctx, selected)
return
# ------------------------------------------------------------------ #
# Release / Version / Changelog / Config / Make / Branch
# ------------------------------------------------------------------ #
if args.command == "release":
handle_release(args, ctx, selected)
return
if args.command == "publish":
handle_publish(args, ctx, selected)
return
if args.command == "version":
handle_version(args, ctx, selected)
return
@@ -176,7 +153,7 @@ def dispatch_command(args, ctx: CLIContext) -> None:
return
if args.command == "mirror":
handle_mirror_command(args, ctx, selected)
handle_mirror_command(ctx, args, selected)
return
print(f"Unknown command: {args.command}")

View File

@@ -1,6 +1,3 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import argparse
@@ -13,6 +10,7 @@ from .config_cmd import add_config_subparsers
from .navigation_cmd import add_navigation_subparsers
from .branch_cmd import add_branch_subparsers
from .release_cmd import add_release_subparser
from .publish_cmd import add_publish_subparser
from .version_cmd import add_version_subparser
from .changelog_cmd import add_changelog_subparser
from .list_cmd import add_list_subparser
@@ -21,9 +19,6 @@ from .mirror_cmd import add_mirror_subparsers
def create_parser(description_text: str) -> argparse.ArgumentParser:
"""
Create the top-level argument parser for pkgmgr.
"""
parser = argparse.ArgumentParser(
description=description_text,
formatter_class=argparse.RawTextHelpFormatter,
@@ -34,35 +29,23 @@ def create_parser(description_text: str) -> argparse.ArgumentParser:
action=SortedSubParsersAction,
)
# Core repo operations
add_install_update_subparsers(subparsers)
add_config_subparsers(subparsers)
# Navigation / tooling around repos
add_navigation_subparsers(subparsers)
# Branch & release workflow
add_branch_subparsers(subparsers)
add_release_subparser(subparsers)
add_publish_subparser(subparsers)
# Info commands
add_version_subparser(subparsers)
add_changelog_subparser(subparsers)
add_list_subparser(subparsers)
# Make wrapper
add_make_subparsers(subparsers)
# Mirror management
add_mirror_subparsers(subparsers)
# Proxy commands (git, docker, docker compose, ...)
register_proxy_commands(subparsers)
return parser
__all__ = [
"create_parser",
"SortedSubParsersAction",
]
__all__ = ["create_parser", "SortedSubParsersAction"]

View File

@@ -1,96 +1,134 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# src/pkgmgr/cli/parser/common.py
from __future__ import annotations
import argparse
from typing import Optional, Tuple
class SortedSubParsersAction(argparse._SubParsersAction):
"""
Subparsers action that keeps choices sorted alphabetically.
Subparsers action that keeps subcommands sorted alphabetically.
"""
def add_parser(self, name, **kwargs):
parser = super().add_parser(name, **kwargs)
# Sort choices alphabetically by dest (subcommand name)
self._choices_actions.sort(key=lambda a: a.dest)
return parser
def _has_action(
    parser: argparse.ArgumentParser,
    *,
    positional: Optional[str] = None,
    options: Tuple[str, ...] = (),
) -> bool:
    """
    Return True when the parser already defines a matching action.

    - positional: dest name of a positional argument (e.g. "identifiers")
    - options: option strings to look for (e.g. "--preview", "-q")
    """
    registered = parser._actions
    if positional and any(act.dest == positional for act in registered):
        return True
    if options and any(
        opt in act.option_strings for act in registered for opt in options
    ):
        return True
    return False
def _add_positional_if_missing(
    parser: argparse.ArgumentParser,
    name: str,
    **kwargs,
) -> None:
    """Add positional *name* unless the parser already defines it."""
    if not _has_action(parser, positional=name):
        parser.add_argument(name, **kwargs)
def _add_option_if_missing(
    parser: argparse.ArgumentParser,
    *option_strings: str,
    **kwargs,
) -> None:
    """Add the option unless one of its strings is already registered."""
    if not _has_action(parser, options=tuple(option_strings)):
        parser.add_argument(*option_strings, **kwargs)
def add_identifier_arguments(subparser: argparse.ArgumentParser) -> None:
"""
Common identifier / selection arguments for many subcommands.
Selection modes (mutual intent, not hard-enforced):
- identifiers (positional): select by alias / provider/account/repo
- --all: select all repositories
- --category / --string / --tag: filter-based selection on top
of the full repository set
"""
subparser.add_argument(
_add_positional_if_missing(
subparser,
"identifiers",
nargs="*",
help=(
"Identifier(s) for repositories. "
"Default: Repository of current folder."
"Default: repository of the current working directory."
),
)
subparser.add_argument(
_add_option_if_missing(
subparser,
"--all",
action="store_true",
default=False,
help=(
"Apply the subcommand to all repositories in the config. "
"Some subcommands ask for confirmation. If you want to give this "
"confirmation for all repositories, pipe 'yes'. E.g: "
"yes | pkgmgr {subcommand} --all"
"Pipe 'yes' to auto-confirm. Example:\n"
" yes | pkgmgr <command> --all"
),
)
subparser.add_argument(
_add_option_if_missing(
subparser,
"--category",
nargs="+",
default=[],
help=(
"Filter repositories by category patterns derived from config "
"filenames or repo metadata (use filename without .yml/.yaml, "
"or /regex/ to use a regular expression)."
),
help="Filter repositories by category (supports /regex/).",
)
subparser.add_argument(
_add_option_if_missing(
subparser,
"--string",
default="",
help=(
"Filter repositories whose identifier / name / path contains this "
"substring (case-insensitive). Use /regex/ for regular expressions."
),
help="Filter repositories by substring or /regex/.",
)
subparser.add_argument(
_add_option_if_missing(
subparser,
"--tag",
action="append",
default=[],
help=(
"Filter repositories by tag. Matches tags from the repository "
"collector and category tags. Use /regex/ for regular expressions."
),
help="Filter repositories by tag (supports /regex/).",
)
subparser.add_argument(
_add_option_if_missing(
subparser,
"--preview",
action="store_true",
help="Preview changes without executing commands",
help="Preview changes without executing commands.",
)
subparser.add_argument(
_add_option_if_missing(
subparser,
"--list",
action="store_true",
help="List affected repositories (with preview or status)",
help="List affected repositories.",
)
subparser.add_argument(
_add_option_if_missing(
subparser,
"-a",
"--args",
nargs=argparse.REMAINDER,
dest="extra_args",
help="Additional parameters to be attached.",
nargs=argparse.REMAINDER,
default=[],
help="Additional parameters to be attached.",
)
@@ -99,29 +137,34 @@ def add_install_update_arguments(subparser: argparse.ArgumentParser) -> None:
Common arguments for install/update commands.
"""
add_identifier_arguments(subparser)
subparser.add_argument(
_add_option_if_missing(
subparser,
"-q",
"--quiet",
action="store_true",
help="Suppress warnings and info messages",
help="Suppress warnings and info messages.",
)
subparser.add_argument(
_add_option_if_missing(
subparser,
"--no-verification",
action="store_true",
default=False,
help="Disable verification via commit/gpg",
help="Disable verification via commit / GPG.",
)
subparser.add_argument(
_add_option_if_missing(
subparser,
"--dependencies",
action="store_true",
help="Also pull and update dependencies",
help="Also pull and update dependencies.",
)
subparser.add_argument(
_add_option_if_missing(
subparser,
"--clone-mode",
choices=["ssh", "https", "shallow"],
default="ssh",
help=(
"Specify the clone mode: ssh, https, or shallow "
"(HTTPS shallow clone; default: ssh)"
),
help="Specify clone mode (default: ssh).",
)

View File

@@ -33,8 +33,8 @@ def add_install_update_subparsers(
)
add_install_update_arguments(update_parser)
update_parser.add_argument(
"--system-update",
dest="system_update",
"--system",
dest="system",
action="store_true",
help="Include system update commands",
)

View File

@@ -1,3 +1,4 @@
# src/pkgmgr/cli/parser/mirror_cmd.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
@@ -8,103 +9,55 @@ import argparse
from .common import add_identifier_arguments
def add_mirror_subparsers(
subparsers: argparse._SubParsersAction,
) -> None:
"""
Register mirror command and its subcommands (list, diff, merge, setup).
"""
def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
mirror_parser = subparsers.add_parser(
"mirror",
help="Mirror-related utilities (list, diff, merge, setup)",
help="Mirror-related utilities (list, diff, merge, setup, check, provision)",
)
mirror_subparsers = mirror_parser.add_subparsers(
dest="subcommand",
help="Mirror subcommands",
metavar="SUBCOMMAND",
required=True,
)
# ------------------------------------------------------------------
# mirror list
# ------------------------------------------------------------------
mirror_list = mirror_subparsers.add_parser(
"list",
help="List configured mirrors for repositories",
)
mirror_list = mirror_subparsers.add_parser("list", help="List configured mirrors for repositories")
add_identifier_arguments(mirror_list)
mirror_list.add_argument(
"--source",
choices=["all", "config", "file", "resolved"],
choices=["config", "file", "all"],
default="all",
help="Which mirror source to show.",
)
# ------------------------------------------------------------------
# mirror diff
# ------------------------------------------------------------------
mirror_diff = mirror_subparsers.add_parser(
"diff",
help="Show differences between config mirrors and MIRRORS file",
)
mirror_diff = mirror_subparsers.add_parser("diff", help="Show differences between config mirrors and MIRRORS file")
add_identifier_arguments(mirror_diff)
# ------------------------------------------------------------------
# mirror merge {config,file} {config,file}
# ------------------------------------------------------------------
mirror_merge = mirror_subparsers.add_parser(
"merge",
help=(
"Merge mirrors between config and MIRRORS file "
"(example: pkgmgr mirror merge config file --all)"
),
help="Merge mirrors between config and MIRRORS file (example: pkgmgr mirror merge config file --all)",
)
# First define merge direction positionals, then selection args.
mirror_merge.add_argument(
"source",
choices=["config", "file"],
help="Source of mirrors.",
)
mirror_merge.add_argument(
"target",
choices=["config", "file"],
help="Target of mirrors.",
)
# Selection / filter / preview arguments
mirror_merge.add_argument("source", choices=["config", "file"], help="Source of mirrors.")
mirror_merge.add_argument("target", choices=["config", "file"], help="Target of mirrors.")
add_identifier_arguments(mirror_merge)
mirror_merge.add_argument(
"--config-path",
help=(
"Path to the user config file to update. "
"If omitted, the global config path is used."
),
help="Path to the user config file to update. If omitted, the global config path is used.",
)
# Note: --preview, --all, --category, --tag, --list, etc. are provided
# by add_identifier_arguments().
# ------------------------------------------------------------------
# mirror setup
# ------------------------------------------------------------------
mirror_setup = mirror_subparsers.add_parser(
"setup",
help=(
"Setup mirror configuration for repositories.\n"
" --local → configure local Git (remotes, pushurls)\n"
" --remote → create remote repositories if missing\n"
"Default: both local and remote."
),
help="Configure local Git remotes and push URLs (origin, pushurl list).",
)
add_identifier_arguments(mirror_setup)
mirror_setup.add_argument(
"--local",
action="store_true",
help="Only configure the local Git repository.",
mirror_check = mirror_subparsers.add_parser(
"check",
help="Check remote mirror reachability (git ls-remote). Read-only.",
)
mirror_setup.add_argument(
"--remote",
action="store_true",
help="Only operate on remote repositories.",
add_identifier_arguments(mirror_check)
mirror_provision = mirror_subparsers.add_parser(
"provision",
help="Provision remote repositories via provider APIs (create missing repos).",
)
# Note: --preview also comes from add_identifier_arguments().
add_identifier_arguments(mirror_provision)

View File

@@ -0,0 +1,19 @@
from __future__ import annotations
import argparse
from .common import add_identifier_arguments
def add_publish_subparser(subparsers: argparse._SubParsersAction) -> None:
    """Register the `publish` command (artifact publishing driven by MIRRORS)."""
    publish = subparsers.add_parser(
        "publish",
        help="Publish repository artifacts (e.g. PyPI) based on MIRRORS.",
    )
    # Shared selection/filter/preview flags used by most repo commands.
    add_identifier_arguments(publish)
    publish.add_argument(
        "--non-interactive",
        action="store_true",
        help="Disable interactive credential prompts (CI mode).",
    )

View File

@@ -0,0 +1,21 @@
# src/pkgmgr/core/credentials/__init__.py
"""Credential resolution for provider APIs."""
from .resolver import ResolutionOptions, TokenResolver
from .types import (
CredentialError,
KeyringUnavailableError,
NoCredentialsError,
TokenRequest,
TokenResult,
)
__all__ = [
"TokenResolver",
"ResolutionOptions",
"CredentialError",
"NoCredentialsError",
"KeyringUnavailableError",
"TokenRequest",
"TokenResult",
]

View File

@@ -0,0 +1,11 @@
"""Credential providers used by TokenResolver."""
from .env import EnvTokenProvider
from .keyring import KeyringTokenProvider
from .prompt import PromptTokenProvider
__all__ = [
"EnvTokenProvider",
"KeyringTokenProvider",
"PromptTokenProvider",
]

View File

@@ -0,0 +1,23 @@
# src/pkgmgr/core/credentials/providers/env.py
from __future__ import annotations
import os
from dataclasses import dataclass
from typing import Optional
from ..store_keys import env_var_candidates
from ..types import TokenRequest, TokenResult
@dataclass(frozen=True)
class EnvTokenProvider:
    """Resolve tokens from environment variables.

    Candidate variable names are tried from most specific to most generic
    (see env_var_candidates); the first non-empty value wins.
    """

    source_name: str = "env"

    def get(self, request: TokenRequest) -> Optional[TokenResult]:
        """Return the first matching env token, or None when none is set."""
        names = env_var_candidates(
            request.provider_kind, request.host, request.owner
        )
        for name in names:
            raw = os.environ.get(name)
            if not raw:
                continue
            return TokenResult(token=raw.strip(), source=self.source_name)
        return None

View File

@@ -0,0 +1,57 @@
# src/pkgmgr/core/credentials/providers/keyring.py
from __future__ import annotations
from dataclasses import dataclass
from typing import Optional
from ..store_keys import build_keyring_key
from ..types import KeyringUnavailableError, TokenRequest, TokenResult
def _import_keyring():
    """
    Import python-keyring and verify a backend can be obtained.

    Raises:
        KeyringUnavailableError when the library is missing, when no
        backend is configured/usable, or when the import fails.
    """
    try:
        import keyring  # type: ignore
    except Exception as exc:  # noqa: BLE001
        raise KeyringUnavailableError(
            "python-keyring is not installed."
        ) from exc
    # keyring may be installed without any usable backend; probing the
    # backend here surfaces that early instead of on first get/set.
    try:
        keyring.get_keyring()
    except Exception as exc:  # noqa: BLE001
        raise KeyringUnavailableError(
            "python-keyring is installed but no usable keyring backend is configured."
        ) from exc
    return keyring
@dataclass(frozen=True)
class KeyringTokenProvider:
    """Read and write tokens in the OS keyring via python-keyring."""

    source_name: str = "keyring"

    def get(self, request: TokenRequest) -> Optional[TokenResult]:
        """Return the stored token for this request, or None when absent."""
        backend = _import_keyring()
        address = build_keyring_key(request.provider_kind, request.host, request.owner)
        stored = backend.get_password(address.service, address.username)
        if not stored:
            return None
        return TokenResult(token=stored.strip(), source=self.source_name)

    def set(self, request: TokenRequest, token: str) -> None:
        """Persist *token* under the request's keyring address."""
        backend = _import_keyring()
        address = build_keyring_key(request.provider_kind, request.host, request.owner)
        backend.set_password(address.service, address.username, token)

View File

@@ -0,0 +1,68 @@
# src/pkgmgr/core/credentials/providers/prompt.py
from __future__ import annotations
import sys
from dataclasses import dataclass
from getpass import getpass
from typing import Optional
from ..types import TokenRequest, TokenResult
def _token_help_url(provider_kind: str, host: str) -> Optional[str]:
    """
    Return a provider-specific URL where a user can create an API token.

    Known providers: GitHub (fixed settings URL), Gitea/Forgejo (common
    settings path on the given host), GitLab (cloud or self-hosted PAT
    page). Anything else yields None.
    """
    kind = (provider_kind or "").strip().lower()
    h = (host or "").strip()
    if kind == "github":
        # Cloud GitHub token settings page.
        return "https://github.com/settings/tokens"
    if kind in ("gitea", "forgejo"):
        # Typical UI path: Settings -> Applications -> Access Tokens.
        return f"https://{h}".rstrip("/") + "/user/settings/applications"
    if kind == "gitlab":
        base = f"https://{h}".rstrip("/") if h else "https://gitlab.com"
        return f"{base}/-/profile/personal_access_tokens"
    return None
@dataclass(frozen=True)
class PromptTokenProvider:
    """Interactively ask the user for a token.

    Yields a result only when interactive mode is allowed by the caller
    and stdin is a TTY; otherwise (or on empty input) returns None.
    """

    source_name: str = "prompt"

    def get(self, request: TokenRequest) -> Optional[TokenResult]:
        """Prompt on the TTY; None when non-interactive or nothing entered."""
        if not sys.stdin.isatty():
            return None
        url = _token_help_url(request.provider_kind, request.host)
        if url:
            print(f"[INFO] Create/get your token here: {url}")
        owner_info = f" (owner: {request.owner})" if request.owner else ""
        entered = getpass(
            f"Enter API token for {request.provider_kind} on {request.host}{owner_info}: "
        )
        cleaned = (entered or "").strip()
        if not cleaned:
            return None
        return TokenResult(token=cleaned, source=self.source_name)

View File

@@ -0,0 +1,96 @@
# src/pkgmgr/core/credentials/resolver.py
from __future__ import annotations
import sys
from dataclasses import dataclass
from typing import Optional
from .providers.env import EnvTokenProvider
from .providers.keyring import KeyringTokenProvider
from .providers.prompt import PromptTokenProvider
from .types import KeyringUnavailableError, NoCredentialsError, TokenRequest, TokenResult
@dataclass(frozen=True)
class ResolutionOptions:
    """Controls token resolution behavior."""
    # Master switch: when False, the prompt stage is skipped entirely.
    interactive: bool = True
    # Additional gate for the prompt stage (both flags must be True to prompt).
    allow_prompt: bool = True
    # Best-effort: persist a freshly prompted token to the OS keyring.
    save_prompt_token_to_keyring: bool = True
class TokenResolver:
    """Resolve tokens from multiple sources (ENV -> Keyring -> Prompt)."""
    def __init__(self) -> None:
        # One provider instance per source; tried in priority order.
        self._env = EnvTokenProvider()
        self._keyring = KeyringTokenProvider()
        self._prompt = PromptTokenProvider()
        # Ensures the keyring-unavailable warning is printed at most once.
        self._warned_keyring: bool = False
    def _warn_keyring_unavailable(self, exc: Exception) -> None:
        """Print a one-time stderr hint on how to enable keyring support."""
        if self._warned_keyring:
            return
        self._warned_keyring = True
        msg = str(exc).strip() or "Keyring is unavailable."
        print("[WARN] Keyring support is not available.", file=sys.stderr)
        print(f"    {msg}", file=sys.stderr)
        print("    Tokens will NOT be persisted securely.", file=sys.stderr)
        print("", file=sys.stderr)
        print("    To enable secure token storage, install python-keyring:", file=sys.stderr)
        print("        pip install keyring", file=sys.stderr)
        print("", file=sys.stderr)
        print("    Or install via system packages:", file=sys.stderr)
        print("        sudo apt install python3-keyring", file=sys.stderr)
        print("        sudo pacman -S python-keyring", file=sys.stderr)
        print("        sudo dnf install python3-keyring", file=sys.stderr)
        print("", file=sys.stderr)
    def get_token(
        self,
        provider_kind: str,
        host: str,
        owner: Optional[str] = None,
        options: Optional[ResolutionOptions] = None,
    ) -> TokenResult:
        """Return the first token found in ENV, keyring, then prompt.

        A token entered at the prompt is saved to the keyring (best effort)
        when options.save_prompt_token_to_keyring is set.

        Raises:
            NoCredentialsError when no source yields a token.
        """
        opts = options or ResolutionOptions()
        request = TokenRequest(provider_kind=provider_kind, host=host, owner=owner)
        # 1) ENV
        env_res = self._env.get(request)
        if env_res:
            return env_res
        # 2) Keyring
        try:
            kr_res = self._keyring.get(request)
            if kr_res:
                return kr_res
        except KeyringUnavailableError as exc:
            # Show a helpful warning once, then continue (prompt fallback).
            self._warn_keyring_unavailable(exc)
        except Exception:
            # Unknown keyring errors: do not block prompting; still avoid hard crash.
            pass
        # 3) Prompt (optional)
        if opts.interactive and opts.allow_prompt:
            prompt_res = self._prompt.get(request)
            if prompt_res:
                if opts.save_prompt_token_to_keyring:
                    try:
                        self._keyring.set(request, prompt_res.token)
                    except KeyringUnavailableError as exc:
                        self._warn_keyring_unavailable(exc)
                    except Exception:
                        # If keyring cannot store, still use token for this run.
                        pass
                return prompt_res
        raise NoCredentialsError(
            f"No token available for {provider_kind}@{host}"
            + (f" (owner: {owner})" if owner else "")
            + ". Provide it via environment variable or keyring."
        )

View File

@@ -0,0 +1,54 @@
# src/pkgmgr/core/credentials/store_keys.py
from __future__ import annotations
from dataclasses import dataclass
from typing import Optional
@dataclass(frozen=True)
class KeyringKey:
    """Keyring address for a token."""
    # Keyring "service" namespace, e.g. "pkgmgr:gitea" (see build_keyring_key).
    service: str
    # Keyring "username" slot encoding host and owner, e.g. "git.example.org|-".
    username: str
def build_keyring_key(provider_kind: str, host: str, owner: Optional[str]) -> KeyringKey:
    """Build a stable keyring address for a token.

    The service is "pkgmgr:<provider>"; the username is "<host>|<owner>",
    with "-" standing in for a missing owner.
    """
    kind = str(provider_kind).strip().lower()
    site = str(host).strip()
    who = str(owner).strip() if owner else "-"
    return KeyringKey(service=f"pkgmgr:{kind}", username=f"{site}|{who}")
def env_var_candidates(provider_kind: str, host: str, owner: Optional[str]) -> list[str]:
    """Return environment variable names to try, most specific first."""
    kind = re_sub_non_alnum(str(provider_kind).strip().upper())
    site = re_sub_non_alnum(str(host).strip().upper())
    names: list[str] = []
    if owner:
        who = re_sub_non_alnum(str(owner).strip().upper())
        names += [
            f"PKGMGR_{kind}_TOKEN_{site}_{who}",
            f"PKGMGR_TOKEN_{kind}_{site}_{who}",
        ]
    names += [
        f"PKGMGR_{kind}_TOKEN_{site}",
        f"PKGMGR_TOKEN_{kind}_{site}",
        f"PKGMGR_{kind}_TOKEN",
        f"PKGMGR_TOKEN_{kind}",
        "PKGMGR_TOKEN",
    ]
    return names
import re

# Compiled once at import time instead of re-importing `re` and rebuilding
# the pattern on every call: runs of characters outside [A-Z0-9].
_NON_ALNUM_RUN = re.compile(r"[^A-Z0-9]+")


def re_sub_non_alnum(value: str) -> str:
    """Normalize *value* to an uppercase env-var friendly token (A-Z0-9_).

    Runs of any other characters collapse to a single underscore, and
    leading/trailing underscores are stripped. Callers uppercase the input
    first; lowercase letters are treated as non-alphanumeric here.
    """
    return _NON_ALNUM_RUN.sub("_", value).strip("_")

View File

@@ -0,0 +1,34 @@
# src/pkgmgr/core/credentials/types.py
from __future__ import annotations
from dataclasses import dataclass
from typing import Optional
# All credential errors derive from CredentialError so callers can catch
# the whole family with one except clause.
class CredentialError(RuntimeError):
    """Base class for credential resolution errors."""
class NoCredentialsError(CredentialError):
    """Raised when no usable credential could be resolved."""
class KeyringUnavailableError(CredentialError):
    """Raised when keyring is requested but no backend is available."""
@dataclass(frozen=True)
class TokenRequest:
    """Parameters describing which token we need."""
    provider_kind: str  # e.g. "gitea", "github"
    host: str  # e.g. "git.example.org" or "github.com"
    owner: Optional[str] = None  # optional org/user
@dataclass(frozen=True)
class TokenResult:
    """A resolved token plus metadata about its source."""
    token: str  # the secret itself (providers strip surrounding whitespace)
    source: str  # "env" | "keyring" | "prompt"

View File

@@ -0,0 +1,14 @@
# src/pkgmgr/core/remote_provisioning/__init__.py
"""Remote repository provisioning (ensure remote repo exists)."""
from .ensure import ensure_remote_repo
from .registry import ProviderRegistry
from .types import EnsureResult, ProviderHint, RepoSpec
__all__ = [
"ensure_remote_repo",
"RepoSpec",
"EnsureResult",
"ProviderHint",
"ProviderRegistry",
]

View File

@@ -0,0 +1,99 @@
# src/pkgmgr/core/remote_provisioning/ensure.py
from __future__ import annotations
from dataclasses import dataclass
from typing import Optional
from pkgmgr.core.credentials.resolver import ResolutionOptions, TokenResolver
from .http.errors import HttpError
from .registry import ProviderRegistry
from .types import (
AuthError,
EnsureResult,
NetworkError,
PermissionError,
ProviderHint,
RepoSpec,
UnsupportedProviderError,
)
@dataclass(frozen=True)
class EnsureOptions:
    """Options controlling remote provisioning."""
    # When True, resolve provider and token but perform no remote changes.
    preview: bool = False
    # The next three are forwarded to ResolutionOptions for token lookup.
    interactive: bool = True
    allow_prompt: bool = True
    save_prompt_token_to_keyring: bool = True
def _raise_mapped_http_error(exc: HttpError, host: str) -> None:
    """Translate a transport-level HttpError into a domain error (always raises)."""
    code = exc.status
    if code == 401:
        raise AuthError(f"Authentication failed for {host} (401).") from exc
    if code == 403:
        raise PermissionError(f"Permission denied for {host} (403).") from exc
    if code == 0:
        # status 0 is the client's marker for network/transport failures.
        raise NetworkError(f"Network error while talking to {host}: {exc}") from exc
    raise NetworkError(
        f"HTTP error from {host}: status={exc.status}, message={exc}, body={exc.body}"
    ) from exc
def ensure_remote_repo(
    spec: RepoSpec,
    provider_hint: Optional[ProviderHint] = None,
    options: Optional[EnsureOptions] = None,
    registry: Optional[ProviderRegistry] = None,
    token_resolver: Optional[TokenResolver] = None,
) -> EnsureResult:
    """Ensure that the remote repository exists (create if missing).
    - Uses TokenResolver (ENV -> keyring -> prompt)
    - Selects provider via ProviderRegistry (or provider_hint override)
    - Respects preview mode (no remote changes)
    - Maps HTTP errors to domain-specific errors

    Raises:
        UnsupportedProviderError when no provider matches spec.host;
        NoCredentialsError from token resolution; AuthError /
        PermissionError / NetworkError for mapped HTTP failures.
    """
    opts = options or EnsureOptions()
    reg = registry or ProviderRegistry.default()
    resolver = token_resolver or TokenResolver()
    # Host-based resolution first; an explicit hint below may override it.
    provider = reg.resolve(spec.host)
    if provider_hint and provider_hint.kind:
        forced = provider_hint.kind.strip().lower()
        # Match the hint against each provider's `kind`; when the hint
        # matches nothing, silently keep the host-resolved provider.
        forced_provider = next(
            (p for p in reg.providers if getattr(p, "kind", "").lower() == forced),
            None,
        )
        if forced_provider is not None:
            provider = forced_provider
    if provider is None:
        raise UnsupportedProviderError(f"No provider matched host: {spec.host}")
    token_opts = ResolutionOptions(
        interactive=opts.interactive,
        allow_prompt=opts.allow_prompt,
        save_prompt_token_to_keyring=opts.save_prompt_token_to_keyring,
    )
    # NOTE(review): token resolution runs even in preview mode, so an
    # interactive preview may still prompt for credentials before the
    # short-circuit below — confirm this is intended.
    token = resolver.get_token(
        provider_kind=getattr(provider, "kind", "unknown"),
        host=spec.host,
        owner=spec.owner,
        options=token_opts,
    )
    if opts.preview:
        return EnsureResult(
            status="skipped",
            message="Preview mode: no remote changes performed.",
        )
    try:
        return provider.ensure_repo(token.token, spec)
    except HttpError as exc:
        # Always raises a domain-specific error.
        _raise_mapped_http_error(exc, host=spec.host)
    # Unreachable: kept so static analysis sees a return on every path.
    return EnsureResult(status="failed", message="Unreachable error mapping.")

View File

@@ -0,0 +1,5 @@
# src/pkgmgr/core/remote_provisioning/http/__init__.py
from .client import HttpClient, HttpResponse
from .errors import HttpError
__all__ = ["HttpClient", "HttpResponse", "HttpError"]

View File

@@ -0,0 +1,69 @@
# src/pkgmgr/core/remote_provisioning/http/client.py
from __future__ import annotations
import json
import ssl
import urllib.error
import urllib.request
from dataclasses import dataclass
from typing import Any, Dict, Optional
from .errors import HttpError
@dataclass(frozen=True)
class HttpResponse:
    """Decoded HTTP response returned by HttpClient.request_json."""
    # HTTP status code of the successful response.
    status: int
    # Body decoded as UTF-8 (undecodable bytes replaced).
    text: str
    # Parsed body, set only when it decodes to a JSON object (dict).
    json: Optional[Dict[str, Any]] = None
class HttpClient:
    """Tiny HTTP client (stdlib) with JSON support."""

    def __init__(self, timeout_s: int = 15) -> None:
        # One timeout (seconds) shared by every request this client sends.
        self._timeout_s = int(timeout_s)

    def request_json(
        self,
        method: str,
        url: str,
        headers: Optional[Dict[str, str]] = None,
        payload: Optional[Dict[str, Any]] = None,
    ) -> HttpResponse:
        """Send the request and decode the response.

        The response's `json` field is populated only when the body parses
        to a JSON object (dict). Raises HttpError for HTTP error statuses
        and for transport failures (status=0).
        """
        send_headers: Dict[str, str] = dict(headers or {})
        body: Optional[bytes] = None
        if payload is not None:
            body = json.dumps(payload).encode("utf-8")
            # Default only; never overrides a caller-provided content type.
            send_headers.setdefault("Content-Type", "application/json")
        request = urllib.request.Request(url=url, data=body, method=method.upper())
        for header_name, header_value in send_headers.items():
            request.add_header(header_name, header_value)
        try:
            with urllib.request.urlopen(
                request,
                timeout=self._timeout_s,
                context=ssl.create_default_context(),
            ) as resp:
                status = int(resp.status)
                text = resp.read().decode("utf-8", errors="replace")
        except urllib.error.HTTPError as exc:
            try:
                error_body = exc.read().decode("utf-8", errors="replace")
            except Exception:
                error_body = ""
            raise HttpError(status=int(exc.code), message=str(exc), body=error_body) from exc
        except urllib.error.URLError as exc:
            raise HttpError(status=0, message=str(exc), body="") from exc
        parsed: Optional[Dict[str, Any]] = None
        if text:
            try:
                decoded = json.loads(text)
            except Exception:
                decoded = None
            if isinstance(decoded, dict):
                parsed = decoded
        return HttpResponse(status=status, text=text, json=parsed)

View File

@@ -0,0 +1,9 @@
# src/pkgmgr/core/remote_provisioning/http/errors.py
from __future__ import annotations
class HttpError(RuntimeError):
    """HTTP-level failure raised by HttpClient.

    `status` is the HTTP status code (0 marks transport/network errors);
    `body` carries the decoded error response body when available.
    """
    def __init__(self, status: int, message: str, body: str = "") -> None:
        super().__init__(message)
        self.status = status
        self.body = body

View File

@@ -0,0 +1,6 @@
# src/pkgmgr/core/remote_provisioning/providers/__init__.py
from .base import RemoteProvider
from .gitea import GiteaProvider
from .github import GitHubProvider
__all__ = ["RemoteProvider", "GiteaProvider", "GitHubProvider"]

View File

@@ -0,0 +1,36 @@
# src/pkgmgr/core/remote_provisioning/providers/base.py
from __future__ import annotations
from abc import ABC, abstractmethod
from ..types import EnsureResult, RepoSpec
class RemoteProvider(ABC):
    """Provider interface for remote repo provisioning."""

    # Short provider identifier, e.g. "github" or "gitea".
    kind: str

    @abstractmethod
    def can_handle(self, host: str) -> bool:
        """Return True if this provider implementation matches the host."""

    @abstractmethod
    def repo_exists(self, token: str, spec: RepoSpec) -> bool:
        """Return True if repo exists and is accessible."""

    @abstractmethod
    def create_repo(self, token: str, spec: RepoSpec) -> EnsureResult:
        """Create a repository (owner may be user or org)."""

    def ensure_repo(self, token: str, spec: RepoSpec) -> EnsureResult:
        """Create the repo unless it already exists remotely."""
        if not self.repo_exists(token, spec):
            return self.create_repo(token, spec)
        return EnsureResult(status="exists", message="Repository exists.")

    @staticmethod
    def _api_base(host: str) -> str:
        """Return a scheme-qualified base URL, defaulting to https.

        Hosts stored with an explicit "http://..." (e.g. local dev) keep
        their scheme; trailing slashes are stripped either way.
        """
        if host.startswith(("http://", "https://")):
            return host.rstrip("/")
        return f"https://{host}".rstrip("/")

View File

@@ -0,0 +1,106 @@
# src/pkgmgr/core/remote_provisioning/providers/gitea.py
from __future__ import annotations
from typing import Any, Dict
from ..http.client import HttpClient
from ..http.errors import HttpError
from ..types import EnsureResult, RepoSpec
from .base import RemoteProvider
class GiteaProvider(RemoteProvider):
    """Gitea provider using Gitea REST API v1."""

    kind = "gitea"

    def __init__(self, timeout_s: int = 15) -> None:
        self._http = HttpClient(timeout_s=timeout_s)

    def can_handle(self, host: str) -> bool:
        """Fallback matcher for self-hosted forges.

        Refuses GitHub hosts so GitHubProvider can claim them; every other
        host is assumed to be Gitea-compatible. Tighten this heuristic (or
        use provider hints) when more providers are added.
        """
        lowered = host.lower()
        looks_like_github = (
            lowered in ("github.com", "api.github.com")
            or lowered.endswith(".github.com")
        )
        return not looks_like_github

    def _headers(self, token: str) -> Dict[str, str]:
        # "Authorization: token <TOKEN>" is the broadly compatible Gitea
        # scheme; newer versions may also accept Bearer tokens.
        return {
            "Authorization": f"token {token}",
            "Accept": "application/json",
            "User-Agent": "pkgmgr",
        }

    def repo_exists(self, token: str, spec: RepoSpec) -> bool:
        """True when GET /repos/{owner}/{name} answers 2xx; False on 404."""
        lookup = f"{self._api_base(spec.host)}/api/v1/repos/{spec.owner}/{spec.name}"
        try:
            answer = self._http.request_json("GET", lookup, headers=self._headers(token))
        except HttpError as exc:
            if exc.status == 404:
                return False
            raise
        return 200 <= answer.status < 300

    def create_repo(self, token: str, spec: RepoSpec) -> EnsureResult:
        """Create under the org endpoint first; fall back to the user endpoint."""
        root = self._api_base(spec.host)
        body: Dict[str, Any] = {"name": spec.name, "private": bool(spec.private)}
        if spec.description:
            body["description"] = spec.description
        if spec.default_branch:
            body["default_branch"] = spec.default_branch
        auth = self._headers(token)
        org_resp = None
        try:
            org_resp = self._http.request_json(
                "POST",
                f"{root}/api/v1/orgs/{spec.owner}/repos",
                headers=auth,
                payload=body,
            )
        except HttpError:
            # Typical org failures: 404 (not an org), 403 (no rights),
            # 401 (bad token) — retry against the user endpoint below.
            pass
        if org_resp is not None and 200 <= org_resp.status < 300:
            link = (org_resp.json or {}).get("html_url") if org_resp.json else None
            return EnsureResult(
                status="created",
                message="Repository created (org).",
                url=str(link) if link else None,
            )
        user_resp = self._http.request_json(
            "POST",
            f"{root}/api/v1/user/repos",
            headers=auth,
            payload=body,
        )
        if 200 <= user_resp.status < 300:
            link = (user_resp.json or {}).get("html_url") if user_resp.json else None
            return EnsureResult(
                status="created",
                message="Repository created (user).",
                url=str(link) if link else None,
            )
        return EnsureResult(
            status="failed",
            message=f"Failed to create repository (status {user_resp.status}).",
        )

View File

@@ -0,0 +1,101 @@
# src/pkgmgr/core/remote_provisioning/providers/github.py
from __future__ import annotations
from typing import Any, Dict
from ..http.client import HttpClient
from ..http.errors import HttpError
from ..types import EnsureResult, RepoSpec
from .base import RemoteProvider
class GitHubProvider(RemoteProvider):
    """GitHub provider using GitHub REST API."""

    kind = "github"

    def __init__(self, timeout_s: int = 15) -> None:
        self._http = HttpClient(timeout_s=timeout_s)

    def can_handle(self, host: str) -> bool:
        """Match github.com, api.github.com and *.github.com hosts."""
        lowered = host.lower()
        if lowered in ("github.com", "api.github.com"):
            return True
        return lowered.endswith(".github.com")

    def _api_base(self, host: str) -> str:
        """Return the REST API base URL.

        Public GitHub uses https://api.github.com; GitHub Enterprise
        Server exposes the API under https://<host>/api/v3.
        """
        lowered = host.lower()
        if lowered in ("github.com", "api.github.com"):
            return "https://api.github.com"
        if host.startswith(("http://", "https://")):
            return host.rstrip("/") + "/api/v3"
        return f"https://{host}/api/v3"

    def _headers(self, token: str) -> Dict[str, str]:
        # Bearer auth plus the GitHub JSON media type.
        return {
            "Authorization": f"Bearer {token}",
            "Accept": "application/vnd.github+json",
            "User-Agent": "pkgmgr",
        }

    def repo_exists(self, token: str, spec: RepoSpec) -> bool:
        """True when GET /repos/{owner}/{name} answers 2xx; False on 404."""
        lookup = f"{self._api_base(spec.host)}/repos/{spec.owner}/{spec.name}"
        try:
            answer = self._http.request_json("GET", lookup, headers=self._headers(token))
        except HttpError as exc:
            if exc.status == 404:
                return False
            raise
        return 200 <= answer.status < 300

    def create_repo(self, token: str, spec: RepoSpec) -> EnsureResult:
        """Create under the org endpoint first; fall back to the user endpoint."""
        api = self._api_base(spec.host)
        body: Dict[str, Any] = {"name": spec.name, "private": bool(spec.private)}
        if spec.description:
            body["description"] = spec.description
        if spec.default_branch:
            body["default_branch"] = spec.default_branch
        auth = self._headers(token)
        org_resp = None
        try:
            org_resp = self._http.request_json(
                "POST", f"{api}/orgs/{spec.owner}/repos", headers=auth, payload=body
            )
        except HttpError:
            # Owner is not an org, or the token lacks org rights: fall
            # through to the user-scoped endpoint below.
            pass
        if org_resp is not None and 200 <= org_resp.status < 300:
            link = (org_resp.json or {}).get("html_url") if org_resp.json else None
            return EnsureResult(
                status="created",
                message="Repository created (org).",
                url=str(link) if link else None,
            )
        user_resp = self._http.request_json(
            "POST", f"{api}/user/repos", headers=auth, payload=body
        )
        if 200 <= user_resp.status < 300:
            link = (user_resp.json or {}).get("html_url") if user_resp.json else None
            return EnsureResult(
                status="created",
                message="Repository created (user).",
                url=str(link) if link else None,
            )
        return EnsureResult(
            status="failed",
            message=f"Failed to create repository (status {user_resp.status}).",
        )

View File

@@ -0,0 +1,30 @@
# src/pkgmgr/core/remote_provisioning/registry.py
from __future__ import annotations
from dataclasses import dataclass
from typing import List, Optional
from .providers.base import RemoteProvider
from .providers.gitea import GiteaProvider
from .providers.github import GitHubProvider
@dataclass
class ProviderRegistry:
    """Look up the provider implementation responsible for a given host."""

    providers: List[RemoteProvider]

    @classmethod
    def default(cls) -> "ProviderRegistry":
        # Order matters: more specific providers first; fallback providers last.
        return cls(providers=[GitHubProvider(), GiteaProvider()])

    def resolve(self, host: str) -> Optional[RemoteProvider]:
        """Return the first provider claiming *host*, or None if none match."""
        for candidate in self.providers:
            try:
                claims_host = bool(candidate.can_handle(host))
            except Exception:
                # A misbehaving provider must not block the remaining ones.
                claims_host = False
            if claims_host:
                return candidate
        return None

View File

@@ -0,0 +1,61 @@
# src/pkgmgr/core/remote_provisioning/types.py
from __future__ import annotations
from dataclasses import dataclass
from typing import Literal, Optional
EnsureStatus = Literal["exists", "created", "skipped", "failed"]
@dataclass(frozen=True)
class ProviderHint:
    """Optional hint to force a provider kind."""

    # Provider identifier to force, e.g. "gitea" or "github"; None = autodetect.
    kind: Optional[str] = None
@dataclass(frozen=True)
class RepoSpec:
    """Desired remote repository."""

    # Forge hostname (or full URL), e.g. "github.com".
    host: str
    # Account or organization that should own the repository.
    owner: str
    # Repository name.
    name: str
    # Repositories are provisioned private unless explicitly requested public.
    private: bool = True
    # Optional human-readable description; empty string means "none".
    description: str = ""
    # Branch to set as default after creation; None keeps the provider default.
    default_branch: Optional[str] = None
@dataclass(frozen=True)
class EnsureResult:
    """Outcome of an ensure/create operation against a remote provider."""

    # One of "exists", "created", "skipped", "failed" (see EnsureStatus).
    status: EnsureStatus
    # Human-readable summary suitable for CLI output.
    message: str
    # Web URL of the repository, when the provider reported one.
    url: Optional[str] = None
class RemoteProvisioningError(RuntimeError):
    """Base class for remote provisioning errors."""


class AuthError(RemoteProvisioningError):
    """Authentication failed (401)."""


class PermissionError(RemoteProvisioningError):
    """Permission denied (403).

    NOTE(review): this name shadows the builtin ``PermissionError`` inside
    this module; code catching it must import this variant explicitly.
    """


class NotFoundError(RemoteProvisioningError):
    """Resource not found (404)."""


class PolicyError(RemoteProvisioningError):
    """Provider/org policy prevents the operation."""


class NetworkError(RemoteProvisioningError):
    """Network/transport errors."""


class UnsupportedProviderError(RemoteProvisioningError):
    """No provider matched for the given host."""

View File

@@ -0,0 +1,124 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Central repository path resolver.
Goal:
- Provide ONE place to define where packaging / changelog / metadata files live.
- Prefer modern layout (packaging/*) but stay backwards-compatible with legacy
root-level paths.
Both:
- readers (pkgmgr.core.version.source)
- writers (pkgmgr.actions.release.workflow)
should use this module instead of hardcoding paths.
"""
from __future__ import annotations
import os
from dataclasses import dataclass
from typing import Iterable, Optional
@dataclass(frozen=True)
class RepoPaths:
    """Canonical file locations for one repository checkout.

    Optional fields are None when no matching file was found; callers must
    treat every Optional path as "may not exist".
    """

    # Absolute path of the repository root.
    repo_dir: str
    # Always repo_dir/pyproject.toml (existence not guaranteed).
    pyproject_toml: str
    # Always repo_dir/flake.nix (existence not guaranteed).
    flake_nix: str
    # Human changelog (typically Markdown)
    changelog_md: Optional[str]
    # Packaging-related files
    arch_pkgbuild: Optional[str]
    debian_changelog: Optional[str]
    rpm_spec: Optional[str]
def _first_existing(candidates: Iterable[str]) -> Optional[str]:
for p in candidates:
if p and os.path.isfile(p):
return p
return None
def _find_first_spec_in_dir(dir_path: str) -> Optional[str]:
if not os.path.isdir(dir_path):
return None
try:
for fn in sorted(os.listdir(dir_path)):
if fn.endswith(".spec"):
p = os.path.join(dir_path, fn)
if os.path.isfile(p):
return p
except OSError:
return None
return None
def resolve_repo_paths(repo_dir: str) -> RepoPaths:
    """
    Resolve canonical file locations for a repository.

    Preferences (new layout first, legacy fallback second):
    - PKGBUILD: packaging/arch/PKGBUILD -> PKGBUILD
    - Debian changelog: packaging/debian/changelog -> debian/changelog
    - RPM spec: packaging/fedora/package-manager.spec
      -> first *.spec in packaging/fedora
      -> first *.spec in repo root
    - CHANGELOG.md: CHANGELOG.md -> packaging/CHANGELOG.md (optional fallback)

    Notes:
    - This resolver only returns paths; it does not read/parse files.
    - Callers should treat Optional paths as "may not exist".
    """
    repo_dir = os.path.abspath(repo_dir)

    def _in_repo(*parts: str) -> str:
        # All candidates live under the (absolutized) repository root.
        return os.path.join(repo_dir, *parts)

    changelog_md = _first_existing(
        [
            _in_repo("CHANGELOG.md"),
            _in_repo("packaging", "CHANGELOG.md"),
        ]
    )
    arch_pkgbuild = _first_existing(
        [
            _in_repo("packaging", "arch", "PKGBUILD"),
            _in_repo("PKGBUILD"),
        ]
    )
    debian_changelog = _first_existing(
        [
            _in_repo("packaging", "debian", "changelog"),
            _in_repo("debian", "changelog"),
        ]
    )

    # RPM spec: canonical name first, then any *.spec in packaging/fedora,
    # finally any *.spec in the repository root.
    rpm_spec = (
        _first_existing([_in_repo("packaging", "fedora", "package-manager.spec")])
        or _find_first_spec_in_dir(_in_repo("packaging", "fedora"))
        or _find_first_spec_in_dir(repo_dir)
    )

    return RepoPaths(
        repo_dir=repo_dir,
        pyproject_toml=_in_repo("pyproject.toml"),
        flake_nix=_in_repo("flake.nix"),
        changelog_md=changelog_md,
        arch_pkgbuild=arch_pkgbuild,
        debian_changelog=debian_changelog,
        rpm_spec=rpm_spec,
    )

View File

@@ -0,0 +1,168 @@
from __future__ import annotations
import json
import re
import shutil
import subprocess
from dataclasses import dataclass
from typing import Iterable, Optional, Tuple
@dataclass(frozen=True)
class InstalledVersion:
    """
    Represents a resolved installed version and the matched name.
    """

    # Package/distribution name as reported by the backend that matched.
    name: str
    # Version string exactly as reported (no normalization applied).
    version: str
def _normalize(name: str) -> str:
return re.sub(r"[-_.]+", "-", (name or "").strip()).lower()
def _unique_candidates(names: Iterable[str]) -> list[str]:
    """Drop falsy entries and de-duplicate by normalized key, keeping order.

    The ORIGINAL spelling of the first occurrence is preserved; later
    entries that normalize to the same key are discarded.
    """
    result: list[str] = []
    seen_keys: set[str] = set()
    for name in names:
        if not name:
            continue
        key = _normalize(name)
        if key not in seen_keys:
            seen_keys.add(key)
            result.append(name)
    return result
def get_installed_python_version(*candidates: str) -> Optional[InstalledVersion]:
    """
    Detect installed Python package version in the CURRENT Python environment.

    Strategy:
    1) Exact normalized match using importlib.metadata.version()
    2) Substring fallback by scanning installed distributions

    Returns None when importlib.metadata is unavailable or no candidate
    matches an installed distribution.
    """
    try:
        from importlib import metadata as importlib_metadata
    except Exception:
        # Interpreter without importlib.metadata: nothing we can do.
        return None
    candidates = _unique_candidates(candidates)
    # Query each candidate under common spellings (dash/underscore/dot),
    # since distribution names are not consistently normalized on disk.
    expanded: list[str] = []
    for c in candidates:
        n = _normalize(c)
        expanded.extend([c, n, n.replace("-", "_"), n.replace("-", ".")])
    expanded = _unique_candidates(expanded)
    # 1) Direct queries first (fast path)
    for name in expanded:
        try:
            version = importlib_metadata.version(name)
            return InstalledVersion(name=name, version=version)
        except Exception:
            # PackageNotFoundError and friends: try the next spelling.
            continue
    # 2) Fallback: scan distributions (last resort)
    try:
        dists = importlib_metadata.distributions()
    except Exception:
        return None
    norm_candidates = {_normalize(c) for c in candidates}
    for dist in dists:
        dist_name = dist.metadata.get("Name", "") or ""
        norm_dist = _normalize(dist_name)
        for c in norm_candidates:
            # Substring match in either direction (e.g. "pkg" vs "pkg-cli").
            if c and (c in norm_dist or norm_dist in c):
                ver = getattr(dist, "version", None)
                if ver:
                    return InstalledVersion(name=dist_name, version=ver)
    return None
def _run_nix(args: list[str]) -> Tuple[int, str, str]:
p = subprocess.run(
args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
check=False,
)
return p.returncode, p.stdout or "", p.stderr or ""
def _extract_version_from_store_path(path: str) -> Optional[str]:
if not path:
return None
base = path.rstrip("/").split("/")[-1]
if "-" not in base:
return None
tail = base.split("-")[-1]
if re.match(r"\d+(\.\d+){0,3}([a-z0-9+._-]*)?$", tail, re.I):
return tail
return None
def get_installed_nix_profile_version(*candidates: str) -> Optional[InstalledVersion]:
    """
    Detect installed version from the current Nix profile.

    Strategy:
    1) JSON output (exact normalized match)
    2) Text fallback (substring)

    Returns None when nix is not installed, no candidates were given,
    or nothing in the profile matches.
    """
    # Without the nix binary there is nothing to inspect.
    if shutil.which("nix") is None:
        return None
    candidates = _unique_candidates(candidates)
    if not candidates:
        return None
    norm_candidates = {_normalize(c) for c in candidates}
    # Preferred: JSON output
    rc, out, _ = _run_nix(["nix", "profile", "list", "--json"])
    if rc == 0 and out.strip():
        try:
            data = json.loads(out)
            # Newer nix emits "elements"; keep "items" as a defensive fallback.
            elements = data.get("elements") or data.get("items") or {}
            if isinstance(elements, dict):
                for elem in elements.values():
                    if not isinstance(elem, dict):
                        continue
                    name = (elem.get("name") or elem.get("pname") or "").strip()
                    version = (elem.get("version") or "").strip()
                    norm_name = _normalize(name)
                    if norm_name in norm_candidates:
                        if version:
                            return InstalledVersion(name=name, version=version)
                        # No explicit version field: derive it from a store path.
                        for sp in elem.get("storePaths", []) or []:
                            guess = _extract_version_from_store_path(sp)
                            if guess:
                                return InstalledVersion(name=name, version=guess)
        except Exception:
            # Malformed JSON etc.: fall through to the text-mode parser.
            pass
    # Fallback: text mode
    rc, out, _ = _run_nix(["nix", "profile", "list"])
    if rc != 0:
        return None
    for line in out.splitlines():
        norm_line = _normalize(line)
        for c in norm_candidates:
            if c in norm_line:
                # Try an inline version number first ...
                m = re.search(r"\b\d+(\.\d+){0,3}[a-z0-9+._-]*\b", line, re.I)
                if m:
                    return InstalledVersion(name=c, version=m.group(0))
                # ... otherwise derive it from a trailing store path.
                if "/nix/store/" in line:
                    guess = _extract_version_from_store_path(line.split()[-1])
                    if guess:
                        return InstalledVersion(name=c, version=guess)
    return None

View File

@@ -1,21 +1,4 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Helpers to extract version information from various packaging files.
All functions take a repository directory and return either a version
string or None if the corresponding file or version field is missing.
Supported sources:
- pyproject.toml (PEP 621, [project].version)
- flake.nix (version = "X.Y.Z";)
- PKGBUILD (pkgver / pkgrel)
- debian/changelog (first entry line: package (version) ...)
- RPM spec file (package-manager.spec: Version / Release)
- Ansible Galaxy (galaxy.yml or meta/main.yml)
"""
# src/pkgmgr/core/version/source.py
from __future__ import annotations
import os
@@ -24,52 +7,72 @@ from typing import Optional
import yaml
from pkgmgr.core.repository.paths import resolve_repo_paths
def read_pyproject_version(repo_dir: str) -> Optional[str]:
"""
Read the version from pyproject.toml in repo_dir, if present.
Expects a PEP 621-style [project] table with a 'version' field.
Returns the version string or None.
"""
path = os.path.join(repo_dir, "pyproject.toml")
if not os.path.exists(path):
paths = resolve_repo_paths(repo_dir)
path = paths.pyproject_toml
if not os.path.isfile(path):
return None
try:
try:
import tomllib # Python 3.11+
except ModuleNotFoundError: # pragma: no cover
tomllib = None
if tomllib is None:
return None
import tomllib # Python 3.11+
except Exception:
import tomli as tomllib # type: ignore
try:
with open(path, "rb") as f:
data = tomllib.load(f)
project = data.get("project", {})
if isinstance(project, dict):
version = project.get("version")
if isinstance(version, str):
return version.strip() or None
project = data.get("project") or {}
version = project.get("version")
return str(version).strip() if version else None
except Exception:
# Intentionally swallow errors and fall back to None.
return None
return None
def read_pyproject_project_name(repo_dir: str) -> Optional[str]:
"""
Read distribution name from pyproject.toml ([project].name).
This is required to correctly resolve installed Python package
versions via importlib.metadata.
"""
paths = resolve_repo_paths(repo_dir)
path = paths.pyproject_toml
if not os.path.isfile(path):
return None
try:
import tomllib # Python 3.11+
except Exception:
import tomli as tomllib # type: ignore
try:
with open(path, "rb") as f:
data = tomllib.load(f)
project = data.get("project") or {}
name = project.get("name")
return str(name).strip() if name else None
except Exception:
return None
def read_flake_version(repo_dir: str) -> Optional[str]:
"""
Read the version from flake.nix in repo_dir, if present.
Looks for a line like:
version = "1.2.3";
and returns the string inside the quotes.
Looks for:
version = "X.Y.Z";
"""
path = os.path.join(repo_dir, "flake.nix")
if not os.path.exists(path):
paths = resolve_repo_paths(repo_dir)
path = paths.flake_nix
if not os.path.isfile(path):
return None
try:
@@ -81,22 +84,22 @@ def read_flake_version(repo_dir: str) -> Optional[str]:
match = re.search(r'version\s*=\s*"([^"]+)"', text)
if not match:
return None
version = match.group(1).strip()
return version or None
return match.group(1).strip() or None
def read_pkgbuild_version(repo_dir: str) -> Optional[str]:
"""
Read the version from PKGBUILD in repo_dir, if present.
Read the version from PKGBUILD (preferring packaging/arch/PKGBUILD).
Expects:
pkgver=1.2.3
pkgrel=1
Returns either "1.2.3-1" (if both are present) or just "1.2.3".
Combines pkgver and pkgrel if both exist:
pkgver=1.2.3
pkgrel=1
-> 1.2.3-1
"""
path = os.path.join(repo_dir, "PKGBUILD")
if not os.path.exists(path):
paths = resolve_repo_paths(repo_dir)
path = paths.arch_pkgbuild
if not path or not os.path.isfile(path):
return None
try:
@@ -121,15 +124,19 @@ def read_pkgbuild_version(repo_dir: str) -> Optional[str]:
def read_debian_changelog_version(repo_dir: str) -> Optional[str]:
"""
Read the latest Debian version from debian/changelog in repo_dir, if present.
Read the latest version from debian changelog.
The first non-empty line typically looks like:
package-name (1.2.3-1) unstable; urgency=medium
Preferred path:
packaging/debian/changelog
Fallback:
debian/changelog
We extract the text inside the first parentheses.
Expected format:
package (1.2.3-1) unstable; urgency=medium
"""
path = os.path.join(repo_dir, "debian", "changelog")
if not os.path.exists(path):
paths = resolve_repo_paths(repo_dir)
path = paths.debian_changelog
if not path or not os.path.isfile(path):
return None
try:
@@ -140,8 +147,7 @@ def read_debian_changelog_version(repo_dir: str) -> Optional[str]:
continue
match = re.search(r"\(([^)]+)\)", line)
if match:
version = match.group(1).strip()
return version or None
return match.group(1).strip() or None
break
except Exception:
return None
@@ -151,19 +157,21 @@ def read_debian_changelog_version(repo_dir: str) -> Optional[str]:
def read_spec_version(repo_dir: str) -> Optional[str]:
"""
Read the version from a RPM spec file.
Read the version from an RPM spec file.
For now, we assume a fixed file name 'package-manager.spec'
in repo_dir with lines like:
Preferred paths:
packaging/fedora/package-manager.spec
packaging/fedora/*.spec
repo_root/*.spec
Version: 1.2.3
Release: 1%{?dist}
Returns either "1.2.3-1" (if Release is present) or "1.2.3".
Any RPM macro suffix like '%{?dist}' is stripped from the release.
Combines:
Version: 1.2.3
Release: 1%{?dist}
-> 1.2.3-1
"""
path = os.path.join(repo_dir, "package-manager.spec")
if not os.path.exists(path):
paths = resolve_repo_paths(repo_dir)
path = paths.rpm_spec
if not path or not os.path.isfile(path):
return None
try:
@@ -180,10 +188,7 @@ def read_spec_version(repo_dir: str) -> Optional[str]:
rel_match = re.search(r"^Release:\s*(.+)$", text, re.MULTILINE)
if rel_match:
release_raw = rel_match.group(1).strip()
# Strip common RPM macro suffix like %... (e.g. 1%{?dist})
release = release_raw.split("%", 1)[0].strip()
# Also strip anything after first whitespace, just in case
release = release.split(" ", 1)[0].strip()
release = release_raw.split("%", 1)[0].split(" ", 1)[0].strip()
if release:
return f"{version}-{release}"
@@ -192,40 +197,35 @@ def read_spec_version(repo_dir: str) -> Optional[str]:
def read_ansible_galaxy_version(repo_dir: str) -> Optional[str]:
"""
Read the version from Ansible Galaxy metadata, if present.
Read the version from Ansible Galaxy metadata.
Supported locations:
- galaxy.yml (preferred for modern roles/collections)
- meta/main.yml (legacy style roles; uses galaxy_info.version or version)
Supported:
- galaxy.yml
- meta/main.yml (galaxy_info.version or version)
"""
# 1) galaxy.yml in repo root
galaxy_path = os.path.join(repo_dir, "galaxy.yml")
if os.path.exists(galaxy_path):
galaxy_yml = os.path.join(repo_dir, "galaxy.yml")
if os.path.isfile(galaxy_yml):
try:
with open(galaxy_path, "r", encoding="utf-8") as f:
with open(galaxy_yml, "r", encoding="utf-8") as f:
data = yaml.safe_load(f) or {}
version = data.get("version")
if isinstance(version, str) and version.strip():
return version.strip()
except Exception:
# Ignore parse errors and fall through to meta/main.yml
pass
# 2) meta/main.yml (classic Ansible role)
meta_path = os.path.join(repo_dir, "meta", "main.yml")
if os.path.exists(meta_path):
meta_yml = os.path.join(repo_dir, "meta", "main.yml")
if os.path.isfile(meta_yml):
try:
with open(meta_path, "r", encoding="utf-8") as f:
with open(meta_yml, "r", encoding="utf-8") as f:
data = yaml.safe_load(f) or {}
# Preferred: galaxy_info.version
galaxy_info = data.get("galaxy_info") or {}
if isinstance(galaxy_info, dict):
version = galaxy_info.get("version")
if isinstance(version, str) and version.strip():
return version.strip()
# Fallback: top-level 'version'
version = data.get("version")
if isinstance(version, str) and version.strip():
return version.strip()

View File

@@ -1,143 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
E2E integration tests for the `pkgmgr mirror` command family.
This test class covers:
- pkgmgr mirror --help
- pkgmgr mirror list --preview --all
- pkgmgr mirror diff --preview --all
- pkgmgr mirror merge config file --preview --all
- pkgmgr mirror setup --preview --all
All of these subcommands are fully wired at CLI level and do not
require mocks. With --preview, merge and setup do not perform
destructive actions, making them safe for CI execution.
"""
from __future__ import annotations
import io
import runpy
import sys
import unittest
from contextlib import redirect_stdout, redirect_stderr
class TestIntegrationMirrorCommands(unittest.TestCase):
"""
E2E tests for `pkgmgr mirror` commands.
"""
# ------------------------------------------------------------
# Helper
# ------------------------------------------------------------
def _run_pkgmgr(self, args: list[str]) -> str:
"""
Execute pkgmgr with the given arguments and return captured stdout+stderr.
- Treat SystemExit(0) or SystemExit(None) as success.
- Convert non-zero exit codes into AssertionError.
"""
original_argv = list(sys.argv)
buffer = io.StringIO()
cmd_repr = "pkgmgr " + " ".join(args)
try:
sys.argv = ["pkgmgr"] + args
try:
with redirect_stdout(buffer), redirect_stderr(buffer):
runpy.run_module("pkgmgr", run_name="__main__")
except SystemExit as exc:
code = exc.code if isinstance(exc.code, int) else None
if code not in (0, None):
raise AssertionError(
f"{cmd_repr!r} failed with exit code {exc.code}. "
"Scroll up to inspect the pkgmgr output."
) from exc
return buffer.getvalue()
finally:
sys.argv = original_argv
# ------------------------------------------------------------
# Tests
# ------------------------------------------------------------
def test_mirror_help(self) -> None:
"""
Ensure `pkgmgr mirror --help` runs successfully
and prints a usage message for the mirror command.
"""
output = self._run_pkgmgr(["mirror", "--help"])
self.assertIn("usage:", output)
self.assertIn("pkgmgr mirror", output)
def test_mirror_list_preview_all(self) -> None:
"""
`pkgmgr mirror list --preview --all` should run without error
and produce some output for the selected repositories.
"""
output = self._run_pkgmgr(["mirror", "list", "--preview", "--all"])
# Do not assert specific wording; just ensure something was printed.
self.assertTrue(
output.strip(),
msg="Expected `pkgmgr mirror list --preview --all` to produce output.",
)
def test_mirror_diff_preview_all(self) -> None:
"""
`pkgmgr mirror diff --preview --all` should run without error
and produce some diagnostic output (diff header, etc.).
"""
output = self._run_pkgmgr(["mirror", "diff", "--preview", "--all"])
self.assertTrue(
output.strip(),
msg="Expected `pkgmgr mirror diff --preview --all` to produce output.",
)
def test_mirror_merge_config_to_file_preview_all(self) -> None:
"""
`pkgmgr mirror merge config file --preview --all` should run without error.
In preview mode this does not change either config or MIRRORS files;
it only prints what would be merged.
"""
output = self._run_pkgmgr(
[
"mirror",
"merge",
"config",
"file",
"--preview",
"--all",
]
)
self.assertTrue(
output.strip(),
msg=(
"Expected `pkgmgr mirror merge config file --preview --all` "
"to produce output."
),
)
def test_mirror_setup_preview_all(self) -> None:
"""
`pkgmgr mirror setup --preview --all` should run without error.
In preview mode only the intended Git operations and remote
suggestions are printed; no real changes are made.
"""
output = self._run_pkgmgr(["mirror", "setup", "--preview", "--all"])
self.assertTrue(
output.strip(),
msg="Expected `pkgmgr mirror setup --preview --all` to produce output.",
)
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,70 @@
from __future__ import annotations
import os
import shutil
import subprocess
import unittest
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
def _run_help(cmd: list[str], label: str) -> str:
    """Run *cmd* from the project root and return its combined output.

    Raises AssertionError when the command exits non-zero, since --help
    invocations are always expected to succeed.
    """
    print(f"\n[TEST] Running ({label}): {' '.join(cmd)}")
    proc = subprocess.run(
        cmd,
        cwd=PROJECT_ROOT,
        text=True,
        stdout=subprocess.PIPE,
        # Interleave stderr into stdout so failures show the full transcript.
        stderr=subprocess.STDOUT,
        check=False,
        env=os.environ.copy(),
    )
    print(proc.stdout.rstrip())
    # For --help we expect success (0). Anything else is an error.
    if proc.returncode != 0:
        raise AssertionError(
            f"[TEST] Help command failed ({label}).\n"
            f"Command: {' '.join(cmd)}\n"
            f"Exit code: {proc.returncode}\n"
            f"--- output ---\n{proc.stdout}\n"
        )
    return proc.stdout
class TestPublishHelpE2E(unittest.TestCase):
    """E2E checks that the `publish` command is wired into the CLI help."""

    def test_pkgmgr_publish_help(self) -> None:
        """`pkgmgr publish --help` succeeds and prints its usage text."""
        out = _run_help(["pkgmgr", "publish", "--help"], "pkgmgr publish --help")
        self.assertIn("usage:", out)
        self.assertIn("publish", out)

    def test_pkgmgr_help_mentions_publish(self) -> None:
        """Top-level `pkgmgr --help` lists the publish subcommand."""
        out = _run_help(["pkgmgr", "--help"], "pkgmgr --help")
        self.assertIn("publish", out)

    def test_nix_run_pkgmgr_publish_help(self) -> None:
        """Same check through `nix run` (skipped when nix is absent)."""
        if shutil.which("nix") is None:
            self.skipTest("nix is not available in this environment")
        out = _run_help(
            ["nix", "run", ".#pkgmgr", "--", "publish", "--help"],
            "nix run .#pkgmgr -- publish --help",
        )
        self.assertIn("usage:", out)
        self.assertIn("publish", out)

    def test_nix_run_pkgmgr_help_mentions_publish(self) -> None:
        """Top-level help via `nix run` also mentions publish."""
        if shutil.which("nix") is None:
            self.skipTest("nix is not available in this environment")
        out = _run_help(
            ["nix", "run", ".#pkgmgr", "--", "--help"],
            "nix run .#pkgmgr -- --help",
        )
        self.assertIn("publish", out)
if __name__ == "__main__":
unittest.main()

View File

@@ -1,6 +1,6 @@
"""
Integration test: update all configured repositories using
--clone-mode https and --no-verification.
--clone-mode shallow and --no-verification, WITHOUT system updates.
This test is intended to be run inside the Docker container where:
- network access is available,
@@ -8,8 +8,8 @@ This test is intended to be run inside the Docker container where:
- and it is safe to perform real git operations.
It passes if BOTH commands complete successfully (in separate tests):
1) pkgmgr update --all --clone-mode https --no-verification --system-update
2) nix run .#pkgmgr -- update --all --clone-mode https --no-verification --system-update
1) pkgmgr update --all --clone-mode shallow --no-verification
2) nix run .#pkgmgr -- update --all --clone-mode shallow --no-verification
"""
from __future__ import annotations
@@ -38,7 +38,7 @@ def _make_temp_gitconfig_with_safe_dirs(home: Path) -> Path:
return gitconfig
class TestIntegrationUpdateAllHttps(unittest.TestCase):
class TestIntegrationUpdateAllshallowNoSystem(unittest.TestCase):
def _common_env(self, home_dir: str) -> dict[str, str]:
env = os.environ.copy()
env["HOME"] = home_dir
@@ -86,32 +86,30 @@ class TestIntegrationUpdateAllHttps(unittest.TestCase):
remove_pkgmgr_from_nix_profile()
nix_profile_list_debug("AFTER CLEANUP")
def test_update_all_repositories_https_pkgmgr(self) -> None:
def test_update_all_repositories_shallow_pkgmgr_no_system(self) -> None:
self._common_setup()
with tempfile.TemporaryDirectory(prefix="pkgmgr-updateall-") as tmp:
with tempfile.TemporaryDirectory(prefix="pkgmgr-updateall-nosys-") as tmp:
env = self._common_env(tmp)
args = [
"update",
"--all",
"--clone-mode",
"https",
"shallow",
"--no-verification",
"--system-update",
]
self._run_cmd(["pkgmgr", *args], label="pkgmgr", env=env)
pkgmgr_help_debug()
def test_update_all_repositories_https_nix_pkgmgr(self) -> None:
def test_update_all_repositories_shallow_nix_pkgmgr_no_system(self) -> None:
self._common_setup()
with tempfile.TemporaryDirectory(prefix="pkgmgr-updateall-nix-") as tmp:
with tempfile.TemporaryDirectory(prefix="pkgmgr-updateall-nosys-nix-") as tmp:
env = self._common_env(tmp)
args = [
"update",
"--all",
"--clone-mode",
"https",
"shallow",
"--no-verification",
"--system-update",
]
self._run_cmd(
["nix", "run", ".#pkgmgr", "--", *args],

View File

@@ -0,0 +1,124 @@
"""
Integration test: update ONLY the 'pkgmgr' repository with system updates enabled.
This test is intended to be run inside the Docker container where:
- network access is available,
- the config/config.yaml is present,
- and it is safe to perform real git operations.
It passes if BOTH commands complete successfully (in separate tests):
1) pkgmgr update pkgmgr --clone-mode shallow --no-verification --system
2) nix run .#pkgmgr -- update pkgmgr --clone-mode shallow --no-verification --system
"""
from __future__ import annotations
import os
import subprocess
import tempfile
import unittest
from pathlib import Path
from test_install_pkgmgr_shallow import (
nix_profile_list_debug,
remove_pkgmgr_from_nix_profile,
pkgmgr_help_debug,
)
def _make_temp_gitconfig_with_safe_dirs(home: Path) -> Path:
gitconfig = home / ".gitconfig"
gitconfig.write_text(
"[safe]\n"
"\tdirectory = /src\n"
"\tdirectory = /src/.git\n"
"\tdirectory = *\n"
)
return gitconfig
class TestIntegrationUpdatePkgmgrWithSystem(unittest.TestCase):
    """Integration tests: `pkgmgr update pkgmgr ... --system` via CLI and nix.

    Performs REAL git/nix operations; intended for the Docker CI container.
    """

    def _common_env(self, home_dir: str) -> dict[str, str]:
        """Build an isolated environment with a throwaway HOME and gitconfig."""
        env = os.environ.copy()
        env["HOME"] = home_dir
        home = Path(home_dir)
        home.mkdir(parents=True, exist_ok=True)
        env["GIT_CONFIG_GLOBAL"] = str(_make_temp_gitconfig_with_safe_dirs(home))
        # Ensure nix is discoverable if the container has it
        env["PATH"] = "/nix/var/nix/profiles/default/bin:" + env.get("PATH", "")
        return env

    def _run_cmd(self, cmd: list[str], label: str, env: dict[str, str]) -> None:
        """Run *cmd*, echo its output, and fail the test on non-zero exit."""
        cmd_repr = " ".join(cmd)
        print(f"\n[TEST] Running ({label}): {cmd_repr}")
        proc = subprocess.run(
            cmd,
            check=False,
            cwd=os.getcwd(),
            env=env,
            text=True,
            stdout=subprocess.PIPE,
            # Merge stderr so the failure transcript is complete.
            stderr=subprocess.STDOUT,
        )
        print(proc.stdout.rstrip())
        if proc.returncode != 0:
            print(f"\n[TEST] Command failed ({label})")
            print(f"[TEST] Command : {cmd_repr}")
            print(f"[TEST] Exit code: {proc.returncode}")
            nix_profile_list_debug(f"ON FAILURE ({label})")
            raise AssertionError(
                f"({label}) {cmd_repr!r} failed with exit code {proc.returncode}.\n\n"
                f"--- output ---\n{proc.stdout}\n"
            )

    def _common_setup(self) -> None:
        """Remove any stale pkgmgr entry from the nix profile before the run."""
        nix_profile_list_debug("BEFORE CLEANUP")
        remove_pkgmgr_from_nix_profile()
        nix_profile_list_debug("AFTER CLEANUP")

    def test_update_pkgmgr_shallow_pkgmgr_with_system(self) -> None:
        """Update only pkgmgr (shallow clone, --system) via the pkgmgr CLI."""
        self._common_setup()
        with tempfile.TemporaryDirectory(prefix="pkgmgr-update-pkgmgr-sys-") as tmp:
            env = self._common_env(tmp)
            args = [
                "update",
                "pkgmgr",
                "--clone-mode",
                "shallow",
                "--no-verification",
                "--system",
            ]
            self._run_cmd(["pkgmgr", *args], label="pkgmgr", env=env)
            pkgmgr_help_debug()

    def test_update_pkgmgr_shallow_nix_pkgmgr_with_system(self) -> None:
        """Same update, invoked through `nix run .#pkgmgr`."""
        self._common_setup()
        with tempfile.TemporaryDirectory(prefix="pkgmgr-update-pkgmgr-sys-nix-") as tmp:
            env = self._common_env(tmp)
            args = [
                "update",
                "pkgmgr",
                "--clone-mode",
                "shallow",
                "--no-verification",
                "--system",
            ]
            self._run_cmd(
                ["nix", "run", ".#pkgmgr", "--", *args],
                label="nix run .#pkgmgr",
                env=env,
            )
            pkgmgr_help_debug()
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,172 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
CLI integration tests for `pkgmgr mirror`.
These tests validate:
- CLI argument parsing
- command dispatch
- command orchestration
All side effects (git, network, remote provisioning, filesystem writes)
are patched to keep tests deterministic and CI-safe.
"""
from __future__ import annotations
import importlib
import io
import os
import runpy
import sys
import unittest
from contextlib import ExitStack, redirect_stderr, redirect_stdout
from typing import Dict, List, Optional
from unittest.mock import MagicMock, PropertyMock, patch
class TestIntegrationMirrorCommands(unittest.TestCase):
"""
Integration tests for `pkgmgr mirror` commands.
"""
def _run_pkgmgr(self, args: List[str], extra_env: Optional[Dict[str, str]] = None) -> str:
"""
Execute pkgmgr with the given arguments and return captured output.
- Treat SystemExit(0) or SystemExit(None) as success.
- Any other exit code is considered a test failure.
- Mirror commands are patched to avoid network/destructive operations.
"""
original_argv = list(sys.argv)
original_env = dict(os.environ)
buffer = io.StringIO()
cmd_repr = "pkgmgr " + " ".join(args)
# Shared dummy context used by multiple mirror commands
dummy_ctx = MagicMock()
dummy_ctx.identifier = "dummy-repo"
dummy_ctx.repo_dir = "/tmp/dummy-repo"
dummy_ctx.config_mirrors = {"origin": "git@github.com:alice/repo.git"}
dummy_ctx.file_mirrors = {"backup": "ssh://git@git.example:2201/alice/repo.git"}
type(dummy_ctx).resolved_mirrors = PropertyMock(
return_value={
"origin": "git@github.com:alice/repo.git",
"backup": "ssh://git@git.example:2201/alice/repo.git",
}
)
# Helper: patch with create=True so missing symbols don't explode.
# IMPORTANT: patch() does not auto-import submodules when resolving dotted names.
def _p(target: str, **kwargs):
module_name = target.rsplit(".", 1)[0]
try:
importlib.import_module(module_name)
except ModuleNotFoundError:
# If the module truly doesn't exist, create=True may still allow patching
# in some cases, but dotted resolution can still fail. Best-effort.
pass
return patch(target, create=True, **kwargs)
# Fake result for remote provisioning (preview-safe)
def _fake_ensure_remote_repo(spec, provider_hint=None, options=None):
# Safety: E2E should only ever call this in preview mode
if options is not None and getattr(options, "preview", False) is not True:
raise AssertionError(
f"{cmd_repr} attempted ensure_remote_repo without preview=True in E2E."
)
r = MagicMock()
r.status = "preview"
r.message = "Preview mode (E2E patched): no remote provisioning performed."
r.url = None
return r
try:
sys.argv = ["pkgmgr"] + list(args)
if extra_env:
os.environ.update(extra_env)
with ExitStack() as stack:
# build_context is imported directly in these modules:
stack.enter_context(_p("pkgmgr.actions.mirror.list_cmd.build_context", return_value=dummy_ctx))
stack.enter_context(_p("pkgmgr.actions.mirror.diff_cmd.build_context", return_value=dummy_ctx))
stack.enter_context(_p("pkgmgr.actions.mirror.merge_cmd.build_context", return_value=dummy_ctx))
stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.build_context", return_value=dummy_ctx))
stack.enter_context(_p("pkgmgr.actions.mirror.remote_provision.build_context", return_value=dummy_ctx))
# Deterministic remote probing (covers setup + likely check implementations)
stack.enter_context(_p("pkgmgr.actions.mirror.remote_check.probe_mirror", return_value=(True, "")))
stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.probe_mirror", return_value=(True, "")))
stack.enter_context(_p("pkgmgr.actions.mirror.git_remote.is_remote_reachable", return_value=True))
# setup_cmd imports ensure_origin_remote directly:
stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.ensure_origin_remote", return_value=None))
# Extra safety: if any code calls git_remote.ensure_origin_remote directly
stack.enter_context(_p("pkgmgr.actions.mirror.git_remote.ensure_origin_remote", return_value=None))
# remote provisioning: remote_provision imports ensure_remote_repo directly from core:
stack.enter_context(
_p(
"pkgmgr.actions.mirror.remote_provision.ensure_remote_repo",
side_effect=_fake_ensure_remote_repo,
)
)
# Extra safety: if anything calls remote_check.run_git directly, make it inert
stack.enter_context(_p("pkgmgr.actions.mirror.remote_check.run_git", return_value="dummy"))
with redirect_stdout(buffer), redirect_stderr(buffer):
try:
runpy.run_module("pkgmgr", run_name="__main__")
except SystemExit as exc:
code = exc.code if isinstance(exc.code, int) else None
if code not in (0, None):
raise AssertionError(
"%r failed with exit code %r.\n\nOutput:\n%s"
% (cmd_repr, exc.code, buffer.getvalue())
)
return buffer.getvalue()
finally:
sys.argv = original_argv
os.environ.clear()
os.environ.update(original_env)
# ------------------------------------------------------------
# Tests
# ------------------------------------------------------------
def test_mirror_help(self) -> None:
    """`pkgmgr mirror --help` emits argparse usage mentioning the subcommand."""
    text = self._run_pkgmgr(["mirror", "--help"]).lower()
    for needle in ("usage:", "mirror"):
        self.assertIn(needle, text)
def test_mirror_list_preview_all(self) -> None:
    """A previewed `mirror list` over all repositories prints something."""
    args = ["mirror", "list", "--preview", "--all"]
    self.assertTrue(self._run_pkgmgr(args).strip(), "Expected output from mirror list")
def test_mirror_diff_preview_all(self) -> None:
    """A previewed `mirror diff` over all repositories prints something."""
    args = ["mirror", "diff", "--preview", "--all"]
    self.assertTrue(self._run_pkgmgr(args).strip(), "Expected output from mirror diff")
def test_mirror_merge_config_to_file_preview_all(self) -> None:
    """A previewed config->file `mirror merge` over all repositories prints something."""
    args = ["mirror", "merge", "config", "file", "--preview", "--all"]
    self.assertTrue(
        self._run_pkgmgr(args).strip(),
        "Expected output from mirror merge (config -> file)",
    )
def test_mirror_setup_preview_all(self) -> None:
    """A previewed `mirror setup` over all repositories prints something."""
    args = ["mirror", "setup", "--preview", "--all"]
    self.assertTrue(self._run_pkgmgr(args).strip(), "Expected output from mirror setup")
def test_mirror_check_preview_all(self) -> None:
    """A previewed `mirror check` over all repositories prints something."""
    args = ["mirror", "check", "--preview", "--all"]
    self.assertTrue(self._run_pkgmgr(args).strip(), "Expected output from mirror check")
def test_mirror_provision_preview_all(self) -> None:
    """A previewed `mirror provision` over all repositories prints something."""
    args = ["mirror", "provision", "--preview", "--all"]
    self.assertTrue(
        self._run_pkgmgr(args).strip(),
        "Expected output from mirror provision (preview)",
    )
# Allow running this test module directly, outside of a test runner.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,63 @@
from __future__ import annotations
import json
import unittest
from dataclasses import dataclass
@dataclass
class FakeRunResult:
    """Lightweight stand-in for a structured command result.

    Exposes the same attributes test code reads from a real runner result:
    ``returncode``, ``stdout`` and an optional ``stderr``.
    """

    returncode: int   # process exit status
    stdout: str       # captured standard output
    stderr: str = ""  # captured standard error, empty by default
class FakeRunner:
    """Canned-response runner: every ``run()`` call yields the configured value."""

    def __init__(self, result):
        # Whatever was handed in is echoed back verbatim by run().
        self._result = result

    def run(self, ctx, cmd: str, allow_failure: bool = False):
        """Ignore all arguments and return the pre-configured result."""
        return self._result
class TestE2ENixProfileListJsonParsing(unittest.TestCase):
    """
    NixProfileInspector.list_json must cope with both CommandRunner return
    styles: a raw stdout string, and a RunResult-like object exposing a
    ``.stdout`` attribute.
    """

    _PAYLOAD = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}

    def _assert_parsed(self, runner) -> None:
        # Shared assertion path: parse via the inspector and verify the attrPath.
        from pkgmgr.actions.install.installers.nix.profile import NixProfileInspector

        data = NixProfileInspector().list_json(ctx=None, runner=runner)
        self.assertEqual(
            data["elements"]["pkgmgr-1"]["attrPath"],
            "packages.x86_64-linux.pkgmgr",
        )

    def test_list_json_accepts_raw_string(self) -> None:
        self._assert_parsed(FakeRunner(json.dumps(self._PAYLOAD)))

    def test_list_json_accepts_runresult_object(self) -> None:
        raw = json.dumps(self._PAYLOAD)
        self._assert_parsed(FakeRunner(FakeRunResult(returncode=0, stdout=raw)))
# Allow running this test module directly, outside of a test runner.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,119 @@
from __future__ import annotations
import io
import os
import shutil
import subprocess
import tempfile
import unittest
from contextlib import redirect_stdout
from types import SimpleNamespace
from pkgmgr.cli.commands.publish import handle_publish
def _run(cmd: list[str], cwd: str) -> None:
subprocess.run(
cmd,
cwd=cwd,
check=True,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
class TestIntegrationPublish(unittest.TestCase):
    """End-to-end checks for `pkgmgr publish` in preview mode on a throwaway git repo."""

    def setUp(self) -> None:
        if shutil.which("git") is None:
            self.skipTest("git is required for this integration test")
        self.tmp = tempfile.TemporaryDirectory()
        self.repo_dir = self.tmp.name
        # Build a minimal repository: one commit plus an annotated release tag.
        _run(["git", "init"], cwd=self.repo_dir)
        _run(["git", "config", "user.email", "ci@example.invalid"], cwd=self.repo_dir)
        _run(["git", "config", "user.name", "CI"], cwd=self.repo_dir)
        self._write("README.md", "test\n")
        _run(["git", "add", "README.md"], cwd=self.repo_dir)
        _run(["git", "commit", "-m", "init"], cwd=self.repo_dir)
        _run(["git", "tag", "-a", "v1.2.3", "-m", "v1.2.3"], cwd=self.repo_dir)
        # A MIRRORS file pointing at PyPI marks the repository as publishable.
        self._write("MIRRORS", "https://pypi.org/project/pkgmgr/\n")

    def tearDown(self) -> None:
        self.tmp.cleanup()

    # -- helpers -----------------------------------------------------------

    def _write(self, name: str, content: str) -> None:
        """Create *name* inside the repo directory with *content* (UTF-8)."""
        with open(os.path.join(self.repo_dir, name), "w", encoding="utf-8") as handle:
            handle.write(content)

    def _invoke_publish(self) -> str:
        """Run handle_publish in preview mode and return the captured stdout."""
        ctx = SimpleNamespace(
            repositories_base_dir=self.repo_dir,
            all_repositories=[
                {
                    "name": "pkgmgr",
                    "directory": self.repo_dir,
                }
            ],
        )
        selected = [
            {
                "name": "pkgmgr",
                "directory": self.repo_dir,
            }
        ]
        args = SimpleNamespace(preview=True, non_interactive=False)
        buf = io.StringIO()
        with redirect_stdout(buf):
            handle_publish(args=args, ctx=ctx, selected=selected)
        return buf.getvalue()

    # -- tests -------------------------------------------------------------

    def test_publish_preview_end_to_end(self) -> None:
        out = self._invoke_publish()
        self.assertIn("[pkgmgr] Publishing repository", out)
        self.assertIn("[INFO] Publishing pkgmgr for tag v1.2.3", out)
        self.assertIn("[PREVIEW] Would build and upload to PyPI.", out)
        # Preview must not create dist/
        self.assertFalse(os.path.isdir(os.path.join(self.repo_dir, "dist")))

    def test_publish_skips_without_pypi_mirror(self) -> None:
        # Overwrite MIRRORS with a non-PyPI target: publish must be a no-op.
        self._write("MIRRORS", "git@github.com:example/example.git\n")
        out = self._invoke_publish()
        self.assertIn("[INFO] No PyPI mirror found. Skipping publish.", out)

View File

@@ -23,7 +23,7 @@ from unittest.mock import patch
import pkgmgr.actions.install as install_mod
from pkgmgr.actions.install import install_repos
from pkgmgr.actions.install.installers.makefile import MakefileInstaller
from pkgmgr.actions.install.installers.nix_flake import NixFlakeInstaller
from pkgmgr.actions.install.installers.nix import NixFlakeInstaller
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import (
ArchPkgbuildInstaller,
)

View File

@@ -0,0 +1,65 @@
from __future__ import annotations
import os
import unittest
from pathlib import Path
from pkgmgr.core.repository.paths import resolve_repo_paths
def _find_repo_root() -> Path:
"""
Locate the pkgmgr repository root from the test location.
Assumes:
repo_root/
src/pkgmgr/...
tests/integration/...
"""
here = Path(__file__).resolve()
for parent in here.parents:
if (parent / "pyproject.toml").is_file() and (parent / "src" / "pkgmgr").is_dir():
return parent
raise RuntimeError("Could not determine repository root for pkgmgr integration test")
class TestRepositoryPathsExist(unittest.TestCase):
    """
    Integration test: pkgmgr itself is the TEMPLATE repository, so every
    canonical path resolved for it must point at an existing file.
    """

    def test_pkgmgr_repository_paths_exist(self) -> None:
        paths = resolve_repo_paths(str(_find_repo_root()))
        problems: list[str] = []

        def require(path: str | None, description: str) -> None:
            # Record a problem for unresolved paths and for absent files.
            if not path:
                problems.append(f"{description}: <not resolved>")
            elif not os.path.isfile(path):
                problems.append(f"{description}: {path} (missing)")

        # Core metadata
        require(paths.pyproject_toml, "pyproject.toml")
        require(paths.flake_nix, "flake.nix")
        # Human changelog
        require(paths.changelog_md, "CHANGELOG.md")
        # Packaging files (pkgmgr defines the template)
        require(paths.arch_pkgbuild, "Arch PKGBUILD")
        require(paths.debian_changelog, "Debian changelog")
        require(paths.rpm_spec, "RPM spec file")

        if problems:
            self.fail(
                "pkgmgr repository does not satisfy the canonical repository layout:\n"
                + "\n".join(f" - {item}" for item in problems)
            )
# Allow running this test module directly, outside of a test runner.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,44 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Optional
@dataclass
class FakeRunResult:
    # Mirrors the attributes real run results expose to callers.
    returncode: int
    stdout: str = ""
    stderr: str = ""


class FakeRunner:
    """
    Minimal runner stub compatible with:
      - CommandRunner.run(ctx, cmd, allow_failure=...)
      - Generic runner.run(ctx, cmd, allow_failure=...)
    """

    def __init__(self, mapping: Optional[dict[str, Any]] = None, default: Any = None):
        self.mapping = mapping or {}
        # Fall back to a successful empty result unless a default was supplied.
        self.default = FakeRunResult(0, "", "") if default is None else default
        self.calls: list[tuple[Any, str, bool]] = []

    def run(self, ctx, cmd: str, allow_failure: bool = False):
        """Record the invocation, then answer from the mapping (or the default)."""
        self.calls.append((ctx, cmd, allow_failure))
        return self.mapping.get(cmd, self.default)


class FakeRetry:
    """
    Mimics GitHubRateLimitRetry.run_with_retry(ctx, runner, cmd)
    """

    def __init__(self, results: list[FakeRunResult]):
        # Copy so popping scripted results never mutates the caller's list.
        self._results = list(results)
        self.calls: list[str] = []

    def run_with_retry(self, ctx, runner, cmd: str):
        """Pop and return the next scripted result; report success once exhausted."""
        self.calls.append(cmd)
        if self._results:
            return self._results.pop(0)
        return FakeRunResult(0, "", "")

View File

@@ -0,0 +1,58 @@
from __future__ import annotations
import unittest
from pkgmgr.actions.install.installers.nix.conflicts import NixConflictResolver
from ._fakes import FakeRunResult, FakeRunner, FakeRetry
class DummyCtx:
    # Minimal context stub: the conflict resolver only reads the `quiet` flag.
    quiet = True
class TestNixConflictResolver(unittest.TestCase):
    """Exercises NixConflictResolver's remove-token discovery and retry flow."""

    def _resolve(self, profile, runner, retry, stderr: str) -> bool:
        # Common driver: resolve a simulated `nix profile install` conflict.
        resolver = NixConflictResolver(runner=runner, retry=retry, profile=profile)
        return resolver.resolve(
            DummyCtx(),
            "nix profile install /repo#default",
            stdout="",
            stderr=stderr,
            output="pkgmgr",
            max_rounds=2,
        )

    def test_resolve_removes_tokens_and_retries_success(self) -> None:
        stderr = '''
error: An existing package already provides the following file:
/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr/bin/pkgmgr
'''

        class OutputOnlyProfile:
            # No store-prefix hits; the output lookup names a single token.
            def find_remove_tokens_for_store_prefixes(self, ctx, runner, prefixes):
                return []

            def find_remove_tokens_for_output(self, ctx, runner, output):
                return ["pkgmgr"]

        runner = FakeRunner(mapping={
            "nix profile remove pkgmgr": FakeRunResult(0, "", ""),
        })
        retry = FakeRetry(results=[FakeRunResult(0, "", "")])
        self.assertTrue(self._resolve(OutputOnlyProfile(), runner, retry, stderr))
        self.assertIn("nix profile remove pkgmgr", [call[1] for call in runner.calls])

    def test_resolve_uses_textual_remove_tokens_last_resort(self) -> None:
        stderr = "hint: try:\n nix profile remove 'pkgmgr-1'\n"

        class EmptyProfile:
            # Forces the resolver to fall back to tokens parsed from stderr text.
            def find_remove_tokens_for_store_prefixes(self, ctx, runner, prefixes):
                return []

            def find_remove_tokens_for_output(self, ctx, runner, output):
                return []

        runner = FakeRunner(mapping={
            "nix profile remove pkgmgr-1": FakeRunResult(0, "", ""),
        })
        retry = FakeRetry(results=[FakeRunResult(0, "", "")])
        self.assertTrue(self._resolve(EmptyProfile(), runner, retry, stderr))
        self.assertIn("nix profile remove pkgmgr-1", [call[1] for call in runner.calls])

View File

@@ -0,0 +1,62 @@
from __future__ import annotations
import json
import unittest
from pkgmgr.actions.install.installers.nix.profile import NixProfileInspector
from ._fakes import FakeRunResult, FakeRunner
class TestNixProfileInspector(unittest.TestCase):
    """Covers list_json input tolerance and remove-token discovery."""

    _SIMPLE = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}

    @staticmethod
    def _runner_for(payload: dict, as_object: bool = True) -> FakeRunner:
        # Serialize the payload exactly as `nix profile list --json` would emit it.
        raw = json.dumps(payload)
        default = FakeRunResult(0, stdout=raw) if as_object else raw
        return FakeRunner(default=default)

    def test_list_json_accepts_raw_string(self) -> None:
        runner = self._runner_for(self._SIMPLE, as_object=False)
        data = NixProfileInspector().list_json(ctx=None, runner=runner)
        self.assertEqual(data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")

    def test_list_json_accepts_result_object(self) -> None:
        runner = self._runner_for(self._SIMPLE)
        data = NixProfileInspector().list_json(ctx=None, runner=runner)
        self.assertEqual(data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")

    def test_find_remove_tokens_for_output_includes_output_first(self) -> None:
        payload = {
            "elements": {
                "pkgmgr-1": {"name": "pkgmgr-1", "attrPath": "packages.x86_64-linux.pkgmgr"},
                "default-1": {"name": "default-1", "attrPath": "packages.x86_64-linux.default"},
            }
        }
        tokens = NixProfileInspector().find_remove_tokens_for_output(
            ctx=None, runner=self._runner_for(payload), output="pkgmgr"
        )
        # The plain output name is always tried first, then matching entries.
        self.assertEqual(tokens[0], "pkgmgr")
        self.assertIn("pkgmgr-1", tokens)

    def test_find_remove_tokens_for_store_prefixes(self) -> None:
        payload = {
            "elements": {
                "pkgmgr-1": {
                    "name": "pkgmgr-1",
                    "attrPath": "packages.x86_64-linux.pkgmgr",
                    "storePaths": ["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
                },
                "something": {
                    "name": "other",
                    "attrPath": "packages.x86_64-linux.other",
                    "storePaths": ["/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-other"],
                },
            }
        }
        tokens = NixProfileInspector().find_remove_tokens_for_store_prefixes(
            ctx=None,
            runner=self._runner_for(payload),
            prefixes=["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
        )
        self.assertIn("pkgmgr-1", tokens)

View File

@@ -0,0 +1,88 @@
from __future__ import annotations
import unittest
from unittest.mock import MagicMock
from pkgmgr.actions.install.installers.nix.installer import NixFlakeInstaller
from ._fakes import FakeRunResult
class DummyCtx:
    """Minimal install context carrying only the fields the installer reads."""

    def __init__(self, identifier: str = "x", repo_dir: str = "/repo", quiet: bool = True, force_update: bool = False):
        # Store everything verbatim; run()/supports() only read these attributes.
        self.identifier, self.repo_dir = identifier, repo_dir
        self.quiet, self.force_update = quiet, force_update
class TestNixFlakeInstallerCore(unittest.TestCase):
    """
    White-box tests for NixFlakeInstaller.run(): the private collaborators
    (_retry, _conflicts, _profile, _runner) are swapped for MagicMocks so
    that only the orchestration logic inside run() is exercised.
    """

    def test_install_only_success_returns(self) -> None:
        # A zero returncode from the retry helper means run() finishes
        # without raising; the retry helper must have been invoked.
        ins = NixFlakeInstaller()
        ins.supports = MagicMock(return_value=True)
        ins._retry = MagicMock()
        ins._retry.run_with_retry.return_value = FakeRunResult(0, "", "")
        ins._conflicts = MagicMock()
        ins._profile = MagicMock()
        ins._runner = MagicMock()
        ctx = DummyCtx(identifier="lib", repo_dir="/repo", quiet=True)
        ins.run(ctx)
        ins._retry.run_with_retry.assert_called()

    def test_conflict_resolver_success_short_circuits(self) -> None:
        # The install fails (rc=1), but the conflict resolver reports
        # success, so run() must consult it and then return normally.
        ins = NixFlakeInstaller()
        ins.supports = MagicMock(return_value=True)
        ins._retry = MagicMock()
        ins._retry.run_with_retry.return_value = FakeRunResult(1, "out", "err")
        ins._conflicts = MagicMock()
        ins._conflicts.resolve.return_value = True
        ins._profile = MagicMock()
        ins._runner = MagicMock()
        ctx = DummyCtx(identifier="lib", repo_dir="/repo", quiet=True)
        ins.run(ctx)
        ins._conflicts.resolve.assert_called()

    def test_mandatory_failure_raises_systemexit(self) -> None:
        # Non-pkgmgr identifier ("lib") => the output is mandatory; with the
        # install failing (rc=2) and no conflict resolution, run() must exit
        # with the failing returncode.
        ins = NixFlakeInstaller()
        ins.supports = MagicMock(return_value=True)
        ins._retry = MagicMock()
        ins._retry.run_with_retry.return_value = FakeRunResult(2, "", "no")
        ins._conflicts = MagicMock()
        ins._conflicts.resolve.return_value = False
        ins._profile = MagicMock()
        ins._profile.find_installed_indices_for_output.return_value = []
        ins._runner = MagicMock()
        ins._runner.run.return_value = FakeRunResult(2, "", "")
        ctx = DummyCtx(identifier="lib", repo_dir="/repo", quiet=True)
        with self.assertRaises(SystemExit) as cm:
            ins.run(ctx)
        self.assertEqual(cm.exception.code, 2)

    def test_optional_failure_does_not_raise(self) -> None:
        # For the "pkgmgr" identifier the default output is optional: the
        # first retried command succeeds, the second fails (rc=2), yet run()
        # must complete without raising.
        ins = NixFlakeInstaller()
        ins.supports = MagicMock(return_value=True)
        # Scripted results consumed in call order by run_with_retry.
        results = [
            FakeRunResult(0, "", ""),
            FakeRunResult(2, "", ""),
        ]
        def run_with_retry(ctx, runner, cmd):
            return results.pop(0)
        ins._retry = MagicMock()
        ins._retry.run_with_retry.side_effect = run_with_retry
        ins._conflicts = MagicMock()
        ins._conflicts.resolve.return_value = False
        ins._profile = MagicMock()
        ins._profile.find_installed_indices_for_output.return_value = []
        ins._runner = MagicMock()
        ins._runner.run.return_value = FakeRunResult(2, "", "")
        ctx = DummyCtx(identifier="pkgmgr", repo_dir="/repo", quiet=True)
        ins.run(ctx)  # must not raise

View File

@@ -0,0 +1,136 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Unit tests for NixFlakeInstaller using unittest (no pytest).
Covers:
- Successful installation (returncode == 0)
- Mandatory failure → SystemExit with correct code
- Optional failure (pkgmgr default) → no raise, but warning
- supports() behavior incl. PKGMGR_DISABLE_NIX_FLAKE_INSTALLER
"""
from __future__ import annotations
import io
import os
import shutil
import subprocess
import tempfile
import unittest
from contextlib import redirect_stdout
from typing import List
from unittest.mock import patch
from pkgmgr.actions.install.installers.nix import NixFlakeInstaller
class DummyCtx:
    """Minimal context object to satisfy NixFlakeInstaller.run() / supports()."""

    def __init__(
        self,
        identifier: str,
        repo_dir: str,
        preview: bool = False,
        quiet: bool = False,
        force_update: bool = False,
    ):
        # Store everything verbatim; the installer only reads these attributes.
        self.identifier, self.repo_dir = identifier, repo_dir
        self.preview, self.quiet = preview, quiet
        self.force_update = force_update
class TestNixFlakeInstaller(unittest.TestCase):
    """Behavioral tests for NixFlakeInstaller with subprocess and shutil faked."""

    def setUp(self) -> None:
        # Create a temporary repository directory with a flake.nix file
        self._tmpdir = tempfile.mkdtemp(prefix="nix_flake_test_")
        self.repo_dir = self._tmpdir
        flake_path = os.path.join(self.repo_dir, "flake.nix")
        with open(flake_path, "w", encoding="utf-8") as f:
            f.write("{}\n")
        # Ensure the disable env var is not set by default
        os.environ.pop("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER", None)
        # BUGFIX: tests below set this variable; without a cleanup it leaked
        # process-wide into every test that ran afterwards.
        self.addCleanup(os.environ.pop, "PKGMGR_DISABLE_NIX_FLAKE_INSTALLER", None)

    def tearDown(self) -> None:
        if os.path.isdir(self._tmpdir):
            shutil.rmtree(self._tmpdir, ignore_errors=True)

    @staticmethod
    def _cp(code: int, stdout: str = "", stderr: str = "") -> subprocess.CompletedProcess:
        """Build a CompletedProcess mimicking a finished `nix` invocation."""
        return subprocess.CompletedProcess(args=["nix"], returncode=code, stdout=stdout, stderr=stderr)

    @staticmethod
    def _enable_nix_in_module(which_patch) -> None:
        """Ensure shutil.which('nix') in nix installer module returns a path."""
        which_patch.return_value = "/usr/bin/nix"

    @staticmethod
    def _install_cmds_from_calls(call_args_list) -> List[str]:
        """Extract the `nix profile install ...` shell strings from mock calls."""
        cmds: List[str] = []
        for c in call_args_list:
            if not c.args:
                continue
            cmd = c.args[0]
            if isinstance(cmd, str) and cmd.startswith("nix profile install "):
                cmds.append(cmd)
        return cmds

    def test_nix_flake_run_success(self) -> None:
        """
        When install returns success (returncode 0), installer
        should report success and not raise.
        """
        ctx = DummyCtx(identifier="some-lib", repo_dir=self.repo_dir)
        installer = NixFlakeInstaller()
        install_results = [self._cp(0)]  # first install succeeds

        def fake_subprocess_run(cmd, *args, **kwargs):
            # cmd is a string because CommandRunner uses shell=True
            if isinstance(cmd, str) and cmd.startswith("nix profile list --json"):
                return self._cp(0, stdout='{"elements": []}', stderr="")
            if isinstance(cmd, str) and cmd.startswith("nix profile install "):
                return install_results.pop(0)
            return self._cp(0)

        buf = io.StringIO()
        with patch("pkgmgr.actions.install.installers.nix.installer.shutil.which") as which_mock, patch(
            "pkgmgr.actions.install.installers.nix.installer.os.path.exists", return_value=True
        ), patch(
            "pkgmgr.actions.install.installers.nix.runner.subprocess.run", side_effect=fake_subprocess_run
        ) as subproc_mock, redirect_stdout(buf):
            self._enable_nix_in_module(which_mock)
            self.assertTrue(installer.supports(ctx))
            installer.run(ctx)
        out = buf.getvalue()
        self.assertIn("[nix] install: nix profile install", out)
        self.assertIn("[nix] output 'default' successfully installed.", out)
        install_cmds = self._install_cmds_from_calls(subproc_mock.call_args_list)
        self.assertEqual(install_cmds, [f"nix profile install {self.repo_dir}#default"])

    def test_nix_flake_supports_respects_disable_env(self) -> None:
        """
        PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 must disable the installer,
        even if flake.nix exists and nix is available.
        """
        ctx = DummyCtx(identifier="pkgmgr", repo_dir=self.repo_dir, quiet=False)
        installer = NixFlakeInstaller()
        with patch("pkgmgr.actions.install.installers.nix.installer.shutil.which") as which_mock, patch(
            "pkgmgr.actions.install.installers.nix.installer.os.path.exists", return_value=True
        ):
            self._enable_nix_in_module(which_mock)
            # Restored automatically by the addCleanup registered in setUp.
            os.environ["PKGMGR_DISABLE_NIX_FLAKE_INSTALLER"] = "1"
            self.assertFalse(installer.supports(ctx))
# Allow running this test module directly, outside of a test runner.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,37 @@
from __future__ import annotations
import unittest
from pkgmgr.actions.install.installers.nix.profile.models import NixProfileEntry
from pkgmgr.actions.install.installers.nix.profile.matcher import entry_matches_output, entry_matches_store_path
class TestMatcher(unittest.TestCase):
    """Matching rules of entry_matches_output / entry_matches_store_path."""

    @staticmethod
    def _entry(name: str, attr: str) -> NixProfileEntry:
        # Fixed key/store path; only name and attr_path vary per test.
        return NixProfileEntry(
            key="pkgmgr-1",
            index=None,
            name=name,
            attr_path=attr,
            store_paths=["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
        )

    def test_matches_direct_name(self) -> None:
        self.assertTrue(entry_matches_output(self._entry("pkgmgr", ""), "pkgmgr"))

    def test_matches_attrpath_hash(self) -> None:
        entry = self._entry("", "github:me/repo#pkgmgr")
        self.assertTrue(entry_matches_output(entry, "pkgmgr"))

    def test_matches_attrpath_dot_suffix(self) -> None:
        entry = self._entry("", "packages.x86_64-linux.pkgmgr")
        self.assertTrue(entry_matches_output(entry, "pkgmgr"))

    def test_matches_name_with_suffix_number(self) -> None:
        self.assertTrue(entry_matches_output(self._entry("pkgmgr-1", ""), "pkgmgr"))

    def test_package_manager_special_case(self) -> None:
        self.assertTrue(entry_matches_output(self._entry("package-manager-2", ""), "pkgmgr"))

    def test_store_path_match(self) -> None:
        entry = self._entry("pkgmgr-1", "")
        self.assertTrue(entry_matches_store_path(entry, "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"))
        self.assertFalse(entry_matches_store_path(entry, "/nix/store/cccccccccccccccccccccccccccccccc-zzz"))

View File

@@ -0,0 +1,106 @@
from __future__ import annotations
import unittest
from unittest.mock import patch
from pkgmgr.actions.install.installers.nix.retry import GitHubRateLimitRetry, RetryPolicy
from pkgmgr.actions.install.installers.nix.types import RunResult
class DummyCtx:
    """Context stub: the retry helper only consults the ``quiet`` flag."""

    def __init__(self, quiet: bool = True) -> None:
        self.quiet = quiet
class FakeRunner:
    """
    Simulates a runner whose first *fail_count* invocations report a GitHub
    HTTP 403 rate-limit error, after which every call succeeds.
    """

    def __init__(self, fail_count: int) -> None:
        self.fail_count = fail_count
        self.calls = 0

    def run(self, ctx: DummyCtx, cmd: str, allow_failure: bool) -> RunResult:
        self.calls += 1
        still_failing = self.calls <= self.fail_count
        if still_failing:
            # Synthesize the rate-limit stderr the retry logic looks for.
            return RunResult(
                returncode=1,
                stdout="",
                stderr="error: HTTP error 403: rate limit exceeded (simulated)",
            )
        return RunResult(returncode=0, stdout="ok", stderr="")
class TestGitHub403Retry(unittest.TestCase):
    """
    Verifies GitHubRateLimitRetry: retries only on 403/rate-limit errors and
    computes Fibonacci-based delays, with time.sleep patched so no real
    waiting occurs.
    """

    def test_retries_on_403_without_realtime_waiting(self) -> None:
        """
        Ensure:
        - It retries only on GitHub 403-like errors
        - It does not actually sleep in realtime (time.sleep patched)
        - It stops once a success occurs
        - Wait times follow Fibonacci(base=30) + jitter
        """
        policy = RetryPolicy(
            max_attempts=3,  # attempts: 1,2,3
            base_delay_seconds=30,  # fibonacci delays: 30, 30, 60
            jitter_seconds_min=0,
            jitter_seconds_max=60,
        )
        retry = GitHubRateLimitRetry(policy=policy)
        ctx = DummyCtx(quiet=True)
        runner = FakeRunner(fail_count=2)  # fail twice (403), then succeed
        # Make jitter deterministic and prevent real sleeping.
        with patch("pkgmgr.actions.install.installers.nix.retry.random.randint", return_value=5) as jitter_mock, patch(
            "pkgmgr.actions.install.installers.nix.retry.time.sleep"
        ) as sleep_mock:
            res = retry.run_with_retry(ctx, runner, "nix profile install /tmp#default")
        # Result should be success on 3rd attempt.
        self.assertEqual(res.returncode, 0)
        self.assertEqual(runner.calls, 3)
        # jitter should be used for each retry sleep (attempt 1->2, attempt 2->3) => 2 sleeps
        self.assertEqual(jitter_mock.call_count, 2)
        self.assertEqual(sleep_mock.call_count, 2)
        # Fibonacci delays for attempts=3: [30, 30, 60]
        # sleep occurs after failed attempt 1 and 2, so base delays are 30 and 30
        # wait_time = base_delay + jitter(5) => 35, 35
        sleep_args = [c.args[0] for c in sleep_mock.call_args_list]
        self.assertEqual(sleep_args, [35, 35])

    def test_does_not_retry_on_non_403_errors(self) -> None:
        """
        Ensure it does not retry when the error is not recognized as GitHub 403/rate limit.
        """
        policy = RetryPolicy(max_attempts=7, base_delay_seconds=30)
        retry = GitHubRateLimitRetry(policy=policy)
        ctx = DummyCtx(quiet=True)

        class Non403Runner:
            # Always fails with a generic error, which must NOT trigger retries.
            def __init__(self) -> None:
                self.calls = 0

            def run(self, ctx: DummyCtx, cmd: str, allow_failure: bool) -> RunResult:
                self.calls += 1
                return RunResult(returncode=1, stdout="", stderr="some other error (simulated)")

        runner = Non403Runner()
        with patch("pkgmgr.actions.install.installers.nix.retry.time.sleep") as sleep_mock:
            res = retry.run_with_retry(ctx, runner, "nix profile install /tmp#default")
        self.assertEqual(res.returncode, 1)
        self.assertEqual(runner.calls, 1)  # no retries
        self.assertEqual(sleep_mock.call_count, 0)
# Allow running this test module directly, outside of a test runner.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,39 @@
from __future__ import annotations
import unittest
from pkgmgr.actions.install.installers.nix.profile.normalizer import coerce_index, normalize_elements
class TestNormalizer(unittest.TestCase):
    """Covers index coercion and element normalization of profile JSON."""

    def test_coerce_index_numeric_key(self) -> None:
        # A purely numeric key is its own index.
        self.assertEqual(coerce_index("3", {"name": "x"}), 3)

    def test_coerce_index_explicit_field(self) -> None:
        # Explicit `index` / `id` fields win, including string-encoded ints.
        self.assertEqual(coerce_index("pkgmgr-1", {"index": 7}), 7)
        self.assertEqual(coerce_index("pkgmgr-1", {"id": "8"}), 8)

    def test_coerce_index_trailing_number(self) -> None:
        # Fall back to the trailing number of the key.
        self.assertEqual(coerce_index("pkgmgr-42", {"name": "x"}), 42)

    def test_normalize_elements_handles_missing_elements(self) -> None:
        self.assertEqual(normalize_elements({}), [])

    def test_normalize_elements_collects_store_paths(self) -> None:
        # Both the plural `storePaths` list and the singular `storePath`
        # spelling must be collected.
        elements = {
            "pkgmgr-1": {
                "name": "pkgmgr-1",
                "attrPath": "packages.x86_64-linux.pkgmgr",
                "storePaths": ["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
            },
            "2": {
                "name": "foo",
                "attrPath": "packages.x86_64-linux.default",
                "storePath": "/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-foo",
            },
        }
        entries = normalize_elements({"elements": elements})
        self.assertEqual(len(entries), 2)
        self.assertTrue(entries[0].store_paths)

View File

@@ -0,0 +1,18 @@
from __future__ import annotations
import json
import unittest
from pkgmgr.actions.install.installers.nix.profile.parser import parse_profile_list_json
class TestParseProfileListJson(unittest.TestCase):
    """parse_profile_list_json: valid JSON round-trips; garbage aborts."""

    def test_parses_valid_json(self) -> None:
        raw = json.dumps({"elements": {"0": {"name": "pkgmgr"}}})
        parsed = parse_profile_list_json(raw)
        self.assertEqual(parsed["elements"]["0"]["name"], "pkgmgr")

    def test_raises_systemexit_on_invalid_json(self) -> None:
        with self.assertRaises(SystemExit) as cm:
            parse_profile_list_json("{not json")
        # The exit payload should explain the parse failure.
        self.assertIn("Failed to parse", str(cm.exception))

Some files were not shown because too many files have changed in this diff Show More