Compare commits

...

33 Commits

Author SHA1 Message Date
Kevin Veen-Birkenbach
9485bc9e3f Release version 1.8.0
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-15 13:37:42 +01:00
Kevin Veen-Birkenbach
dcda23435d git commit -m "feat(update): add --silent mode with continue-on-failure and unified summary
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Introduce --silent flag for install/update to downgrade per-repo errors to warnings
- Continue processing remaining repositories on pull/install failures
- Emit a single summary at the end (suppress per-repo summaries during update)
- Preserve interactive verification behavior when not silent
- Add integration test covering silent vs non-silent update behavior
- Update e2e tests to use --silent for stability"

https://chatgpt.com/share/693ffcca-f680-800f-9f95-9d8c52a9a678
2025-12-15 13:19:14 +01:00
Kevin Veen-Birkenbach
a69e81c44b fix(dependencies): install python-pip for all supported distributions
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Added python-pip for Arch, python3-pip for CentOS, Debian, Fedora, and Ubuntu.
- Ensures that pip is available for Python package installations across systems.

https://chatgpt.com/share/693fedab-69ac-800f-a8f9-19d504787565
2025-12-15 12:14:48 +01:00
Kevin Veen-Birkenbach
2ca004d056 fix(arch/dependencies): initialize pacman keyring before package installation
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Added pacman-key initialization to ensure keyring is properly set up before installing packages.
- This prevents errors related to missing secret keys during package signing.

https://chatgpt.com/share/693fddec-3800-800f-9ad8-6f2d3cd90cc6
2025-12-15 11:07:31 +01:00
Kevin Veen-Birkenbach
f7bd5bfd0b Optimized linters and solved linting bugs
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / lint-shell (push) Has been cancelled
Mark stable commit / lint-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-15 11:00:17 +01:00
Kevin Veen-Birkenbach
2c15a4016b feat(create): scaffold repositories via templates with preview and mirror setup
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
https://chatgpt.com/share/693f5bdb-1780-800f-a772-0ecf399627fc
2025-12-15 01:52:38 +01:00
Kevin Veen-Birkenbach
9e3ce34626 Release version 1.7.2
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-15 00:53:26 +01:00
Kevin Veen-Birkenbach
1a13fcaa4e refactor(mirror): enforce primary origin URL and align mirror resolution logic
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Resolve primary remote via RepoMirrorContext (origin → file order → config → default)
- Always set origin fetch and push URL to primary
- Add additional mirrors as extra push URLs without duplication
- Update remote provisioning and setup commands to use context-based resolution
- Adjust and extend unit tests to cover new origin/push behavior

https://chatgpt.com/share/693f4538-42d4-800f-98c2-2ec264fd2e19
2025-12-15 00:16:04 +01:00
Kevin Veen-Birkenbach
48a0d1d458 feat(release): auto-run publish after release with --no-publish opt-out
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Run publish automatically after successful release
- Add --no-publish flag to disable auto-publish
- Respect TTY for interactive/credential prompts
- Harden repo directory resolution
- Add integration and unit tests for release→publish hook

https://chatgpt.com/share/693f335b-b820-800f-8666-68355f3c938f
2025-12-14 22:59:43 +01:00
Kevin Veen-Birkenbach
783d2b921a fix(publish): store PyPI token per user
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
https://chatgpt.com/share/693f2e20-b94c-800f-9d8e-0c88187f7be6
2025-12-14 22:37:28 +01:00
Kevin Veen-Birkenbach
6effacefef Release version 1.7.1
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 21:19:11 +01:00
Kevin Veen-Birkenbach
65903e740b Release version 1.7.0
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 21:10:06 +01:00
Kevin Veen-Birkenbach
aa80a2ddb4 Added correct e2e test and pypi mirror
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 21:08:23 +01:00
Kevin Veen-Birkenbach
9456ad4475 feat(publish): add PyPI publish workflow, CLI command, parser integration, and tests
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Introduce publish action with PyPI target detection via MIRRORS
* Resolve version from SemVer git tags on HEAD
* Support preview mode and non-interactive CI usage
* Build and upload artifacts using build + twine with token resolution
* Add CLI wiring (dispatch, command handler, parser)
* Add E2E publish help tests for pkgmgr and nix run
* Add integration tests for publish preview and mirror handling
* Add unit tests for git tag parsing, PyPI URL parsing, workflow preview, and CLI handler
* Clean up dispatch and parser structure while integrating publish

https://chatgpt.com/share/693f0f00-af68-800f-8846-193dca69bd2e
2025-12-14 20:24:01 +01:00
Kevin Veen-Birkenbach
3d7d7e9c09 Release version 1.6.4
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 19:33:07 +01:00
Kevin Veen-Birkenbach
328203ccd7 **test(nix): add comprehensive unittest coverage for nix installer helpers**
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Add reusable fakes for runner and retry logic
* Cover conflict resolution paths (store-prefix, output-token, textual fallback)
* Add unit tests for profile parsing, normalization, matching, and text parsing
* Verify installer core behavior for success, mandatory failure, and optional failure
* Keep tests Nix-free using pure unittest + mocks

https://chatgpt.com/share/693efe80-d928-800f-98b7-0aaafee1d32a
2025-12-14 19:27:26 +01:00
Kevin Veen-Birkenbach
ac16378807 Deleted deprecated unit tests:
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
https://chatgpt.com/share/693efe80-d928-800f-98b7-0aaafee1d32a
2025-12-14 19:14:42 +01:00
Kevin Veen-Birkenbach
f7a86bc353 fix(launcher): avoid calling missing retry helper in packaged installs
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Load GitHub 403 retry helper only when available
- Fallback to plain `nix run` if retry function is not defined
- Prevent exit 127 when pkgmgr launcher is installed without retry script
- Fix E2E failure for `pkgmgr update pkgmgr --system`

https://chatgpt.com/share/693efd23-8b60-800f-adbb-9dfffc33f1f7
2025-12-14 19:08:32 +01:00
Kevin Veen-Birkenbach
06a6a77a48 *fix(nix): resolve nix profile conflicts without numeric indices and fix update pkgmgr system test*
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Switch conflict handling from index-based removal to token-based removal (*nix profile remove <name>*) for newer nix versions
* Add robust parsing of *nix profile list --json* with normalization and heuristics for output/name matching
* Detect at runtime whether numeric profile indices are supported and fall back automatically when they are not
* Ensure *pkgmgr* / *package-manager* flake outputs are correctly identified and cleaned up during reinstall
* Fix failing E2E test *test_update_pkgmgr_shallow_pkgmgr_with_system* by reliably removing conflicting profile entries before reinstall

https://chatgpt.com/share/693efae5-b8bc-800f-94e3-28c93b74ed7b
2025-12-14 18:58:29 +01:00
Kevin Veen-Birkenbach
4883e40812 fix(ci): skip container publish when no version tag exists
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Remove unsupported `fetch-tags` input from checkout step
* Detect missing `v*` tag on workflow_run SHA and exit successfully
* Gate Buildx, GHCR login, and publish steps behind `should_publish` flag

https://chatgpt.com/share/693ee7f1-ed80-800f-bb03-369a1cc659e3
2025-12-14 17:38:06 +01:00
Kevin Veen-Birkenbach
031ae5ac69 test(integration): fix mirror tests by removing non-existent check_cmd patches
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Remove patches referencing pkgmgr.actions.mirror.check_cmd (module does not exist)
- Patch actual mirror probe/remote helpers used at runtime
- Make mirror integration tests deterministic and CI-safe

https://chatgpt.com/share/693ee657-b260-800f-a69a-8b0680e6baa5
2025-12-14 17:31:05 +01:00
Kevin Veen-Birkenbach
1c4fc531fa fix(shellcheck): correct source path hint for retry_403 helper
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Align ShellCheck source hint with repository layout
- Fix SC1091 without disabling checks
- Runtime sourcing via ${RETRY_LIB} remains unchanged

https://chatgpt.com/share/693ee308-6c48-800f-b14f-7d6081e14eb4
2025-12-14 17:16:35 +01:00
Kevin Veen-Birkenbach
33dfbf3a4d test(env-virtual): execute pkgmgr from Python venv instead of system launcher
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
The virtual environment test no longer invokes the distro-installed pkgmgr launcher (Nix-based).
Instead, it explicitly installs and activates the Python venv via make setup-venv and runs pkgmgr from there.

This aligns the test with its actual purpose (venv validation), avoids accidental execution of the Nix launcher, and fixes the failure caused by the missing run_with_github_403_retry helper in the venv workflow.

https://chatgpt.com/share/693ee224-e838-800f-8fa0-45295b2f5e20
2025-12-14 17:12:48 +01:00
Kevin Veen-Birkenbach
a3aa7b6394 git commit -am "fix(shellcheck): point source hint to repo-local retry_403.sh
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Fix SC1091 by updating ShellCheck source hint to repo path
- Keep runtime sourcing from /usr/lib/package-manager unchanged
- CI-safe without disabling ShellCheck rules"

https://chatgpt.com/share/693edae1-6d84-800f-8556-0e54dd15b944
2025-12-14 16:42:22 +01:00
Kevin Veen-Birkenbach
724c262a4a fix(test): import mirror submodules before patching in integration tests
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
Ensure pkgmgr.actions.mirror.* submodules are imported before unittest.mock.patch
to avoid AttributeError when patching dotted paths (e.g. check_cmd).
Stabilizes mirror CLI integration tests in CI.

https://chatgpt.com/share/693ed9f5-9918-800f-a880-d1238b3da1c9
2025-12-14 16:38:24 +01:00
Kevin Veen-Birkenbach
dcbe16c5f0 feat(launcher): enforce GitHub 403 retry for nix run
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Require retry_403.sh to exist and fail hard if missing
- Source retry helper unconditionally
- Run nix flake execution via run_with_github_403_retry
- Prevent transient GitHub API rate-limit failures during nix run

https://chatgpt.com/share/693ed83e-a2e8-800f-8c1b-d5d5afeaa6ad
2025-12-14 16:31:02 +01:00
Kevin Veen-Birkenbach
f63b0a9f08 chore(ci): rename codesniffer workflows to linter
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / linter-shell (push) Has been cancelled
Mark stable commit / linter-python (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
- Rename ShellCheck workflow to linter-shell
- Rename Ruff workflow to linter-python
- Update workflow calls and dependencies accordingly

https://chatgpt.com/share/693ed61a-7490-800f-aef1-fce845e717a2
2025-12-14 16:21:57 +01:00
Kevin Veen-Birkenbach
822c418503 Added missing import
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 16:16:37 +01:00
Kevin Veen-Birkenbach
562a6da291 test(integration): move mirror CLI tests from e2e to integration and patch side effects
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
https://chatgpt.com/share/693ed188-eb80-800f-8541-356e3fbd98c5
2025-12-14 16:14:17 +01:00
Kevin Veen-Birkenbach
e61b30d9af feat(tests): add unit tests for mirror context, io, commands, and remote helpers
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
https://chatgpt.com/share/693ed188-eb80-800f-8541-356e3fbd98c5
2025-12-14 16:02:11 +01:00
Kevin Veen-Birkenbach
27c0c7c01f **fix(mirror): derive remote repository owner and name from URL**
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Parse host, owner, and repository name directly from Git remote URLs
* Prevent provisioning under incorrect repository names
* Make Git URL the single source of truth for remote provisioning
* Improve diagnostics when URL parsing fails
2025-12-14 14:54:19 +01:00
Kevin Veen-Birkenbach
0d652d995e **feat(mirror,credentials): improve remote provisioning UX and token handling**
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
* Split mirror logic into atomic modules (remote check, provisioning, URL utils)
* Normalize Git remote URLs and provider host detection
* Add provider-specific token help URLs (GitHub, Gitea/Forgejo, GitLab)
* Improve keyring handling with clear warnings and install hints
* Gracefully fall back to prompt when keyring is unavailable
* Fix provider hint override logic during remote provisioning
2025-12-14 14:48:05 +01:00
Kevin Veen-Birkenbach
0e03fbbee2 Changed Mirror Name
Some checks failed
Mark stable commit / test-unit (push) Has been cancelled
Mark stable commit / test-integration (push) Has been cancelled
Mark stable commit / test-env-virtual (push) Has been cancelled
Mark stable commit / test-env-nix (push) Has been cancelled
Mark stable commit / test-e2e (push) Has been cancelled
Mark stable commit / test-virgin-user (push) Has been cancelled
Mark stable commit / test-virgin-root (push) Has been cancelled
Mark stable commit / codesniffer-shellcheck (push) Has been cancelled
Mark stable commit / codesniffer-ruff (push) Has been cancelled
Mark stable commit / mark-stable (push) Has been cancelled
2025-12-14 14:01:19 +01:00
103 changed files with 4232 additions and 1230 deletions

View File

@@ -28,8 +28,8 @@ jobs:
   test-virgin-root:
     uses: ./.github/workflows/test-virgin-root.yml
-  codesniffer-shellcheck:
-    uses: ./.github/workflows/codesniffer-shellcheck.yml
-  codesniffer-ruff:
-    uses: ./.github/workflows/codesniffer-ruff.yml
+  lint-shell:
+    uses: ./.github/workflows/lint-shell.yml
+  lint-python:
+    uses: ./.github/workflows/lint-python.yml

View File

@@ -4,7 +4,7 @@ on:
workflow_call: workflow_call:
jobs: jobs:
codesniffer-ruff: lint-python:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:

View File

@@ -4,7 +4,7 @@ on:
workflow_call: workflow_call:
jobs: jobs:
codesniffer-shellcheck: lint-shell:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4

View File

@@ -29,16 +29,16 @@ jobs:
test-virgin-root: test-virgin-root:
uses: ./.github/workflows/test-virgin-root.yml uses: ./.github/workflows/test-virgin-root.yml
codesniffer-shellcheck: lint-shell:
uses: ./.github/workflows/codesniffer-shellcheck.yml uses: ./.github/workflows/lint-shell.yml
codesniffer-ruff: lint-python:
uses: ./.github/workflows/codesniffer-ruff.yml uses: ./.github/workflows/lint-python.yml
mark-stable: mark-stable:
needs: needs:
- codesniffer-shellcheck - lint-shell
- codesniffer-ruff - lint-python
- test-unit - test-unit
- test-integration - test-integration
- test-env-nix - test-env-nix

View File

@@ -19,7 +19,6 @@ jobs:
uses: actions/checkout@v4 uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
fetch-tags: true
- name: Checkout workflow_run commit and refresh tags - name: Checkout workflow_run commit and refresh tags
run: | run: |
@@ -35,22 +34,30 @@ jobs:
SHA="$(git rev-parse HEAD)" SHA="$(git rev-parse HEAD)"
V_TAG="$(git tag --points-at "${SHA}" --list 'v*' | sort -V | tail -n1)" V_TAG="$(git tag --points-at "${SHA}" --list 'v*' | sort -V | tail -n1)"
[[ -n "$V_TAG" ]] || { echo "No version tag found"; exit 1; } if [[ -z "${V_TAG}" ]]; then
echo "No version tag found for ${SHA}. Skipping publish."
echo "should_publish=false" >> "$GITHUB_OUTPUT"
exit 0
fi
VERSION="${V_TAG#v}" VERSION="${V_TAG#v}"
STABLE_SHA="$(git rev-parse -q --verify refs/tags/stable^{commit} 2>/dev/null || true)" STABLE_SHA="$(git rev-parse -q --verify refs/tags/stable^{commit} 2>/dev/null || true)"
IS_STABLE=false IS_STABLE=false
[[ -n "${STABLE_SHA}" && "${STABLE_SHA}" == "${SHA}" ]] && IS_STABLE=true [[ -n "${STABLE_SHA}" && "${STABLE_SHA}" == "${SHA}" ]] && IS_STABLE=true
echo "should_publish=true" >> "$GITHUB_OUTPUT"
echo "version=${VERSION}" >> "$GITHUB_OUTPUT" echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
echo "is_stable=${IS_STABLE}" >> "$GITHUB_OUTPUT" echo "is_stable=${IS_STABLE}" >> "$GITHUB_OUTPUT"
- name: Set up Docker Buildx - name: Set up Docker Buildx
if: ${{ steps.info.outputs.should_publish == 'true' }}
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3
with: with:
use: true use: true
- name: Login to GHCR - name: Login to GHCR
if: ${{ steps.info.outputs.should_publish == 'true' }}
uses: docker/login-action@v3 uses: docker/login-action@v3
with: with:
registry: ghcr.io registry: ghcr.io
@@ -58,6 +65,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }} password: ${{ secrets.GITHUB_TOKEN }}
- name: Publish all images - name: Publish all images
if: ${{ steps.info.outputs.should_publish == 'true' }}
run: | run: |
set -euo pipefail set -euo pipefail
OWNER="${{ github.repository_owner }}" \ OWNER="${{ github.repository_owner }}" \

View File

@@ -1,3 +1,44 @@
## [1.8.0] - 2025-12-15
* *** New Features: ***
- **Silent Updates**: You can now use the `--silent` flag during installs and updates to suppress error messages for individual repositories and get a single summary at the end. This ensures the process continues even if some repositories fail, while still preserving interactive checks when not in silent mode.
- **Repository Scaffolding**: The process for creating new repositories has been improved. You can now use templates to scaffold repositories with a preview and automatic mirror setup.
*** Bug Fixes: ***
- **Pip Installation**: Pip is now installed automatically on all supported systems. This includes `python-pip` for Arch and `python3-pip` for CentOS, Debian, Fedora, and Ubuntu, ensuring that pip is available for Python package installations.
- **Pacman Keyring**: Fixed an issue on Arch Linux where package installation would fail due to missing keys. The pacman keyring is now properly initialized before installing packages.
## [1.7.2] - 2025-12-15
* * Git mirrors are now resolved consistently (origin → MIRRORS file → config → default).
* The `origin` remote is always enforced to use the primary URL for both fetch and push.
* Additional mirrors are added as extra push targets without duplication.
* Local and remote mirror setup behaves more predictably and consistently.
* Improved test coverage ensures stable origin and push URL handling.
## [1.7.1] - 2025-12-14
* Patched package-manager to kpmx to publish on pypi
## [1.7.0] - 2025-12-14
* * New *pkgmgr publish* command to publish repository artifacts to PyPI based on the *MIRRORS* file.
* Automatically selects the current repository when no explicit selection is given.
* Publishes only when a semantic version tag is present on *HEAD*; otherwise skips with a clear info message.
* Supports non-interactive mode for CI environments via *--non-interactive*.
## [1.6.4] - 2025-12-14
* * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
* More stable launcher behavior in packaged and virtual-env setups.
* Enhanced mirror and remote handling: repository owner/name are derived from URLs, with smoother provisioning and clearer credential handling.
* More reliable releases and artifacts due to safer CI behavior when no version tag is present.
## [1.6.3] - 2025-12-14 ## [1.6.3] - 2025-12-14
* ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations. * ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations.

View File

@@ -1,3 +1,4 @@
git@github.com:kevinveenbirkenbach/package-manager.git git@github.com:kevinveenbirkenbach/package-manager.git
ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git
ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git ssh://git@code.infinito.nexus:2201/kevinveenbirkenbach/pkgmgr.git
https://pypi.org/project/kpmx/

View File

@@ -32,7 +32,7 @@
rec { rec {
pkgmgr = pyPkgs.buildPythonApplication { pkgmgr = pyPkgs.buildPythonApplication {
pname = "package-manager"; pname = "package-manager";
version = "1.6.3"; version = "1.8.0";
# Use the git repo as source # Use the git repo as source
src = ./.; src = ./.;
@@ -49,6 +49,7 @@
# Runtime dependencies (matches [project.dependencies] in pyproject.toml) # Runtime dependencies (matches [project.dependencies] in pyproject.toml)
propagatedBuildInputs = [ propagatedBuildInputs = [
pyPkgs.pyyaml pyPkgs.pyyaml
pyPkgs.jinja2
pyPkgs.pip pyPkgs.pip
]; ];
@@ -78,6 +79,7 @@
pythonWithDeps = python.withPackages (ps: [ pythonWithDeps = python.withPackages (ps: [
ps.pip ps.pip
ps.pyyaml ps.pyyaml
ps.jinja2
]); ]);
in in
{ {

View File

@@ -1,7 +1,7 @@
# Maintainer: Kevin Veen-Birkenbach <info@veen.world> # Maintainer: Kevin Veen-Birkenbach <info@veen.world>
pkgname=package-manager pkgname=package-manager
pkgver=1.6.3 pkgver=1.8.0
pkgrel=1 pkgrel=1
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)." pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
arch=('any') arch=('any')

View File

@@ -1,3 +1,49 @@
package-manager (1.8.0-1) unstable; urgency=medium
* *** New Features: ***
- **Silent Updates**: You can now use the `--silent` flag during installs and updates to suppress error messages for individual repositories and get a single summary at the end. This ensures the process continues even if some repositories fail, while still preserving interactive checks when not in silent mode.
- **Repository Scaffolding**: The process for creating new repositories has been improved. You can now use templates to scaffold repositories with a preview and automatic mirror setup.
*** Bug Fixes: ***
- **Pip Installation**: Pip is now installed automatically on all supported systems. This includes `python-pip` for Arch and `python3-pip` for CentOS, Debian, Fedora, and Ubuntu, ensuring that pip is available for Python package installations.
- **Pacman Keyring**: Fixed an issue on Arch Linux where package installation would fail due to missing keys. The pacman keyring is now properly initialized before installing packages.
-- Kevin Veen-Birkenbach <kevin@veen.world> Mon, 15 Dec 2025 13:37:42 +0100
package-manager (1.7.2-1) unstable; urgency=medium
* * Git mirrors are now resolved consistently (origin → MIRRORS file → config → default).
* The `origin` remote is always enforced to use the primary URL for both fetch and push.
* Additional mirrors are added as extra push targets without duplication.
* Local and remote mirror setup behaves more predictably and consistently.
* Improved test coverage ensures stable origin and push URL handling.
-- Kevin Veen-Birkenbach <kevin@veen.world> Mon, 15 Dec 2025 00:53:26 +0100
package-manager (1.7.1-1) unstable; urgency=medium
* Patched package-manager to kpmx to publish on pypi
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 21:19:11 +0100
package-manager (1.7.0-1) unstable; urgency=medium
* * New *pkgmgr publish* command to publish repository artifacts to PyPI based on the *MIRRORS* file.
* Automatically selects the current repository when no explicit selection is given.
* Publishes only when a semantic version tag is present on *HEAD*; otherwise skips with a clear info message.
* Supports non-interactive mode for CI environments via *--non-interactive*.
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 21:10:06 +0100
package-manager (1.6.4-1) unstable; urgency=medium
* * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
* More stable launcher behavior in packaged and virtual-env setups.
* Enhanced mirror and remote handling: repository owner/name are derived from URLs, with smoother provisioning and clearer credential handling.
* More reliable releases and artifacts due to safer CI behavior when no version tag is present.
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 19:33:07 +0100
package-manager (1.6.3-1) unstable; urgency=medium package-manager (1.6.3-1) unstable; urgency=medium
* ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations. * ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations.

View File

@@ -1,5 +1,5 @@
Name: package-manager Name: package-manager
Version: 1.6.3 Version: 1.8.0
Release: 1%{?dist} Release: 1%{?dist}
Summary: Wrapper that runs Kevin's package-manager via Nix flake Summary: Wrapper that runs Kevin's package-manager via Nix flake
@@ -74,6 +74,37 @@ echo ">>> package-manager removed. Nix itself was not removed."
/usr/lib/package-manager/ /usr/lib/package-manager/
%changelog %changelog
* Mon Dec 15 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.0-1
- *** New Features: ***
- **Silent Updates**: You can now use the `--silent` flag during installs and updates to suppress error messages for individual repositories and get a single summary at the end. This ensures the process continues even if some repositories fail, while still preserving interactive checks when not in silent mode.
- **Repository Scaffolding**: The process for creating new repositories has been improved. You can now use templates to scaffold repositories with a preview and automatic mirror setup.
*** Bug Fixes: ***
- **Pip Installation**: Pip is now installed automatically on all supported systems. This includes `python-pip` for Arch and `python3-pip` for CentOS, Debian, Fedora, and Ubuntu, ensuring that pip is available for Python package installations.
- **Pacman Keyring**: Fixed an issue on Arch Linux where package installation would fail due to missing keys. The pacman keyring is now properly initialized before installing packages.
* Mon Dec 15 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.7.2-1
- * Git mirrors are now resolved consistently (origin → MIRRORS file → config → default).
* The `origin` remote is always enforced to use the primary URL for both fetch and push.
* Additional mirrors are added as extra push targets without duplication.
* Local and remote mirror setup behaves more predictably and consistently.
* Improved test coverage ensures stable origin and push URL handling.
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.7.1-1
- Patched package-manager to kpmx to publish on pypi
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.7.0-1
- * New *pkgmgr publish* command to publish repository artifacts to PyPI based on the *MIRRORS* file.
* Automatically selects the current repository when no explicit selection is given.
* Publishes only when a semantic version tag is present on *HEAD*; otherwise skips with a clear info message.
* Supports non-interactive mode for CI environments via *--non-interactive*.
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.6.4-1
- * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
* More stable launcher behavior in packaged and virtual-env setups.
* Enhanced mirror and remote handling: repository owner/name are derived from URLs, with smoother provisioning and clearer credential handling.
* More reliable releases and artifacts due to safer CI behavior when no version tag is present.
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.6.3-1 * Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.6.3-1
- ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations. - ***Fixed:*** Corrected repository path resolution so release and version logic consistently use the canonical packaging/* layout, preventing changelog and packaging files from being read or updated from incorrect locations.

View File

@@ -6,8 +6,8 @@ requires = [
build-backend = "setuptools.build_meta" build-backend = "setuptools.build_meta"
[project] [project]
name = "package-manager" name = "kpmx"
version = "1.6.3" version = "1.8.0"
description = "Kevin's package-manager tool (pkgmgr)" description = "Kevin's package-manager tool (pkgmgr)"
readme = "README.md" readme = "README.md"
requires-python = ">=3.9" requires-python = ">=3.9"
@@ -21,6 +21,7 @@ authors = [
dependencies = [ dependencies = [
"PyYAML>=6.0", "PyYAML>=6.0",
"tomli; python_version < \"3.11\"", "tomli; python_version < \"3.11\"",
"jinja2>=3.1"
] ]
[project.urls] [project.urls]

View File

@@ -6,6 +6,13 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
echo "[arch/dependencies] Installing Arch build dependencies..." echo "[arch/dependencies] Installing Arch build dependencies..."
pacman -Syu --noconfirm pacman -Syu --noconfirm
if ! pacman-key --list-sigs &>/dev/null; then
echo "[arch/dependencies] Initializing pacman keyring..."
pacman-key --init
pacman-key --populate archlinux
fi
pacman -S --noconfirm --needed \ pacman -S --noconfirm --needed \
base-devel \ base-devel \
git \ git \
@@ -13,6 +20,7 @@ pacman -S --noconfirm --needed \
curl \ curl \
ca-certificates \ ca-certificates \
python \ python \
python-pip \
xz xz
pacman -Scc --noconfirm pacman -Scc --noconfirm

View File

@@ -14,6 +14,7 @@ dnf -y install \
curl-minimal \ curl-minimal \
ca-certificates \ ca-certificates \
python3 \ python3 \
python3-pip \
sudo \ sudo \
xz xz

View File

@@ -15,6 +15,7 @@ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
ca-certificates \ ca-certificates \
python3 \ python3 \
python3-venv \ python3-venv \
python3-pip \
xz-utils xz-utils
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/*

View File

@@ -14,6 +14,7 @@ dnf -y install \
curl \ curl \
ca-certificates \ ca-certificates \
python3 \ python3 \
python3-pip \
xz xz
dnf clean all dnf clean all

View File

@@ -17,6 +17,7 @@ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
make \ make \
python3 \ python3 \
python3-venv \ python3-venv \
python3-pip \
ca-certificates \ ca-certificates \
xz-utils xz-utils

View File

@@ -2,6 +2,16 @@
set -euo pipefail set -euo pipefail
FLAKE_DIR="/usr/lib/package-manager" FLAKE_DIR="/usr/lib/package-manager"
NIX_LIB_DIR="${FLAKE_DIR}/nix/lib"
RETRY_LIB="${NIX_LIB_DIR}/retry_403.sh"
# ---------------------------------------------------------------------------
# Hard requirement: retry helper must exist (fail if missing)
# ---------------------------------------------------------------------------
if [[ ! -f "${RETRY_LIB}" ]]; then
echo "[launcher] ERROR: Required retry helper not found: ${RETRY_LIB}" >&2
exit 1
fi
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Try to ensure that "nix" is on PATH (common locations + container user) # Try to ensure that "nix" is on PATH (common locations + container user)
@@ -32,9 +42,13 @@ if ! command -v nix >/dev/null 2>&1; then
fi fi
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Primary path: use Nix flake if available # Primary path: use Nix flake if available (with GitHub 403 retry)
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
if command -v nix >/dev/null 2>&1; then if declare -F run_with_github_403_retry >/dev/null; then
# shellcheck source=./scripts/nix/lib/retry_403.sh
source "${RETRY_LIB}"
exec run_with_github_403_retry nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
else
exec nix run "${FLAKE_DIR}#pkgmgr" -- "$@" exec nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
fi fi

View File

@@ -1,32 +1,49 @@
#!/usr/bin/env bash #!/usr/bin/env bash
set -euo pipefail set -euo pipefail
IMAGE="pkgmgr-$PKGMGR_DISTRO" IMAGE="pkgmgr-${PKGMGR_DISTRO}"
echo echo
echo "------------------------------------------------------------" echo "------------------------------------------------------------"
echo ">>> Testing VENV: $IMAGE" echo ">>> Testing VENV: ${IMAGE}"
echo "------------------------------------------------------------" echo "------------------------------------------------------------"
echo "[test-env-virtual] Inspect image metadata:" echo "[test-env-virtual] Inspect image metadata:"
docker image inspect "$IMAGE" | sed -n '1,40p' docker image inspect "${IMAGE}" | sed -n '1,40p'
echo "[test-env-virtual] Running: docker run --rm --entrypoint pkgmgr $IMAGE --help"
echo echo
# Run the command and capture the output # ------------------------------------------------------------
# Run VENV-based pkgmgr test inside container
# ------------------------------------------------------------
if OUTPUT=$(docker run --rm \ if OUTPUT=$(docker run --rm \
-e REINSTALL_PKGMGR=1 \ -e REINSTALL_PKGMGR=1 \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \ -v "$(pwd):/src" \
-v "$(pwd):/src" \ -w /src \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \ "${IMAGE}" \
"$IMAGE" 2>&1); then bash -lc '
set -euo pipefail
echo "[test-env-virtual] Installing pkgmgr (distro package)..."
make install
echo "[test-env-virtual] Setting up Python venv..."
make setup-venv
echo "[test-env-virtual] Activating venv..."
. "$HOME/.venvs/pkgmgr/bin/activate"
echo "[test-env-virtual] Using pkgmgr from:"
command -v pkgmgr
pkgmgr --help
' 2>&1); then
echo "$OUTPUT" echo "$OUTPUT"
echo echo
echo "[test-env-virtual] SUCCESS: $IMAGE responded to 'pkgmgr --help'" echo "[test-env-virtual] SUCCESS: venv-based pkgmgr works in ${IMAGE}"
else else
echo "$OUTPUT" echo "$OUTPUT"
echo echo
echo "[test-env-virtual] ERROR: $IMAGE failed to run 'pkgmgr --help'" echo "[test-env-virtual] ERROR: venv-based pkgmgr failed in ${IMAGE}"
exit 1 exit 1
fi fi

View File

@@ -16,7 +16,7 @@ Responsibilities:
from __future__ import annotations from __future__ import annotations
import os import os
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional, Tuple
from pkgmgr.core.repository.identifier import get_repo_identifier from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir from pkgmgr.core.repository.dir import get_repo_dir
@@ -93,6 +93,7 @@ def _verify_repo(
repo_dir: str, repo_dir: str,
no_verification: bool, no_verification: bool,
identifier: str, identifier: str,
silent: bool,
) -> bool: ) -> bool:
""" """
Verify a repository using the configured verification data. Verify a repository using the configured verification data.
@@ -111,10 +112,15 @@ def _verify_repo(
print(f"Warning: Verification failed for {identifier}:") print(f"Warning: Verification failed for {identifier}:")
for err in errors: for err in errors:
print(f" - {err}") print(f" - {err}")
choice = input("Continue anyway? [y/N]: ").strip().lower()
if choice != "y": if silent:
print(f"Skipping installation for {identifier}.") # Non-interactive mode: continue with a warning.
return False print(f"[Warning] Continuing despite verification failure for {identifier} (--silent).")
else:
choice = input("Continue anyway? [y/N]: ").strip().lower()
if choice != "y":
print(f"Skipping installation for {identifier}.")
return False
return True return True
@@ -163,6 +169,8 @@ def install_repos(
clone_mode: str, clone_mode: str,
update_dependencies: bool, update_dependencies: bool,
force_update: bool = False, force_update: bool = False,
silent: bool = False,
emit_summary: bool = True,
) -> None: ) -> None:
""" """
Install one or more repositories according to the configured installers Install one or more repositories according to the configured installers
@@ -170,45 +178,72 @@ def install_repos(
If force_update=True, installers of the currently active layer are allowed If force_update=True, installers of the currently active layer are allowed
to run again (upgrade/refresh), even if that layer is already loaded. to run again (upgrade/refresh), even if that layer is already loaded.
If silent=True, repository failures are downgraded to warnings and the
overall command never exits non-zero because of per-repository failures.
""" """
pipeline = InstallationPipeline(INSTALLERS) pipeline = InstallationPipeline(INSTALLERS)
failures: List[Tuple[str, str]] = []
for repo in selected_repos: for repo in selected_repos:
identifier = get_repo_identifier(repo, all_repos) identifier = get_repo_identifier(repo, all_repos)
repo_dir = _ensure_repo_dir( try:
repo=repo, repo_dir = _ensure_repo_dir(
repositories_base_dir=repositories_base_dir, repo=repo,
all_repos=all_repos, repositories_base_dir=repositories_base_dir,
preview=preview, all_repos=all_repos,
no_verification=no_verification, preview=preview,
clone_mode=clone_mode, no_verification=no_verification,
identifier=identifier, clone_mode=clone_mode,
) identifier=identifier,
if not repo_dir: )
if not repo_dir:
failures.append((identifier, "clone/ensure repo directory failed"))
continue
if not _verify_repo(
repo=repo,
repo_dir=repo_dir,
no_verification=no_verification,
identifier=identifier,
silent=silent,
):
continue
ctx = _create_context(
repo=repo,
identifier=identifier,
repo_dir=repo_dir,
repositories_base_dir=repositories_base_dir,
bin_dir=bin_dir,
all_repos=all_repos,
no_verification=no_verification,
preview=preview,
quiet=quiet,
clone_mode=clone_mode,
update_dependencies=update_dependencies,
force_update=force_update,
)
pipeline.run(ctx)
except SystemExit as exc:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"installer failed (exit={code})"))
if not quiet:
print(f"[Warning] install: repository {identifier} failed (exit={code}). Continuing...")
continue
except Exception as exc:
failures.append((identifier, f"unexpected error: {exc}"))
if not quiet:
print(f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing...")
continue continue
if not _verify_repo( if failures and emit_summary and not quiet:
repo=repo, print("\n[pkgmgr] Installation finished with warnings:")
repo_dir=repo_dir, for ident, msg in failures:
no_verification=no_verification, print(f" - {ident}: {msg}")
identifier=identifier,
):
continue
ctx = _create_context( if failures and not silent:
repo=repo, raise SystemExit(1)
identifier=identifier,
repo_dir=repo_dir,
repositories_base_dir=repositories_base_dir,
bin_dir=bin_dir,
all_repos=all_repos,
no_verification=no_verification,
preview=preview,
quiet=quiet,
clone_mode=clone_mode,
update_dependencies=update_dependencies,
force_update=force_update,
)
pipeline.run(ctx)

View File

@@ -0,0 +1,100 @@
from __future__ import annotations
from typing import TYPE_CHECKING, List
from .profile import NixProfileInspector
from .retry import GitHubRateLimitRetry
from .runner import CommandRunner
from .textparse import NixConflictTextParser
if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
class NixConflictResolver:
    """
    Resolve ``nix profile install`` file conflicts and retry the install.

    Strategy, in order:
      1. Parse conflicting store paths from the nix error output (stderr/stdout).
      2. Map them to profile remove tokens via ``nix profile list --json``.
      3. Remove those tokens deterministically and retry the install.
      4. Fall back to output-name lookup, then to textual remove tokens
         suggested directly in the error output ("nix profile remove X").
    """

    def __init__(
        self,
        runner: CommandRunner,
        retry: GitHubRateLimitRetry,
        profile: NixProfileInspector,
    ) -> None:
        # Collaborators are injected so the resolver stays testable.
        self._runner = runner
        self._retry = retry
        self._profile = profile
        self._parser = NixConflictTextParser()

    def _remove_and_retry(
        self,
        ctx: "RepoContext",
        tokens: List[str],
        install_cmd: str,
    ):
        """Remove the given profile tokens, retry the install, return the result."""
        for t in tokens:
            # Tokens may be names like "pkgmgr", indexed forms like "pkgmgr-1",
            # or quoted tokens; they are passed through unmodified.
            self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
        return self._retry.run_with_retry(ctx, self._runner, install_cmd)

    def resolve(
        self,
        ctx: "RepoContext",
        install_cmd: str,
        stdout: str,
        stderr: str,
        *,
        output: str,
        max_rounds: int = 10,
    ) -> bool:
        """
        Attempt to resolve profile conflicts for *install_cmd*.

        Returns True when a retried install succeeded, False when no removable
        profile entries could be determined or *max_rounds* was exhausted.
        """
        quiet = bool(getattr(ctx, "quiet", False))
        combined = f"{stdout}\n{stderr}"
        for _ in range(max_rounds):
            # 1) Extract conflicting store prefixes from nix error output
            store_prefixes = self._parser.existing_store_prefixes(combined)

            # 2) Resolve them to concrete remove tokens
            tokens: List[str] = self._profile.find_remove_tokens_for_store_prefixes(
                ctx,
                self._runner,
                store_prefixes,
            )

            # 3) Fallback: output-name based lookup (also covers nix suggesting:
            #    `nix profile remove pkgmgr`)
            if not tokens:
                tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)

            if tokens:
                if not quiet:
                    print(
                        "[nix] conflict detected; removing existing profile entries: "
                        + ", ".join(tokens)
                    )
                res = self._remove_and_retry(ctx, tokens, install_cmd)
                if res.returncode == 0:
                    return True
                combined = f"{res.stdout}\n{res.stderr}"
                continue

            # 4) Last-resort fallback: use textual remove tokens from stderr
            #    ("nix profile remove X")
            tokens = self._parser.remove_tokens(combined)
            if tokens:
                if not quiet:
                    print("[nix] fallback remove tokens: " + ", ".join(tokens))
                res = self._remove_and_retry(ctx, tokens, install_cmd)
                if res.returncode == 0:
                    return True
                combined = f"{res.stdout}\n{res.stderr}"
                continue

            if not quiet:
                print("[nix] conflict detected but could not resolve profile entries to remove.")
            return False
        return False

View File

@@ -1,12 +1,12 @@
# src/pkgmgr/actions/install/installers/nix/installer.py
from __future__ import annotations from __future__ import annotations
import os import os
import shutil import shutil
from typing import List, Tuple, TYPE_CHECKING from typing import TYPE_CHECKING, List, Tuple
from pkgmgr.actions.install.installers.base import BaseInstaller from pkgmgr.actions.install.installers.base import BaseInstaller
from .conflicts import NixConflictResolver
from .profile import NixProfileInspector from .profile import NixProfileInspector
from .retry import GitHubRateLimitRetry, RetryPolicy from .retry import GitHubRateLimitRetry, RetryPolicy
from .runner import CommandRunner from .runner import CommandRunner
@@ -14,6 +14,7 @@ from .runner import CommandRunner
if TYPE_CHECKING: if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext from pkgmgr.actions.install.context import RepoContext
class NixFlakeInstaller(BaseInstaller): class NixFlakeInstaller(BaseInstaller):
layer = "nix" layer = "nix"
FLAKE_FILE = "flake.nix" FLAKE_FILE = "flake.nix"
@@ -22,15 +23,18 @@ class NixFlakeInstaller(BaseInstaller):
self._runner = CommandRunner() self._runner = CommandRunner()
self._retry = GitHubRateLimitRetry(policy=policy) self._retry = GitHubRateLimitRetry(policy=policy)
self._profile = NixProfileInspector() self._profile = NixProfileInspector()
self._conflicts = NixConflictResolver(self._runner, self._retry, self._profile)
# ------------------------------------------------------------------ # # Newer nix rejects numeric indices; we learn this at runtime and cache the decision.
# Compatibility: supports() self._indices_supported: bool | None = None
# ------------------------------------------------------------------ #
def supports(self, ctx: "RepoContext") -> bool: def supports(self, ctx: "RepoContext") -> bool:
if os.environ.get("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER") == "1": if os.environ.get("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER") == "1":
if not ctx.quiet: if not ctx.quiet:
print("[INFO] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 skipping NixFlakeInstaller.") print(
"[INFO] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 "
"skipping NixFlakeInstaller."
)
return False return False
if shutil.which("nix") is None: if shutil.which("nix") is None:
@@ -38,20 +42,12 @@ class NixFlakeInstaller(BaseInstaller):
return os.path.exists(os.path.join(ctx.repo_dir, self.FLAKE_FILE)) return os.path.exists(os.path.join(ctx.repo_dir, self.FLAKE_FILE))
# ------------------------------------------------------------------ #
# Compatibility: output selection
# ------------------------------------------------------------------ #
def _profile_outputs(self, ctx: "RepoContext") -> List[Tuple[str, bool]]: def _profile_outputs(self, ctx: "RepoContext") -> List[Tuple[str, bool]]:
# (output_name, allow_failure) # (output_name, allow_failure)
if ctx.identifier in {"pkgmgr", "package-manager"}: if ctx.identifier in {"pkgmgr", "package-manager"}:
return [("pkgmgr", False), ("default", True)] return [("pkgmgr", False), ("default", True)]
return [("default", False)] return [("default", False)]
# ------------------------------------------------------------------ #
# Compatibility: run()
# ------------------------------------------------------------------ #
def run(self, ctx: "RepoContext") -> None: def run(self, ctx: "RepoContext") -> None:
if not self.supports(ctx): if not self.supports(ctx):
return return
@@ -59,11 +55,12 @@ class NixFlakeInstaller(BaseInstaller):
outputs = self._profile_outputs(ctx) outputs = self._profile_outputs(ctx)
if not ctx.quiet: if not ctx.quiet:
print( msg = (
"[nix] flake detected in " "[nix] flake detected in "
f"{ctx.identifier}, ensuring outputs: " f"{ctx.identifier}, ensuring outputs: "
+ ", ".join(name for name, _ in outputs) + ", ".join(name for name, _ in outputs)
) )
print(msg)
for output, allow_failure in outputs: for output, allow_failure in outputs:
if ctx.force_update: if ctx.force_update:
@@ -71,13 +68,13 @@ class NixFlakeInstaller(BaseInstaller):
else: else:
self._install_only(ctx, output, allow_failure) self._install_only(ctx, output, allow_failure)
# ------------------------------------------------------------------ #
# Core logic (unchanged semantics)
# ------------------------------------------------------------------ #
def _installable(self, ctx: "RepoContext", output: str) -> str: def _installable(self, ctx: "RepoContext", output: str) -> str:
return f"{ctx.repo_dir}#{output}" return f"{ctx.repo_dir}#{output}"
# ---------------------------------------------------------------------
# Core install path
# ---------------------------------------------------------------------
def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None: def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
install_cmd = f"nix profile install {self._installable(ctx, output)}" install_cmd = f"nix profile install {self._installable(ctx, output)}"
@@ -85,35 +82,56 @@ class NixFlakeInstaller(BaseInstaller):
print(f"[nix] install: {install_cmd}") print(f"[nix] install: {install_cmd}")
res = self._retry.run_with_retry(ctx, self._runner, install_cmd) res = self._retry.run_with_retry(ctx, self._runner, install_cmd)
if res.returncode == 0: if res.returncode == 0:
if not ctx.quiet: if not ctx.quiet:
print(f"[nix] output '{output}' successfully installed.") print(f"[nix] output '{output}' successfully installed.")
return return
# Conflict resolver first (handles the common “existing package already provides file” case)
if self._conflicts.resolve(
ctx,
install_cmd,
res.stdout,
res.stderr,
output=output,
):
if not ctx.quiet:
print(f"[nix] output '{output}' successfully installed after conflict cleanup.")
return
if not ctx.quiet: if not ctx.quiet:
print( print(
f"[nix] install failed for '{output}' (exit {res.returncode}), " f"[nix] install failed for '{output}' (exit {res.returncode}), "
"trying index-based upgrade/remove+install..." "trying upgrade/remove+install..."
) )
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output) # If indices are supported, try legacy index-upgrade path.
if self._indices_supported is not False:
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
upgraded = False upgraded = False
for idx in indices: for idx in indices:
if self._upgrade_index(ctx, idx): if self._upgrade_index(ctx, idx):
upgraded = True upgraded = True
if not ctx.quiet: if not ctx.quiet:
print(f"[nix] output '{output}' successfully upgraded (index {idx}).") print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
if upgraded: if upgraded:
return return
if indices and not ctx.quiet: if indices and not ctx.quiet:
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.") print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
for idx in indices: for idx in indices:
self._remove_index(ctx, idx) self._remove_index(ctx, idx)
# If we learned indices are unsupported, immediately fall back below
if self._indices_supported is False:
self._remove_tokens_for_output(ctx, output)
else:
# indices explicitly unsupported
self._remove_tokens_for_output(ctx, output)
final = self._runner.run(ctx, install_cmd, allow_failure=True) final = self._runner.run(ctx, install_cmd, allow_failure=True)
if final.returncode == 0: if final.returncode == 0:
@@ -122,17 +140,24 @@ class NixFlakeInstaller(BaseInstaller):
return return
print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})") print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})")
if not allow_failure: if not allow_failure:
raise SystemExit(final.returncode) raise SystemExit(final.returncode)
print(f"[WARNING] Continuing despite failure of optional output '{output}'.") print(f"[WARNING] Continuing despite failure of optional output '{output}'.")
# ------------------------------------------------------------------ # # ---------------------------------------------------------------------
# force_update path (unchanged semantics) # force_update path
# ------------------------------------------------------------------ # # ---------------------------------------------------------------------
def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None: def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
# Prefer token path if indices unsupported (new nix)
if self._indices_supported is False:
self._remove_tokens_for_output(ctx, output)
self._install_only(ctx, output, allow_failure)
if not ctx.quiet:
print(f"[nix] output '{output}' successfully upgraded.")
return
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output) indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
upgraded_any = False upgraded_any = False
@@ -143,7 +168,8 @@ class NixFlakeInstaller(BaseInstaller):
print(f"[nix] output '{output}' successfully upgraded (index {idx}).") print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
if upgraded_any: if upgraded_any:
print(f"[nix] output '{output}' successfully upgraded.") if not ctx.quiet:
print(f"[nix] output '{output}' successfully upgraded.")
return return
if indices and not ctx.quiet: if indices and not ctx.quiet:
@@ -152,17 +178,52 @@ class NixFlakeInstaller(BaseInstaller):
for idx in indices: for idx in indices:
self._remove_index(ctx, idx) self._remove_index(ctx, idx)
# If we learned indices are unsupported, also remove by token to actually clear conflicts
if self._indices_supported is False:
self._remove_tokens_for_output(ctx, output)
self._install_only(ctx, output, allow_failure) self._install_only(ctx, output, allow_failure)
print(f"[nix] output '{output}' successfully upgraded.") if not ctx.quiet:
print(f"[nix] output '{output}' successfully upgraded.")
# ------------------------------------------------------------------ # # ---------------------------------------------------------------------
# Helpers # Helpers
# ------------------------------------------------------------------ # # ---------------------------------------------------------------------
def _stderr_says_indices_unsupported(self, stderr: str) -> bool:
s = (stderr or "").lower()
return "no longer supports indices" in s or "does not support indices" in s
def _upgrade_index(self, ctx: "RepoContext", idx: int) -> bool: def _upgrade_index(self, ctx: "RepoContext", idx: int) -> bool:
res = self._runner.run(ctx, f"nix profile upgrade --refresh {idx}", allow_failure=True) cmd = f"nix profile upgrade --refresh {idx}"
res = self._runner.run(ctx, cmd, allow_failure=True)
if self._stderr_says_indices_unsupported(getattr(res, "stderr", "")):
self._indices_supported = False
return False
if self._indices_supported is None:
self._indices_supported = True
return res.returncode == 0 return res.returncode == 0
def _remove_index(self, ctx: "RepoContext", idx: int) -> None: def _remove_index(self, ctx: "RepoContext", idx: int) -> None:
self._runner.run(ctx, f"nix profile remove {idx}", allow_failure=True) res = self._runner.run(ctx, f"nix profile remove {idx}", allow_failure=True)
if self._stderr_says_indices_unsupported(getattr(res, "stderr", "")):
self._indices_supported = False
if self._indices_supported is None:
self._indices_supported = True
def _remove_tokens_for_output(self, ctx: "RepoContext", output: str) -> None:
tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
if not tokens:
return
if not ctx.quiet:
print(f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}")
for t in tokens:
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)

View File

@@ -1,71 +0,0 @@
from __future__ import annotations
import json
from typing import Any, List, TYPE_CHECKING
if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
from .runner import CommandRunner
class NixProfileInspector:
    """
    Inspect `nix profile list --json` output and locate the profile
    indices whose elements match a given flake output name.
    """

    def find_installed_indices_for_output(self, ctx: "RepoContext", runner: "CommandRunner", output: str) -> List[int]:
        """Return the sorted profile indices whose element matches *output*.

        Returns an empty list when the listing command fails or its JSON
        cannot be parsed; both dict-shaped and list-shaped "elements"
        payloads are supported.
        """
        listing = runner.run(ctx, "nix profile list --json", allow_failure=True)
        if listing.returncode != 0:
            return []
        try:
            payload = json.loads(listing.stdout or "{}")
        except json.JSONDecodeError:
            return []

        elements = payload.get("elements")

        if isinstance(elements, dict):
            found: List[int] = []
            for key, entry in elements.items():
                try:
                    position = int(key)
                except (TypeError, ValueError):
                    # Non-numeric keys carry no usable index.
                    continue
                if self._element_matches_output(entry, output):
                    found.append(position)
            return sorted(found)

        if isinstance(elements, list):
            found = [
                entry["index"]
                for entry in elements
                if isinstance(entry, dict)
                and isinstance(entry.get("index"), int)
                and self._element_matches_output(entry, output)
            ]
            return sorted(found)

        return []

    @staticmethod
    def element_matches_output(elem: Any, output: str) -> bool:
        """Public alias for the private matcher."""
        return NixProfileInspector._element_matches_output(elem, output)

    @staticmethod
    def _element_matches_output(elem: Any, output: str) -> bool:
        """True when *elem* references *output* by name, URL or attr path."""
        wanted = (output or "").strip()
        if not wanted or not isinstance(elem, dict):
            return False
        fields = ("attrPath", "originalUrl", "url", "storePath", "name")
        values = [elem.get(field) for field in fields]
        return any(
            value == wanted or f"#{wanted}" in value
            for value in values
            if isinstance(value, str) and value
        )

View File

@@ -0,0 +1,4 @@
from .inspector import NixProfileInspector
from .models import NixProfileEntry
__all__ = ["NixProfileInspector", "NixProfileEntry"]

View File

@@ -0,0 +1,162 @@
from __future__ import annotations
from typing import Any, List, TYPE_CHECKING
from .matcher import (
entry_matches_output,
entry_matches_store_path,
stable_unique_ints,
)
from .normalizer import normalize_elements
from .parser import parse_profile_list_json
from .result import extract_stdout_text
if TYPE_CHECKING:
# Keep these as TYPE_CHECKING-only to avoid runtime import cycles.
from pkgmgr.actions.install.context import RepoContext
from pkgmgr.core.command.runner import CommandRunner
class NixProfileInspector:
    """
    Reads and inspects the user's Nix profile list (JSON).

    Public API:
      - list_json()
      - find_installed_indices_for_output()      (legacy; may not work on newer nix)
      - find_indices_by_store_path()             (legacy; may not work on newer nix)
      - find_remove_tokens_for_output()
      - find_remove_tokens_for_store_prefixes()
    """

    def list_json(self, ctx: "RepoContext", runner: "CommandRunner") -> dict[str, Any]:
        """Run `nix profile list --json` and return the parsed document.

        Raises via parse_profile_list_json() when the payload is not JSON.
        """
        res = runner.run(ctx, "nix profile list --json", allow_failure=False)
        raw = extract_stdout_text(res)
        return parse_profile_list_json(raw)

    # ---------------------------------------------------------------------
    # Legacy index helpers (still useful on older nix; newer nix may reject indices)
    # ---------------------------------------------------------------------
    def find_installed_indices_for_output(
        self,
        ctx: "RepoContext",
        runner: "CommandRunner",
        output: str,
    ) -> List[int]:
        """Return unique profile indices for entries matching *output*."""
        entries = normalize_elements(self.list_json(ctx, runner))
        hits = [
            e.index
            for e in entries
            if e.index is not None and entry_matches_output(e, output)
        ]
        return stable_unique_ints(hits)

    def find_indices_by_store_path(
        self,
        ctx: "RepoContext",
        runner: "CommandRunner",
        store_path: str,
    ) -> List[int]:
        """Return unique profile indices for entries matching *store_path* exactly."""
        needle = (store_path or "").strip()
        if not needle:
            return []
        entries = normalize_elements(self.list_json(ctx, runner))
        hits = [
            e.index
            for e in entries
            if e.index is not None and entry_matches_store_path(e, needle)
        ]
        return stable_unique_ints(hits)

    # ---------------------------------------------------------------------
    # New token-based helpers (works with newer nix where indices are rejected)
    # ---------------------------------------------------------------------
    @staticmethod
    def _entry_remove_token(entry: "NixProfileEntry") -> str:
        """Best non-index remove token for *entry*: its key, then its name.

        New nix rejects numeric indices, so purely-numeric candidates are
        skipped. Returns "" when no usable token exists.
        """
        key = (entry.key or "").strip()
        if key and not key.isdigit():
            return key
        name = (entry.name or "").strip()
        if name and not name.isdigit():
            return name
        return ""

    @staticmethod
    def _stable_unique(tokens: List[str]) -> List[str]:
        """Order-preserving dedup that also drops empty tokens."""
        seen: set[str] = set()
        uniq: List[str] = []
        for t in tokens:
            if t and t not in seen:
                uniq.append(t)
                seen.add(t)
        return uniq

    def find_remove_tokens_for_output(
        self,
        ctx: "RepoContext",
        runner: "CommandRunner",
        output: str,
    ) -> List[str]:
        """
        Returns profile remove tokens to remove entries matching a given output.

        We always include the raw output token first because nix itself suggests:
            nix profile remove pkgmgr
        """
        out = (output or "").strip()
        if not out:
            return []
        entries = normalize_elements(self.list_json(ctx, runner))
        # critical: the raw output name matches nix's own suggestion for conflicts
        tokens: List[str] = [out]
        tokens.extend(
            self._entry_remove_token(e)
            for e in entries
            if entry_matches_output(e, out)
        )
        return self._stable_unique(tokens)

    def find_remove_tokens_for_store_prefixes(
        self,
        ctx: "RepoContext",
        runner: "CommandRunner",
        prefixes: List[str],
    ) -> List[str]:
        """
        Returns remove tokens for entries whose store path matches any prefix.
        """
        wanted = [(p or "").strip() for p in (prefixes or []) if p]
        wanted = [p for p in wanted if p]
        if not wanted:
            return []
        entries = normalize_elements(self.list_json(ctx, runner))
        tokens = [
            self._entry_remove_token(e)
            for e in entries
            if e.store_paths and any(sp == p for sp in e.store_paths for p in wanted)
        ]
        return self._stable_unique(tokens)

View File

@@ -0,0 +1,62 @@
from __future__ import annotations
from typing import List
from .models import NixProfileEntry
def entry_matches_output(entry: NixProfileEntry, output: str) -> bool:
    """
    Heuristic matcher: *output* is typically a flake output name (e.g.
    "pkgmgr"); it is matched against the entry's name and attrPath patterns.
    """
    wanted = (output or "").strip()
    if not wanted:
        return False

    def hits(text: str) -> bool:
        # Exact match, "#<output>" inside an attrPath, ".<output>" suffix,
        # or the common "<output>-<n>" name pattern (e.g. pkgmgr-1).
        if text == wanted or f"#{wanted}" in text:
            return True
        if text.endswith(f".{wanted}") or text.startswith(f"{wanted}-"):
            return True
        # Historical special case: repo is "package-manager" but output is "pkgmgr".
        return wanted == "pkgmgr" and text.startswith("package-manager-")

    candidates = (entry.name, entry.attr_path)
    return any(hits(c.strip()) for c in candidates if c and c.strip())
def entry_matches_store_path(entry: NixProfileEntry, store_path: str) -> bool:
    """True when *store_path* exactly equals one of the entry's store paths."""
    wanted = (store_path or "").strip()
    if not wanted:
        return False
    for candidate in entry.store_paths:
        if (candidate or "") == wanted:
            return True
    return False
def stable_unique_ints(values: List[int]) -> List[int]:
    """De-duplicate *values* while preserving first-seen order."""
    # dict keys are insertion-ordered, so this is the classic stable dedup.
    return list(dict.fromkeys(values))

View File

@@ -0,0 +1,17 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import List, Optional
@dataclass(frozen=True)
class NixProfileEntry:
    """
    Minimal normalized representation of one nix profile element entry.
    """
    # Raw key of the element in the "elements" JSON mapping (may be a
    # numeric string like "0" or a name like "pkgmgr-1").
    key: str
    # Numeric profile index when one could be derived; None otherwise.
    index: Optional[int]
    # The entry's "name" field ("" when absent).
    name: str
    # The entry's "attrPath" field ("" when absent).
    attr_path: str
    # All store paths collected from the entry (normalized, non-empty).
    store_paths: List[str]

View File

@@ -0,0 +1,128 @@
from __future__ import annotations
import re
from typing import Any, Dict, Iterable, List, Optional
from .models import NixProfileEntry
def coerce_index(key: str, entry: Dict[str, Any]) -> Optional[int]:
    """
    Best-effort extraction of a numeric profile index.

    Nix JSON schema varies:
      - elements keys might be "0", "1", ...
      - or might be names like "pkgmgr-1"
      - some versions include an explicit index field.
    Safe options are tried in order; None when nothing works.
    """
    label = (key or "").strip()

    # 1) Classic schema: the element key itself is the index.
    if label.isdigit():
        try:
            return int(label)
        except Exception:
            # isdigit() accepts some Unicode digits int() rejects.
            return None

    # 2) Explicit index-like fields (schema-dependent).
    for name in ("index", "id", "position"):
        value = entry.get(name)
        if isinstance(value, int):
            return value
        if isinstance(value, str):
            text = value.strip()
            if text.isdigit():
                try:
                    return int(text)
                except Exception:
                    pass

    # 3) Last resort: a trailing "-<n>" in the key, e.g. "pkgmgr-1".
    tail = re.match(r"^.+-(\d+)$", label)
    if tail is None:
        return None
    try:
        return int(tail.group(1))
    except Exception:
        return None
def iter_store_paths(entry: Dict[str, Any]) -> Iterable[str]:
    """
    Yield every store path found in a nix profile JSON entry.

    Nix has had schema shifts; the common variants are supported:
      - "storePaths": ["/nix/store/..", ...]
      - "storePaths": "/nix/store/.."      (rare)
      - "storePath":  "/nix/store/.."      (some variants)
      - nested "outputs" dicts carrying a "storePath" (best-effort)
    """
    if not isinstance(entry, dict):
        return

    plural = entry.get("storePaths")
    if isinstance(plural, str):
        yield plural
    elif isinstance(plural, list):
        yield from (item for item in plural if isinstance(item, str))

    singular = entry.get("storePath")
    if isinstance(singular, str):
        yield singular

    outputs = entry.get("outputs")
    if isinstance(outputs, dict):
        for value in outputs.values():
            if isinstance(value, dict):
                nested = value.get("storePath")
                if isinstance(nested, str):
                    yield nested
def normalize_store_path(store_path: str) -> str:
    """
    Canonicalize a store path for matching.

    Currently only trims surrounding whitespace; kept as a single hook so
    future normalization has one place to live.
    """
    if not store_path:
        return ""
    return store_path.strip()
def normalize_elements(data: Dict[str, Any]) -> List[NixProfileEntry]:
    """
    Convert `nix profile list --json` data into a list of normalized entries.

    Observed JSON shapes:
      - {"elements": {"0": {...}, "1": {...}}}
      - {"elements": {"pkgmgr-1": {...}, "pkgmgr-2": {...}}}
    """
    elements = data.get("elements")
    if not isinstance(elements, dict):
        return []

    result: List[NixProfileEntry] = []
    for raw_key, raw_entry in elements.items():
        if not isinstance(raw_entry, dict):
            # Non-dict elements carry nothing usable.
            continue
        key = str(raw_key)
        paths = [
            cleaned
            for cleaned in (normalize_store_path(p) for p in iter_store_paths(raw_entry))
            if cleaned
        ]
        result.append(
            NixProfileEntry(
                key=key,
                index=coerce_index(key, raw_entry),
                name=str(raw_entry.get("name", "") or ""),
                attr_path=str(raw_entry.get("attrPath", "") or ""),
                store_paths=paths,
            )
        )
    return result

View File

@@ -0,0 +1,19 @@
from __future__ import annotations
import json
from typing import Any, Dict
def parse_profile_list_json(raw: str) -> Dict[str, Any]:
    """
    Parse the output of `nix profile list --json`.

    Raises SystemExit with a helpful excerpt when the payload is not valid
    JSON, so the user sees what nix actually printed.
    """
    try:
        parsed = json.loads(raw)
    except json.JSONDecodeError as e:
        # Cap the excerpt so a huge garbage payload stays readable.
        excerpt = (raw or "")[:5000]
        raise SystemExit(
            f"[nix] Failed to parse `nix profile list --json`: {e}\n{excerpt}"
        ) from e
    return parsed

View File

@@ -0,0 +1,28 @@
from __future__ import annotations
from typing import Any
def extract_stdout_text(result: Any) -> str:
    """
    Normalize different runner return types to a stdout string.

    Supported patterns:
      - result is str                     -> returned as-is
      - result is bytes/bytearray         -> decoded UTF-8 (replace errors)
      - result has `.stdout` (str|bytes)  -> that value is used
      - anything else                     -> str(result)
    """
    def decode(blob: Any) -> str:
        return bytes(blob).decode("utf-8", errors="replace")

    if isinstance(result, str):
        return result
    if isinstance(result, (bytes, bytearray)):
        return decode(result)

    stdout = getattr(result, "stdout", None)
    if isinstance(stdout, str):
        return stdout
    if isinstance(stdout, (bytes, bytearray)):
        return decode(stdout)
    return str(result)

View File

@@ -0,0 +1,69 @@
from __future__ import annotations
import re
from typing import TYPE_CHECKING, List, Tuple
from .runner import CommandRunner
if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
class NixProfileListReader:
    """
    Parse the human-readable output of `nix profile list` into
    (index, store-path-prefix) pairs.
    """

    # One profile line: leading index, anything, then a store path.
    _LINE_RE = re.compile(
        r"^\s*(\d+)\s+.*?(/nix/store/[0-9a-z]{32}-[^/ \t]+)",
        re.MULTILINE,
    )

    def __init__(self, runner: CommandRunner) -> None:
        self._runner = runner

    @staticmethod
    def _store_prefix(path: str) -> str:
        """Trim a full store path down to its /nix/store/<hash>-<name> prefix."""
        candidate = (path or "").strip()
        match = re.match(r"^(/nix/store/[0-9a-z]{32}-[^/ \t]+)", candidate)
        return match.group(1) if match else candidate

    def entries(self, ctx: "RepoContext") -> List[Tuple[int, str]]:
        """Return unique (index, store prefix) pairs; first occurrence wins."""
        res = self._runner.run(ctx, "nix profile list", allow_failure=True)
        if res.returncode != 0:
            return []
        collected: dict[int, str] = {}
        for match in self._LINE_RE.finditer(res.stdout or ""):
            try:
                index = int(match.group(1))
            except Exception:
                continue
            if index not in collected:
                collected[index] = self._store_prefix(match.group(2))
        # dict preserves insertion order, so this keeps first-seen ordering.
        return list(collected.items())

    def indices_matching_store_prefixes(self, ctx: "RepoContext", prefixes: List[str]) -> List[int]:
        """Indices whose store prefix equals any of the given (normalized) prefixes."""
        wanted = {self._store_prefix(p) for p in prefixes if p}
        wanted.discard("")
        if not wanted:
            return []
        matches = [idx for idx, prefix in self.entries(ctx) if prefix in wanted]
        return list(dict.fromkeys(matches))

View File

@@ -0,0 +1,76 @@
from __future__ import annotations
import re
from typing import List
class NixConflictTextParser:
    """
    Extract actionable hints from `nix profile install` conflict messages:
    the suggested `nix profile remove <token>` commands, and the store paths
    of the already-installed (existing) package.
    """

    # Suggested remove command: bare, single-quoted or double-quoted token.
    _REMOVE_RE = re.compile(
        r"^\s*nix profile remove\s+([^\s'\"`]+|'[^']+'|\"[^\"]+\")\s*$",
        re.MULTILINE,
    )
    # A line that starts (after whitespace) with a store path.
    _STORE_LINE_RE = re.compile(r"^\s*(/nix/store/[0-9a-z]{32}-[^ \t]+)")

    @staticmethod
    def _store_prefix(path: str) -> str:
        """Trim a full store path down to its /nix/store/<hash>-<name> prefix."""
        candidate = (path or "").strip()
        match = re.match(r"^(/nix/store/[0-9a-z]{32}-[^/ \t]+)", candidate)
        return match.group(1) if match else candidate

    def remove_tokens(self, text: str) -> List[str]:
        """Unique tokens from suggested `nix profile remove ...` lines, in order."""
        found: List[str] = []
        for match in self._REMOVE_RE.finditer(text or ""):
            token = (match.group(1) or "").strip()
            quoted = (
                len(token) >= 2
                and token[0] == token[-1]
                and token[0] in "'\""
            )
            if quoted:
                token = token[1:-1]
            if token:
                found.append(token)
        return list(dict.fromkeys(found))

    def existing_store_prefixes(self, text: str) -> List[str]:
        """
        Store prefixes listed under the "existing package" section of a
        conflict message; paths of the new package are ignored.
        """
        raw_paths: List[str] = []
        collecting = False
        for raw in (text or "").splitlines():
            stripped = raw.strip()
            if "An existing package already provides the following file" in stripped:
                collecting = True
                continue
            if "This is the conflicting file from the new package" in stripped:
                collecting = False
                continue
            if not collecting:
                continue
            match = self._STORE_LINE_RE.match(raw)
            if match:
                raw_paths.append(match.group(1))
        normalized = (self._store_prefix(p) for p in raw_paths if p)
        return list(dict.fromkeys(p for p in normalized if p))

View File

@@ -1,20 +1,15 @@
from __future__ import annotations from __future__ import annotations
import os import os
from typing import List, Optional, Set
from pkgmgr.core.command.run import run_command from pkgmgr.core.command.run import run_command
from pkgmgr.core.git import GitError, run_git from pkgmgr.core.git import GitError, run_git
from typing import List, Optional, Set
from .types import MirrorMap, RepoMirrorContext, Repository from .types import MirrorMap, RepoMirrorContext, Repository
def build_default_ssh_url(repo: Repository) -> Optional[str]: def build_default_ssh_url(repo: Repository) -> Optional[str]:
"""
Build a simple SSH URL from repo config if no explicit mirror is defined.
Example: git@github.com:account/repository.git
"""
provider = repo.get("provider") provider = repo.get("provider")
account = repo.get("account") account = repo.get("account")
name = repo.get("repository") name = repo.get("repository")
@@ -23,95 +18,82 @@ def build_default_ssh_url(repo: Repository) -> Optional[str]:
if not provider or not account or not name: if not provider or not account or not name:
return None return None
provider = str(provider)
account = str(account)
name = str(name)
if port: if port:
return f"ssh://git@{provider}:{port}/{account}/{name}.git" return f"ssh://git@{provider}:{port}/{account}/{name}.git"
# GitHub-style shorthand
return f"git@{provider}:{account}/{name}.git" return f"git@{provider}:{account}/{name}.git"
def determine_primary_remote_url( def determine_primary_remote_url(
repo: Repository, repo: Repository,
resolved_mirrors: MirrorMap, ctx: RepoMirrorContext,
) -> Optional[str]: ) -> Optional[str]:
""" """
Determine the primary remote URL in a consistent way: Priority order:
1. origin from resolved mirrors
1. resolved_mirrors["origin"] 2. MIRRORS file order
2. any resolved mirror (first by name) 3. config mirrors order
3. default SSH URL from provider/account/repository 4. default SSH URL
""" """
if "origin" in resolved_mirrors: resolved = ctx.resolved_mirrors
return resolved_mirrors["origin"]
if resolved_mirrors: if resolved.get("origin"):
first_name = sorted(resolved_mirrors.keys())[0] return resolved["origin"]
return resolved_mirrors[first_name]
for mirrors in (ctx.file_mirrors, ctx.config_mirrors):
for _, url in mirrors.items():
if url:
return url
return build_default_ssh_url(repo) return build_default_ssh_url(repo)
def _safe_git_output(args: List[str], cwd: str) -> Optional[str]: def _safe_git_output(args: List[str], cwd: str) -> Optional[str]:
"""
Run a Git command via run_git and return its stdout, or None on failure.
"""
try: try:
return run_git(args, cwd=cwd) return run_git(args, cwd=cwd)
except GitError: except GitError:
return None return None
def current_origin_url(repo_dir: str) -> Optional[str]:
"""
Return the current URL for remote 'origin', or None if not present.
"""
output = _safe_git_output(["remote", "get-url", "origin"], cwd=repo_dir)
if not output:
return None
url = output.strip()
return url or None
def has_origin_remote(repo_dir: str) -> bool: def has_origin_remote(repo_dir: str) -> bool:
""" out = _safe_git_output(["remote"], cwd=repo_dir)
Check whether a remote called 'origin' exists in the repository. return bool(out and "origin" in out.split())
"""
output = _safe_git_output(["remote"], cwd=repo_dir)
if not output:
return False
names = output.split()
return "origin" in names
def _ensure_push_urls_for_origin( def _set_origin_fetch_and_push(repo_dir: str, url: str, preview: bool) -> None:
fetch = f"git remote set-url origin {url}"
push = f"git remote set-url --push origin {url}"
if preview:
print(f"[PREVIEW] Would run in {repo_dir!r}: {fetch}")
print(f"[PREVIEW] Would run in {repo_dir!r}: {push}")
return
run_command(fetch, cwd=repo_dir, preview=False)
run_command(push, cwd=repo_dir, preview=False)
def _ensure_additional_push_urls(
repo_dir: str, repo_dir: str,
mirrors: MirrorMap, mirrors: MirrorMap,
primary: str,
preview: bool, preview: bool,
) -> None: ) -> None:
""" desired: Set[str] = {u for u in mirrors.values() if u and u != primary}
Ensure that all mirror URLs are present as push URLs on 'origin'.
"""
desired: Set[str] = {url for url in mirrors.values() if url}
if not desired: if not desired:
return return
existing_output = _safe_git_output( out = _safe_git_output(
["remote", "get-url", "--push", "--all", "origin"], ["remote", "get-url", "--push", "--all", "origin"],
cwd=repo_dir, cwd=repo_dir,
) )
existing = set(existing_output.splitlines()) if existing_output else set() existing = set(out.splitlines()) if out else set()
missing = sorted(desired - existing) for url in sorted(desired - existing):
for url in missing:
cmd = f"git remote set-url --add --push origin {url}" cmd = f"git remote set-url --add --push origin {url}"
if preview: if preview:
print(f"[PREVIEW] Would run in {repo_dir!r}: {cmd}") print(f"[PREVIEW] Would run in {repo_dir!r}: {cmd}")
else: else:
print(f"[INFO] Adding push URL to 'origin': {url}")
run_command(cmd, cwd=repo_dir, preview=False) run_command(cmd, cwd=repo_dir, preview=False)
@@ -120,60 +102,32 @@ def ensure_origin_remote(
ctx: RepoMirrorContext, ctx: RepoMirrorContext,
preview: bool, preview: bool,
) -> None: ) -> None:
"""
Ensure that a usable 'origin' remote exists and has all push URLs.
"""
repo_dir = ctx.repo_dir repo_dir = ctx.repo_dir
resolved_mirrors = ctx.resolved_mirrors
if not os.path.isdir(os.path.join(repo_dir, ".git")): if not os.path.isdir(os.path.join(repo_dir, ".git")):
print(f"[WARN] {repo_dir} is not a Git repository (no .git directory).") print(f"[WARN] {repo_dir} is not a Git repository.")
return return
url = determine_primary_remote_url(repo, resolved_mirrors) primary = determine_primary_remote_url(repo, ctx)
if not primary:
print("[WARN] No primary mirror URL could be determined.")
return
if not has_origin_remote(repo_dir): if not has_origin_remote(repo_dir):
if not url: cmd = f"git remote add origin {primary}"
print(
"[WARN] Could not determine URL for 'origin' remote. "
"Please configure mirrors or provider/account/repository."
)
return
cmd = f"git remote add origin {url}"
if preview: if preview:
print(f"[PREVIEW] Would run in {repo_dir!r}: {cmd}") print(f"[PREVIEW] Would run in {repo_dir!r}: {cmd}")
else: else:
print(f"[INFO] Adding 'origin' remote in {repo_dir}: {url}")
run_command(cmd, cwd=repo_dir, preview=False) run_command(cmd, cwd=repo_dir, preview=False)
else:
current = current_origin_url(repo_dir)
if current == url or not url:
print(
"[INFO] 'origin' already points to "
f"{current or '<unknown>'} (no change needed)."
)
else:
# We do not auto-change origin here, only log the mismatch.
print(
"[INFO] 'origin' exists with URL "
f"{current or '<unknown>'}; not changing to {url}."
)
# Ensure all mirrors are present as push URLs _set_origin_fetch_and_push(repo_dir, primary, preview)
_ensure_push_urls_for_origin(repo_dir, resolved_mirrors, preview)
_ensure_additional_push_urls(repo_dir, ctx.resolved_mirrors, primary, preview)
def is_remote_reachable(url: str, cwd: Optional[str] = None) -> bool: def is_remote_reachable(url: str, cwd: Optional[str] = None) -> bool:
"""
Check whether a remote repository is reachable via `git ls-remote`.
This does NOT modify anything; it only probes the remote.
"""
workdir = cwd or os.getcwd()
try: try:
# --exit-code → non-zero exit code if the remote does not exist run_git(["ls-remote", "--exit-code", url], cwd=cwd or os.getcwd())
run_git(["ls-remote", "--exit-code", url], cwd=workdir)
return True return True
except GitError: except GitError:
return False return False

View File

@@ -0,0 +1,21 @@
# src/pkgmgr/actions/mirror/remote_check.py
from __future__ import annotations
from typing import Tuple
from pkgmgr.core.git import GitError, run_git
def probe_mirror(url: str, repo_dir: str) -> Tuple[bool, str]:
    """
    Probe a remote mirror URL using `git ls-remote`.

    Returns:
        (True, "") on success,
        (False, error_message) on failure.
    """
    try:
        run_git(["ls-remote", url], cwd=repo_dir)
    except GitError as exc:
        return False, str(exc)
    return True, ""

View File

@@ -0,0 +1,59 @@
from __future__ import annotations
from typing import List
from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, ensure_remote_repo
from pkgmgr.core.remote_provisioning.ensure import EnsureOptions
from .context import build_context
from .git_remote import determine_primary_remote_url
from .types import Repository
from .url_utils import normalize_provider_host, parse_repo_from_git_url
def ensure_remote_repository(
    repo: Repository,
    repositories_base_dir: str,
    all_repos: List[Repository],
    preview: bool,
) -> None:
    """
    Ensure the remote repository behind this repo's primary URL exists,
    provisioning it through the provider API when necessary.

    Best-effort: URL-parse failures and provider errors are reported on
    stdout instead of raising.
    """
    mirror_ctx = build_context(repo, repositories_base_dir, all_repos)
    primary_url = determine_primary_remote_url(repo, mirror_ctx)
    if not primary_url:
        print("[INFO] No primary URL found; skipping remote provisioning.")
        return

    raw_host, owner, name = parse_repo_from_git_url(primary_url)
    host = normalize_provider_host(raw_host)
    if not (host and owner and name):
        print("[WARN] Could not parse remote URL:", primary_url)
        return

    spec = RepoSpec(
        host=host,
        owner=owner,
        name=name,
        private=bool(repo.get("private", True)),
        description=str(repo.get("description", "")),
    )
    # An empty provider string means "let the provisioning layer autodetect".
    provider_kind = str(repo.get("provider", "")).lower() or None

    try:
        result = ensure_remote_repo(
            spec,
            provider_hint=ProviderHint(kind=provider_kind),
            options=EnsureOptions(
                preview=preview,
                interactive=True,
                allow_prompt=True,
                save_prompt_token_to_keyring=True,
            ),
        )
        print(f"[REMOTE ENSURE] {result.status.upper()}: {result.message}")
        if result.url:
            print(f"[REMOTE ENSURE] URL: {result.url}")
    except Exception as exc:  # noqa: BLE001
        print(f"[ERROR] Remote provisioning failed: {exc}")

View File

@@ -1,131 +1,20 @@
# src/pkgmgr/actions/mirror/setup_cmd.py
from __future__ import annotations from __future__ import annotations
from typing import List, Tuple from typing import List
from urllib.parse import urlparse
from pkgmgr.core.git import GitError, run_git
from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, ensure_remote_repo
from pkgmgr.core.remote_provisioning.ensure import EnsureOptions
from .context import build_context from .context import build_context
from .git_remote import determine_primary_remote_url, ensure_origin_remote from .git_remote import ensure_origin_remote, determine_primary_remote_url
from .remote_check import probe_mirror
from .remote_provision import ensure_remote_repository
from .types import Repository from .types import Repository
def _probe_mirror(url: str, repo_dir: str) -> Tuple[bool, str]:
"""
Probe a remote mirror URL using `git ls-remote`.
Returns:
(True, "") on success,
(False, error_message) on failure.
"""
try:
run_git(["ls-remote", url], cwd=repo_dir)
return True, ""
except GitError as exc:
return False, str(exc)
def _host_from_git_url(url: str) -> str:
url = (url or "").strip()
if not url:
return ""
if "://" in url:
parsed = urlparse(url)
netloc = (parsed.netloc or "").strip()
if "@" in netloc:
netloc = netloc.split("@", 1)[1]
# keep optional :port
return netloc
# scp-like: git@host:owner/repo.git
if "@" in url and ":" in url:
after_at = url.split("@", 1)[1]
host = after_at.split(":", 1)[0]
return host.strip()
return url.split("/", 1)[0].strip()
def _ensure_remote_repository(
repo: Repository,
repositories_base_dir: str,
all_repos: List[Repository],
preview: bool,
) -> None:
"""
Ensure that the remote repository exists using provider APIs.
This is ONLY called when ensure_remote=True.
"""
ctx = build_context(repo, repositories_base_dir, all_repos)
resolved_mirrors = ctx.resolved_mirrors
primary_url = determine_primary_remote_url(repo, resolved_mirrors)
if not primary_url:
print("[INFO] No remote URL could be derived; skipping remote provisioning.")
return
# IMPORTANT:
# - repo["provider"] is typically a provider *kind* (e.g. "github" / "gitea"),
# NOT a hostname. We derive the actual host from the remote URL.
host = _host_from_git_url(primary_url)
owner = repo.get("account")
name = repo.get("repository")
if not host or not owner or not name:
print("[WARN] Missing host/account/repository; cannot ensure remote repo.")
print(f" host={host!r}, account={owner!r}, repository={name!r}")
return
print("------------------------------------------------------------")
print(f"[REMOTE ENSURE] {ctx.identifier}")
print(f"[REMOTE ENSURE] host: {host}")
print("------------------------------------------------------------")
spec = RepoSpec(
host=str(host),
owner=str(owner),
name=str(name),
private=bool(repo.get("private", True)),
description=str(repo.get("description", "")),
)
provider_kind = str(repo.get("provider", "")).strip().lower() or None
try:
result = ensure_remote_repo(
spec,
provider_hint=ProviderHint(kind=provider_kind),
options=EnsureOptions(
preview=preview,
interactive=True,
allow_prompt=True,
save_prompt_token_to_keyring=True,
),
)
print(f"[REMOTE ENSURE] {result.status.upper()}: {result.message}")
if result.url:
print(f"[REMOTE ENSURE] URL: {result.url}")
except Exception as exc: # noqa: BLE001
# Keep action layer resilient
print(f"[ERROR] Remote provisioning failed: {exc}")
print()
def _setup_local_mirrors_for_repo( def _setup_local_mirrors_for_repo(
repo: Repository, repo: Repository,
repositories_base_dir: str, repositories_base_dir: str,
all_repos: List[Repository], all_repos: List[Repository],
preview: bool, preview: bool,
) -> None: ) -> None:
"""
Local setup:
- Ensure 'origin' remote exists and is sane
"""
ctx = build_context(repo, repositories_base_dir, all_repos) ctx = build_context(repo, repositories_base_dir, all_repos)
print("------------------------------------------------------------") print("------------------------------------------------------------")
@@ -133,7 +22,7 @@ def _setup_local_mirrors_for_repo(
print(f"[MIRROR SETUP:LOCAL] dir: {ctx.repo_dir}") print(f"[MIRROR SETUP:LOCAL] dir: {ctx.repo_dir}")
print("------------------------------------------------------------") print("------------------------------------------------------------")
ensure_origin_remote(repo, ctx, preview=preview) ensure_origin_remote(repo, ctx, preview)
print() print()
@@ -144,19 +33,7 @@ def _setup_remote_mirrors_for_repo(
preview: bool, preview: bool,
ensure_remote: bool, ensure_remote: bool,
) -> None: ) -> None:
"""
Remote-side setup / validation.
Default behavior:
- Non-destructive checks using `git ls-remote`.
Optional behavior:
- If ensure_remote=True:
* Attempt to create missing repositories via provider API
* Uses TokenResolver (ENV -> keyring -> prompt)
"""
ctx = build_context(repo, repositories_base_dir, all_repos) ctx = build_context(repo, repositories_base_dir, all_repos)
resolved_mirrors = ctx.resolved_mirrors
print("------------------------------------------------------------") print("------------------------------------------------------------")
print(f"[MIRROR SETUP:REMOTE] {ctx.identifier}") print(f"[MIRROR SETUP:REMOTE] {ctx.identifier}")
@@ -164,39 +41,30 @@ def _setup_remote_mirrors_for_repo(
print("------------------------------------------------------------") print("------------------------------------------------------------")
if ensure_remote: if ensure_remote:
_ensure_remote_repository( ensure_remote_repository(
repo, repo,
repositories_base_dir=repositories_base_dir, repositories_base_dir,
all_repos=all_repos, all_repos,
preview=preview, preview,
) )
if not resolved_mirrors: if not ctx.resolved_mirrors:
primary_url = determine_primary_remote_url(repo, resolved_mirrors) primary = determine_primary_remote_url(repo, ctx)
if not primary_url: if not primary:
print("[INFO] No mirrors configured and no primary URL available.")
print()
return return
ok, error_message = _probe_mirror(primary_url, ctx.repo_dir) ok, msg = probe_mirror(primary, ctx.repo_dir)
if ok: print("[OK]" if ok else "[WARN]", primary)
print(f"[OK] primary: {primary_url}") if msg:
else: print(msg)
print(f"[WARN] primary: {primary_url}")
for line in error_message.splitlines():
print(f" {line}")
print() print()
return return
for name, url in sorted(resolved_mirrors.items()): for name, url in ctx.resolved_mirrors.items():
ok, error_message = _probe_mirror(url, ctx.repo_dir) ok, msg = probe_mirror(url, ctx.repo_dir)
if ok: print(f"[OK] {name}: {url}" if ok else f"[WARN] {name}: {url}")
print(f"[OK] {name}: {url}") if msg:
else: print(msg)
print(f"[WARN] {name}: {url}")
for line in error_message.splitlines():
print(f" {line}")
print() print()
@@ -210,33 +78,20 @@ def setup_mirrors(
remote: bool = True, remote: bool = True,
ensure_remote: bool = False, ensure_remote: bool = False,
) -> None: ) -> None:
"""
Setup mirrors for the selected repositories.
local:
- Configure local Git remotes (ensure 'origin' exists).
remote:
- Non-destructive remote checks using `git ls-remote`.
ensure_remote:
- If True, attempt to create missing remote repositories via provider APIs.
- This is explicit and NEVER enabled implicitly.
"""
for repo in selected_repos: for repo in selected_repos:
if local: if local:
_setup_local_mirrors_for_repo( _setup_local_mirrors_for_repo(
repo=repo, repo,
repositories_base_dir=repositories_base_dir, repositories_base_dir,
all_repos=all_repos, all_repos,
preview=preview, preview,
) )
if remote: if remote:
_setup_remote_mirrors_for_repo( _setup_remote_mirrors_for_repo(
repo=repo, repo,
repositories_base_dir=repositories_base_dir, repositories_base_dir,
all_repos=all_repos, all_repos,
preview=preview, preview,
ensure_remote=ensure_remote, ensure_remote,
) )

View File

@@ -0,0 +1,111 @@
# src/pkgmgr/actions/mirror/url_utils.py
from __future__ import annotations
from urllib.parse import urlparse
from typing import Optional, Tuple
def hostport_from_git_url(url: str) -> Tuple[str, Optional[str]]:
    """
    Extract ``(host, port)`` from a Git remote URL.

    Handles full URLs (``ssh://``, ``https://``, ...), scp-like
    ``git@host:path`` syntax, and bare ``host/path`` strings.
    The port is None whenever absent.
    """
    text = (url or "").strip()
    if not text:
        return "", None

    if "://" in text:
        netloc = (urlparse(text).netloc or "").strip()
        # Drop any user@ credential prefix.
        if "@" in netloc:
            netloc = netloc.split("@", 1)[1]
        # Bracketed IPv6 literal, possibly followed by :port.
        if netloc.startswith("[") and "]" in netloc:
            close = netloc.index("]")
            addr = netloc[1:close]
            tail = netloc[close + 1 :]
            port = tail[1:] if tail.startswith(":") else None
            return addr.strip(), (port.strip() if port else None)
        if ":" in netloc:
            host_part, port_part = netloc.rsplit(":", 1)
            return host_part.strip(), (port_part.strip() or None)
        return netloc.strip(), None

    # scp-like syntax never carries a port: git@host:owner/repo.git
    if "@" in text and ":" in text:
        tail = text.split("@", 1)[1]
        return tail.split(":", 1)[0].strip(), None

    # Bare "host/..." string.
    return text.split("/", 1)[0].strip(), None
def normalize_provider_host(host: str) -> str:
    """
    Normalize a host string for provider lookups.

    Strips IPv6 brackets and one trailing ``:port`` (only when exactly
    one colon remains, so bare IPv6 addresses are preserved), then
    lowercases the result.
    """
    value = (host or "").strip()
    if not value:
        return ""
    if value.startswith("[") and "]" in value:
        value = value[1:value.index("]")]
    if value.count(":") == 1:
        value = value.rsplit(":", 1)[0]
    return value.strip().lower()
def _strip_dot_git(name: str) -> str:
    """Trim whitespace and a trailing '.git' suffix (case-insensitive)."""
    cleaned = (name or "").strip()
    return cleaned[:-4] if cleaned.lower().endswith(".git") else cleaned


def parse_repo_from_git_url(url: str) -> Tuple[str, Optional[str], Optional[str]]:
    """
    Parse (host, owner, repo_name) from common Git remote URLs.

    Supports:
      - ssh://git@host:2201/owner/repo.git
      - https://host/owner/repo.git
      - git@host:owner/repo.git
      - host/owner/repo(.git)   (best-effort)

    Returns:
        (host, owner, repo_name); owner/repo_name are None when they
        cannot be derived.
    """
    text = (url or "").strip()
    if not text:
        return "", None, None

    def _owner_repo(path: str) -> Tuple[Optional[str], Optional[str]]:
        # The first two non-empty path segments are owner and repository.
        segments = [seg for seg in path.split("/") if seg]
        if len(segments) < 2:
            return None, None
        return segments[0], _strip_dot_git(segments[1])

    # Full URL form (ssh://, https://, http://, ...). urlparse.hostname
    # already excludes any :port and user@ prefix.
    if "://" in text:
        parsed = urlparse(text)
        owner, repo_name = _owner_repo((parsed.path or "").strip("/"))
        return (parsed.hostname or "").strip(), owner, repo_name

    # SCP-like form: git@host:owner/repo.git
    # Use partition (not split()[1]) so an input whose only colon precedes
    # the '@' no longer raises IndexError -- it degrades to (host, None, None).
    if "@" in text and ":" in text:
        remainder = text.split("@", 1)[1]
        host_part, _, path_part = remainder.partition(":")
        owner, repo_name = _owner_repo(path_part.strip("/"))
        return host_part.strip(), owner, repo_name

    # Best-effort fallback: host/owner/repo(.git)
    host_part, _, path_part = text.partition("/")
    owner, repo_name = _owner_repo(path_part)
    return host_part.strip(), owner, repo_name

View File

@@ -0,0 +1,5 @@
from __future__ import annotations
from .workflow import publish
__all__ = ["publish"]

View File

@@ -0,0 +1,17 @@
from __future__ import annotations
from pkgmgr.core.git import run_git
from pkgmgr.core.version.semver import SemVer, is_semver_tag
def head_semver_tags(cwd: str = ".") -> list[str]:
    """
    Return all 'v'-prefixed semver tags pointing at HEAD, sorted ascending.

    Returns an empty list when HEAD carries no matching tag.
    """
    raw = run_git(["tag", "--points-at", "HEAD"], cwd=cwd)
    if not raw:
        return []
    candidates = (line.strip() for line in raw.splitlines())
    semver_tags = [
        tag for tag in candidates
        if tag and is_semver_tag(tag) and tag.startswith("v")
    ]
    if not semver_tags:
        return []
    return sorted(semver_tags, key=SemVer.parse)

View File

@@ -0,0 +1,24 @@
from __future__ import annotations
from urllib.parse import urlparse
from .types import PyPITarget
def parse_pypi_project_url(url: str) -> PyPITarget | None:
    """
    Parse a PyPI project page URL into a PyPITarget.

    Accepts ``https://pypi.org/project/<name>/`` (and the test.pypi.org
    equivalent); returns None for anything else.
    """
    candidate = (url or "").strip()
    if not candidate:
        return None
    parsed = urlparse(candidate)
    netloc = (parsed.netloc or "").lower()
    if netloc not in ("pypi.org", "test.pypi.org"):
        return None
    segments = [seg for seg in (parsed.path or "").strip("/").split("/") if seg]
    if len(segments) >= 2 and segments[0] == "project":
        return PyPITarget(host=netloc, project=segments[1])
    return None

View File

@@ -0,0 +1,9 @@
from __future__ import annotations
from dataclasses import dataclass
@dataclass(frozen=True)
class PyPITarget:
    """A publish target on a PyPI-compatible index.

    Frozen (hashable, immutable) so targets can be compared and deduplicated.
    """
    # Index hostname, e.g. "pypi.org" or "test.pypi.org".
    host: str
    # Project (distribution) name as it appears in the /project/<name>/ URL.
    project: str

View File

@@ -0,0 +1,112 @@
from __future__ import annotations

import glob
import os
import shutil
import subprocess
import sys

from pkgmgr.actions.mirror.io import read_mirrors_file
from pkgmgr.actions.mirror.types import Repository
from pkgmgr.core.credentials.resolver import ResolutionOptions, TokenResolver
from pkgmgr.core.version.semver import SemVer

from .git_tags import head_semver_tags
from .pypi_url import parse_pypi_project_url
def _require_tool(module: str) -> None:
    """
    Verify that *module* is runnable via ``-m``; raise RuntimeError if not.

    Uses sys.executable rather than a bare "python" so the check runs
    against the interpreter pkgmgr itself runs under -- a bare "python"
    may be absent from PATH or point at a different interpreter than the
    one that will run build/twine.

    Raises:
        RuntimeError: when the module cannot be invoked.
    """
    try:
        subprocess.run(
            [sys.executable, "-m", module, "--help"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            check=True,
        )
    except Exception as exc:
        raise RuntimeError(
            f"Required Python module '{module}' is not available. "
            f"Install it via: pip install {module}"
        ) from exc
def publish(
    repo: Repository,
    repo_dir: str,
    *,
    preview: bool = False,
    interactive: bool = True,
    allow_prompt: bool = True,
) -> None:
    """
    Build and upload the repository to its configured PyPI mirror.

    Workflow:
      1. Find exactly one PyPI target among the repo's mirrors (skip if none).
      2. Require a semver tag on HEAD (skip if none).
      3. Build sdist/wheel with ``python -m build`` and upload via twine,
         authenticating with a token from the TokenResolver.

    Raises:
        RuntimeError: on ambiguous PyPI mirrors, missing build artifacts,
            or missing build/twine tooling.
    """
    mirrors = read_mirrors_file(repo_dir)
    # Deduplicate: several mirror entries may legitimately point at the
    # same project (PyPITarget is a frozen dataclass, hence hashable), so
    # only *distinct* targets make the publish ambiguous.
    targets = {t for t in map(parse_pypi_project_url, mirrors.values()) if t}
    if not targets:
        print("[INFO] No PyPI mirror found. Skipping publish.")
        return
    if len(targets) > 1:
        raise RuntimeError("Multiple PyPI mirrors found; refusing to publish.")

    tags = head_semver_tags(cwd=repo_dir)
    if not tags:
        print("[INFO] No version tag on HEAD. Skipping publish.")
        return
    tag = max(tags, key=SemVer.parse)
    target = next(iter(targets))

    print(f"[INFO] Publishing {target.project} for tag {tag}")
    if preview:
        print("[PREVIEW] Would build and upload to PyPI.")
        return

    _require_tool("build")
    _require_tool("twine")

    # Start from a clean dist/ so only freshly built artifacts are uploaded.
    dist_dir = os.path.join(repo_dir, "dist")
    if os.path.isdir(dist_dir):
        shutil.rmtree(dist_dir, ignore_errors=True)

    # sys.executable (not bare "python") keeps the build in the same
    # interpreter/venv pkgmgr is running under.
    subprocess.run(
        [sys.executable, "-m", "build"],
        cwd=repo_dir,
        check=True,
    )
    artifacts = sorted(glob.glob(os.path.join(dist_dir, "*")))
    if not artifacts:
        raise RuntimeError("No build artifacts found in dist/.")

    resolver = TokenResolver()
    # Store the PyPI token per OS user (keyring is already user-scoped).
    # Do NOT scope it by project name.
    token = resolver.get_token(
        provider_kind="pypi",
        host=target.host,
        owner=None,
        options=ResolutionOptions(
            interactive=interactive,
            allow_prompt=allow_prompt,
            save_prompt_token_to_keyring=True,
        ),
    ).token

    env = dict(os.environ)
    env["TWINE_USERNAME"] = "__token__"
    env["TWINE_PASSWORD"] = token
    subprocess.run(
        [sys.executable, "-m", "twine", "upload", *artifacts],
        cwd=repo_dir,
        env=env,
        check=True,
    )
    print("[INFO] Publish completed.")

View File

@@ -1,143 +1,257 @@
from __future__ import annotations
import os import os
import re
import subprocess import subprocess
from dataclasses import dataclass
from typing import Any, Dict, Optional, Tuple
from urllib.parse import urlparse
import yaml import yaml
from pkgmgr.actions.mirror.io import write_mirrors_file
from pkgmgr.actions.mirror.setup_cmd import setup_mirrors
from pkgmgr.actions.repository.scaffold import render_default_templates
from pkgmgr.core.command.alias import generate_alias from pkgmgr.core.command.alias import generate_alias
from pkgmgr.core.config.save import save_user_config from pkgmgr.core.config.save import save_user_config
def create_repo(identifier, config_merged, user_config_path, bin_dir, remote=False, preview=False): Repository = Dict[str, Any]
"""
Creates a new repository by performing the following steps: _NAME_RE = re.compile(r"^[a-z0-9_-]+$")
1. Parses the identifier (provider:port/account/repository) and adds a new entry to the user config
if it is not already present. The provider part is split into provider and port (if provided). @dataclass(frozen=True)
2. Creates the local repository directory and initializes a Git repository. class RepoParts:
3. If --remote is set, checks for an existing "origin" remote (removing it if found), host: str
adds the remote using a URL built from provider, port, account, and repository, port: Optional[str]
creates an initial commit (e.g. with a README.md), and pushes to the remote. owner: str
The push is attempted on both "main" and "master" branches. name: str
"""
parts = identifier.split("/")
def _run(cmd: str, cwd: str, preview: bool) -> None:
if preview:
print(f"[Preview] Would run in {cwd}: {cmd}")
return
subprocess.run(cmd, cwd=cwd, shell=True, check=True)
def _git_get(key: str) -> str:
try:
out = subprocess.run(
f"git config --get {key}",
shell=True,
check=False,
capture_output=True,
text=True,
)
return (out.stdout or "").strip()
except Exception:
return ""
def _split_host_port(host_with_port: str) -> Tuple[str, Optional[str]]:
if ":" in host_with_port:
host, port = host_with_port.split(":", 1)
return host, port or None
return host_with_port, None
def _strip_git_suffix(name: str) -> str:
return name[:-4] if name.endswith(".git") else name
def _parse_git_url(url: str) -> RepoParts:
if url.startswith("git@") and "://" not in url:
left, right = url.split(":", 1)
host = left.split("@", 1)[1]
path = right.lstrip("/")
owner, name = path.split("/", 1)
return RepoParts(host=host, port=None, owner=owner, name=_strip_git_suffix(name))
parsed = urlparse(url)
host = (parsed.hostname or "").strip()
port = str(parsed.port) if parsed.port else None
path = (parsed.path or "").strip("/")
if not host or not path or "/" not in path:
raise ValueError(f"Could not parse git URL: {url}")
owner, name = path.split("/", 1)
return RepoParts(host=host, port=port, owner=owner, name=_strip_git_suffix(name))
def _parse_identifier(identifier: str) -> RepoParts:
ident = identifier.strip()
if "://" in ident or ident.startswith("git@"):
return _parse_git_url(ident)
parts = ident.split("/")
if len(parts) != 3: if len(parts) != 3:
print("Identifier must be in the format 'provider:port/account/repository' (port is optional).") raise ValueError("Identifier must be URL or 'provider(:port)/owner/repo'.")
host_with_port, owner, name = parts
host, port = _split_host_port(host_with_port)
return RepoParts(host=host, port=port, owner=owner, name=name)
def _ensure_valid_repo_name(name: str) -> None:
if not name or not _NAME_RE.fullmatch(name):
raise ValueError("Repository name must match: lowercase a-z, 0-9, '_' and '-'.")
def _repo_homepage(host: str, owner: str, name: str) -> str:
return f"https://{host}/{owner}/{name}"
def _build_default_primary_url(parts: RepoParts) -> str:
if parts.port:
return f"ssh://git@{parts.host}:{parts.port}/{parts.owner}/{parts.name}.git"
return f"git@{parts.host}:{parts.owner}/{parts.name}.git"
def _write_default_mirrors(repo_dir: str, primary: str, name: str, preview: bool) -> None:
mirrors = {"origin": primary, "pypi": f"https://pypi.org/project/{name}/"}
write_mirrors_file(repo_dir, mirrors, preview=preview)
def _git_init_and_initial_commit(repo_dir: str, preview: bool) -> None:
_run("git init", cwd=repo_dir, preview=preview)
_run("git add -A", cwd=repo_dir, preview=preview)
if preview:
print(f'[Preview] Would run in {repo_dir}: git commit -m "Initial commit"')
return return
provider_with_port, account, repository = parts subprocess.run('git commit -m "Initial commit"', cwd=repo_dir, shell=True, check=False)
# Split provider and port if a colon is present.
if ":" in provider_with_port:
provider_name, port = provider_with_port.split(":", 1)
else:
provider_name = provider_with_port
port = None
# Check if the repository is already present in the merged config (including port)
exists = False def _git_push_main_or_master(repo_dir: str, preview: bool) -> None:
for repo in config_merged.get("repositories", []): _run("git branch -M main", cwd=repo_dir, preview=preview)
if (repo.get("provider") == provider_name and try:
repo.get("account") == account and _run("git push -u origin main", cwd=repo_dir, preview=preview)
repo.get("repository") == repository): return
exists = True except subprocess.CalledProcessError:
print(f"Repository {identifier} already exists in the configuration.") pass
break
try:
_run("git branch -M master", cwd=repo_dir, preview=preview)
_run("git push -u origin master", cwd=repo_dir, preview=preview)
except subprocess.CalledProcessError as exc:
print(f"[WARN] Push failed: {exc}")
def create_repo(
identifier: str,
config_merged: Dict[str, Any],
user_config_path: str,
bin_dir: str,
*,
remote: bool = False,
preview: bool = False,
) -> None:
parts = _parse_identifier(identifier)
_ensure_valid_repo_name(parts.name)
directories = config_merged.get("directories") or {}
base_dir = os.path.expanduser(str(directories.get("repositories", "~/Repositories")))
repo_dir = os.path.join(base_dir, parts.host, parts.owner, parts.name)
author_name = _git_get("user.name") or "Unknown Author"
author_email = _git_get("user.email") or "unknown@example.invalid"
homepage = _repo_homepage(parts.host, parts.owner, parts.name)
primary_url = _build_default_primary_url(parts)
repositories = config_merged.get("repositories") or []
exists = any(
(
r.get("provider") == parts.host
and r.get("account") == parts.owner
and r.get("repository") == parts.name
)
for r in repositories
)
if not exists: if not exists:
# Create a new entry with an automatically generated alias. new_entry: Repository = {
new_entry = { "provider": parts.host,
"provider": provider_name, "port": parts.port,
"port": port, "account": parts.owner,
"account": account, "repository": parts.name,
"repository": repository, "homepage": homepage,
"alias": generate_alias({"repository": repository, "provider": provider_name, "account": account}, bin_dir, existing_aliases=set()), "alias": generate_alias(
"verified": {} # No initial verification info {"repository": parts.name, "provider": parts.host, "account": parts.owner},
bin_dir,
existing_aliases=set(),
),
"verified": {},
} }
# Load or initialize the user configuration.
if os.path.exists(user_config_path): if os.path.exists(user_config_path):
with open(user_config_path, "r") as f: with open(user_config_path, "r", encoding="utf-8") as f:
user_config = yaml.safe_load(f) or {} user_config = yaml.safe_load(f) or {}
else: else:
user_config = {"repositories": []} user_config = {"repositories": []}
user_config.setdefault("repositories", []) user_config.setdefault("repositories", [])
user_config["repositories"].append(new_entry) user_config["repositories"].append(new_entry)
save_user_config(user_config, user_config_path)
print(f"Repository {identifier} added to the configuration.")
# Also update the merged configuration object.
config_merged.setdefault("repositories", []).append(new_entry)
# Create the local repository directory based on the configured base directory.
base_dir = os.path.expanduser(config_merged["directories"]["repositories"])
repo_dir = os.path.join(base_dir, provider_name, account, repository)
if not os.path.exists(repo_dir):
os.makedirs(repo_dir, exist_ok=True)
print(f"Local repository directory created: {repo_dir}")
else:
print(f"Local repository directory already exists: {repo_dir}")
# Initialize a Git repository if not already initialized.
if not os.path.exists(os.path.join(repo_dir, ".git")):
cmd_init = "git init"
if preview: if preview:
print(f"[Preview] Would execute: '{cmd_init}' in {repo_dir}") print(f"[Preview] Would save user config: {user_config_path}")
else: else:
subprocess.run(cmd_init, cwd=repo_dir, shell=True, check=True) save_user_config(user_config, user_config_path)
print(f"Git repository initialized in {repo_dir}.")
config_merged.setdefault("repositories", []).append(new_entry)
repo = new_entry
print(f"[INFO] Added repository to configuration: {parts.host}/{parts.owner}/{parts.name}")
else: else:
print("Git repository is already initialized.") repo = next(
r
for r in repositories
if (
r.get("provider") == parts.host
and r.get("account") == parts.owner
and r.get("repository") == parts.name
)
)
print(f"[INFO] Repository already in configuration: {parts.host}/{parts.owner}/{parts.name}")
if preview:
print(f"[Preview] Would ensure directory exists: {repo_dir}")
else:
os.makedirs(repo_dir, exist_ok=True)
tpl_context = {
"provider": parts.host,
"port": parts.port,
"account": parts.owner,
"repository": parts.name,
"homepage": homepage,
"author_name": author_name,
"author_email": author_email,
"license_text": f"All rights reserved by {author_name}",
"primary_remote": primary_url,
}
render_default_templates(repo_dir, context=tpl_context, preview=preview)
_git_init_and_initial_commit(repo_dir, preview=preview)
_write_default_mirrors(repo_dir, primary=primary_url, name=parts.name, preview=preview)
repo.setdefault("mirrors", {})
repo["mirrors"].setdefault("origin", primary_url)
repo["mirrors"].setdefault("pypi", f"https://pypi.org/project/{parts.name}/")
setup_mirrors(
selected_repos=[repo],
repositories_base_dir=base_dir,
all_repos=config_merged.get("repositories", []),
preview=preview,
local=True,
remote=True,
ensure_remote=bool(remote),
)
if remote: if remote:
# Create a README.md if it does not exist to have content for an initial commit. _git_push_main_or_master(repo_dir, preview=preview)
readme_path = os.path.join(repo_dir, "README.md")
if not os.path.exists(readme_path):
if preview:
print(f"[Preview] Would create README.md in {repo_dir}.")
else:
with open(readme_path, "w") as f:
f.write(f"# {repository}\n")
subprocess.run("git add README.md", cwd=repo_dir, shell=True, check=True)
subprocess.run('git commit -m "Initial commit"', cwd=repo_dir, shell=True, check=True)
print("README.md created and initial commit made.")
# Build the remote URL.
if provider_name.lower() == "github.com":
remote_url = f"git@{provider_name}:{account}/{repository}.git"
else:
if port:
remote_url = f"ssh://git@{provider_name}:{port}/{account}/{repository}.git"
else:
remote_url = f"ssh://git@{provider_name}/{account}/{repository}.git"
# Check if the remote "origin" already exists.
cmd_list = "git remote"
if preview:
print(f"[Preview] Would check for existing remotes in {repo_dir}")
remote_exists = False # Assume no remote in preview mode.
else:
result = subprocess.run(cmd_list, cwd=repo_dir, shell=True, capture_output=True, text=True, check=True)
remote_list = result.stdout.strip().split()
remote_exists = "origin" in remote_list
if remote_exists:
# Remove the existing remote "origin".
cmd_remove = "git remote remove origin"
if preview:
print(f"[Preview] Would execute: '{cmd_remove}' in {repo_dir}")
else:
subprocess.run(cmd_remove, cwd=repo_dir, shell=True, check=True)
print("Existing remote 'origin' removed.")
# Now add the new remote.
cmd_remote = f"git remote add origin {remote_url}"
if preview:
print(f"[Preview] Would execute: '{cmd_remote}' in {repo_dir}")
else:
try:
subprocess.run(cmd_remote, cwd=repo_dir, shell=True, check=True)
print(f"Remote 'origin' added: {remote_url}")
except subprocess.CalledProcessError:
print(f"Failed to add remote using URL: {remote_url}.")
# Push the initial commit to the remote repository
cmd_push = "git push -u origin master"
if preview:
print(f"[Preview] Would execute: '{cmd_push}' in {repo_dir}")
else:
subprocess.run(cmd_push, cwd=repo_dir, shell=True, check=True)
print("Initial push to the remote repository completed.")

View File

@@ -0,0 +1,105 @@
from __future__ import annotations
import os
import subprocess
from pathlib import Path
from typing import Any, Dict, Optional
try:
from jinja2 import Environment, FileSystemLoader, StrictUndefined
except Exception as exc: # pragma: no cover
Environment = None # type: ignore[assignment]
FileSystemLoader = None # type: ignore[assignment]
StrictUndefined = None # type: ignore[assignment]
_JINJA_IMPORT_ERROR = exc
else:
_JINJA_IMPORT_ERROR = None
def _repo_root_from_here(anchor: Optional[Path] = None) -> str:
    """
    Locate the repository root for template lookups.

    Prefer ``git rev-parse --show-toplevel`` (robust in editable installs
    and unusual layouts); fall back to a fixed relative parent lookup.
    """
    here = (anchor or Path(__file__)).resolve().parent
    try:
        r = subprocess.run(
            ["git", "rev-parse", "--show-toplevel"],
            cwd=str(here),
            check=False,
            capture_output=True,
            text=True,
        )
        if r.returncode == 0:
            top = (r.stdout or "").strip()
            if top:
                return top
    except Exception:
        # Best effort: git may be missing entirely; use the fallback below.
        pass
    # Fallback for src/pkgmgr/actions/repository/scaffold.py:
    #   parents[0]=repository, [1]=actions, [2]=pkgmgr, [3]=src, [4]=<repo root>
    # (parents[5], used previously, pointed one level ABOVE the repo root).
    p = (anchor or Path(__file__)).resolve()
    if len(p.parents) < 5:
        raise RuntimeError(f"Unexpected path depth for: {p}")
    return str(p.parents[4])
def _templates_dir() -> str:
    """Absolute path of the bundled default templates directory."""
    root = _repo_root_from_here()
    return os.path.join(root, "templates", "default")
def render_default_templates(
    repo_dir: str,
    *,
    context: Dict[str, Any],
    preview: bool,
) -> None:
    """
    Render every templates/default/*.j2 template into *repo_dir*.

    In preview mode only the planned outputs are printed and Jinja2 is not
    required; otherwise each template is rendered with *context* using
    StrictUndefined, so missing context keys fail loudly.

    Raises:
        RuntimeError: if the templates directory is missing, or Jinja2 is
            unavailable outside preview mode.
    """
    tpl_dir = _templates_dir()
    if not os.path.isdir(tpl_dir):
        raise RuntimeError(f"Templates directory not found: {tpl_dir}")

    def _template_paths():
        # Yield (relative .j2 source, relative output path) pairs.
        for root, _, files in os.walk(tpl_dir):
            for fn in files:
                if fn.endswith(".j2"):
                    rel_src = os.path.relpath(os.path.join(root, fn), tpl_dir)
                    yield rel_src, rel_src[:-3]

    # Preview: report what would be written without importing Jinja2 at all.
    if preview:
        for rel_src, rel_out in _template_paths():
            print(f"[Preview] Would render template: {rel_src} -> {rel_out}")
        return

    if Environment is None or FileSystemLoader is None or StrictUndefined is None:
        raise RuntimeError(
            "Jinja2 is required for repo templates but is not available. "
            f"Import error: {_JINJA_IMPORT_ERROR}"
        )

    env = Environment(
        loader=FileSystemLoader(tpl_dir),
        undefined=StrictUndefined,
        autoescape=False,
        keep_trailing_newline=True,
    )
    for rel_src, rel_out in _template_paths():
        abs_out = os.path.join(repo_dir, rel_out)
        os.makedirs(os.path.dirname(abs_out), exist_ok=True)
        rendered = env.get_template(rel_src).render(**context)
        with open(abs_out, "w", encoding="utf-8") as f:
            f.write(rendered)

View File

@@ -3,7 +3,7 @@
from __future__ import annotations from __future__ import annotations
from typing import Any, Iterable from typing import Any, Iterable, List, Tuple
from pkgmgr.actions.update.system_updater import SystemUpdater from pkgmgr.actions.update.system_updater import SystemUpdater
@@ -30,32 +30,73 @@ class UpdateManager:
quiet: bool, quiet: bool,
update_dependencies: bool, update_dependencies: bool,
clone_mode: str, clone_mode: str,
silent: bool = False,
force_update: bool = True, force_update: bool = True,
) -> None: ) -> None:
from pkgmgr.actions.install import install_repos from pkgmgr.actions.install import install_repos
from pkgmgr.actions.repository.pull import pull_with_verification from pkgmgr.actions.repository.pull import pull_with_verification
from pkgmgr.core.repository.identifier import get_repo_identifier
pull_with_verification( failures: List[Tuple[str, str]] = []
selected_repos,
repositories_base_dir,
all_repos,
[],
no_verification,
preview,
)
install_repos( for repo in list(selected_repos):
selected_repos, identifier = get_repo_identifier(repo, all_repos)
repositories_base_dir,
bin_dir, try:
all_repos, pull_with_verification(
no_verification, [repo],
preview, repositories_base_dir,
quiet, all_repos,
clone_mode, [],
update_dependencies, no_verification,
force_update=force_update, preview,
) )
except SystemExit as exc:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"pull failed (exit={code})"))
if not quiet:
print(f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing...")
continue
except Exception as exc:
failures.append((identifier, f"pull failed: {exc}"))
if not quiet:
print(f"[Warning] update: pull failed for {identifier}: {exc}. Continuing...")
continue
try:
install_repos(
[repo],
repositories_base_dir,
bin_dir,
all_repos,
no_verification,
preview,
quiet,
clone_mode,
update_dependencies,
force_update=force_update,
silent=silent,
emit_summary=False,
)
except SystemExit as exc:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"install failed (exit={code})"))
if not quiet:
print(f"[Warning] update: install failed for {identifier} (exit={code}). Continuing...")
continue
except Exception as exc:
failures.append((identifier, f"install failed: {exc}"))
if not quiet:
print(f"[Warning] update: install failed for {identifier}: {exc}. Continuing...")
continue
if failures and not quiet:
print("\n[pkgmgr] Update finished with warnings:")
for ident, msg in failures:
print(f" - {ident}: {msg}")
if failures and not silent:
raise SystemExit(1)
if system_update: if system_update:
self._system_updater.run(preview=preview) self._system_updater.run(preview=preview)

View File

@@ -2,6 +2,7 @@ from .repos import handle_repos_command
from .config import handle_config from .config import handle_config
from .tools import handle_tools_command from .tools import handle_tools_command
from .release import handle_release from .release import handle_release
from .publish import handle_publish
from .version import handle_version from .version import handle_version
from .make import handle_make from .make import handle_make
from .changelog import handle_changelog from .changelog import handle_changelog
@@ -13,6 +14,7 @@ __all__ = [
"handle_config", "handle_config",
"handle_tools_command", "handle_tools_command",
"handle_release", "handle_release",
"handle_publish",
"handle_version", "handle_version",
"handle_make", "handle_make",
"handle_changelog", "handle_changelog",

View File

@@ -0,0 +1,34 @@
from __future__ import annotations
import os
from typing import Any, Dict, List
from pkgmgr.actions.publish import publish
from pkgmgr.cli.context import CLIContext
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier
Repository = Dict[str, Any]
def handle_publish(args, ctx: CLIContext, selected: List[Repository]) -> None:
    """Handle the `pkgmgr publish` subcommand.

    Runs the publish action for every selected repository whose local
    directory exists; repositories without a directory are skipped with
    a warning instead of aborting the whole run.
    """
    if not selected:
        print("[pkgmgr] No repositories selected for publish.")
        return

    # Both flags derive from the same CLI switch: non-interactive mode
    # disables prompts entirely.
    interactive = not getattr(args, "non_interactive", False)
    preview = getattr(args, "preview", False)

    for repo in selected:
        identifier = get_repo_identifier(repo, ctx.all_repositories)
        directory = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
        if not os.path.isdir(directory):
            print(f"[WARN] Skipping {identifier}: directory missing.")
            continue
        print(f"[pkgmgr] Publishing repository {identifier}...")
        publish(
            repo=repo,
            repo_dir=directory,
            preview=preview,
            interactive=interactive,
            allow_prompt=interactive,
        )

View File

@@ -1,31 +1,17 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
"""
Release command wiring for the pkgmgr CLI.
This module implements the `pkgmgr release` subcommand on top of the
generic selection logic from cli.dispatch. It does not define its
own subparser; the CLI surface is configured in cli.parser.
Responsibilities:
- Take the parsed argparse.Namespace for the `release` command.
- Use the list of selected repositories provided by dispatch_command().
- Optionally list affected repositories when --list is set.
- For each selected repository, run pkgmgr.actions.release.release(...) in
the context of that repository directory.
"""
from __future__ import annotations from __future__ import annotations
import os import os
import sys
from typing import Any, Dict, List from typing import Any, Dict, List
from pkgmgr.actions.publish import publish as run_publish
from pkgmgr.actions.release import release as run_release
from pkgmgr.cli.context import CLIContext from pkgmgr.cli.context import CLIContext
from pkgmgr.core.repository.dir import get_repo_dir from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.actions.release import release as run_release
Repository = Dict[str, Any] Repository = Dict[str, Any]
@@ -35,23 +21,10 @@ def handle_release(
ctx: CLIContext, ctx: CLIContext,
selected: List[Repository], selected: List[Repository],
) -> None: ) -> None:
"""
Handle the `pkgmgr release` subcommand.
Flow:
1) Use the `selected` repositories as computed by dispatch_command().
2) If --list is given, print the identifiers of the selected repos
and return without running any release.
3) For each selected repository:
- Resolve its identifier and local directory.
- Change into that directory.
- Call pkgmgr.actions.release.release(...) with the parsed options.
"""
if not selected: if not selected:
print("[pkgmgr] No repositories selected for release.") print("[pkgmgr] No repositories selected for release.")
return return
# List-only mode: show which repositories would be affected.
if getattr(args, "list", False): if getattr(args, "list", False):
print("[pkgmgr] Repositories that would be affected by this release:") print("[pkgmgr] Repositories that would be affected by this release:")
for repo in selected: for repo in selected:
@@ -62,29 +35,22 @@ def handle_release(
for repo in selected: for repo in selected:
identifier = get_repo_identifier(repo, ctx.all_repositories) identifier = get_repo_identifier(repo, ctx.all_repositories)
repo_dir = repo.get("directory") try:
if not repo_dir: repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
try: except Exception as exc:
repo_dir = get_repo_dir(ctx.repositories_base_dir, repo) print(f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}")
except Exception:
repo_dir = None
if not repo_dir or not os.path.isdir(repo_dir):
print(
f"[WARN] Skipping repository {identifier}: "
"local directory does not exist."
)
continue continue
print( if not os.path.isdir(repo_dir):
f"[pkgmgr] Running release for repository {identifier} " print(f"[WARN] Skipping repository {identifier}: directory missing.")
f"in '{repo_dir}'..." continue
)
print(f"[pkgmgr] Running release for repository {identifier}...")
# Change to repo directory and invoke the helper.
cwd_before = os.getcwd() cwd_before = os.getcwd()
try: try:
os.chdir(repo_dir) os.chdir(repo_dir)
run_release( run_release(
pyproject_path="pyproject.toml", pyproject_path="pyproject.toml",
changelog_path="CHANGELOG.md", changelog_path="CHANGELOG.md",
@@ -94,5 +60,17 @@ def handle_release(
force=getattr(args, "force", False), force=getattr(args, "force", False),
close=getattr(args, "close", False), close=getattr(args, "close", False),
) )
if not getattr(args, "no_publish", False):
print(f"[pkgmgr] Running publish for repository {identifier}...")
is_tty = sys.stdin.isatty()
run_publish(
repo=repo,
repo_dir=repo_dir,
preview=getattr(args, "preview", False),
interactive=is_tty,
allow_prompt=is_tty,
)
finally: finally:
os.chdir(cwd_before) os.chdir(cwd_before)

View File

@@ -68,6 +68,7 @@ def handle_repos_command(
args.clone_mode, args.clone_mode,
args.dependencies, args.dependencies,
force_update=getattr(args, "update", False), force_update=getattr(args, "update", False),
silent=getattr(args, "silent", False),
) )
return return

View File

@@ -1,6 +1,3 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations from __future__ import annotations
import os import os
@@ -16,6 +13,7 @@ from pkgmgr.cli.commands import (
handle_repos_command, handle_repos_command,
handle_tools_command, handle_tools_command,
handle_release, handle_release,
handle_publish,
handle_version, handle_version,
handle_config, handle_config,
handle_make, handle_make,
@@ -24,40 +22,20 @@ from pkgmgr.cli.commands import (
handle_mirror_command, handle_mirror_command,
) )
def _has_explicit_selection(args) -> bool:
"""
Return True if the user explicitly selected repositories via
identifiers / --all / --category / --tag / --string.
"""
identifiers = getattr(args, "identifiers", []) or []
use_all = getattr(args, "all", False)
categories = getattr(args, "category", []) or []
tags = getattr(args, "tag", []) or []
string_filter = getattr(args, "string", "") or ""
def _has_explicit_selection(args) -> bool:
return bool( return bool(
use_all getattr(args, "all", False)
or identifiers or getattr(args, "identifiers", [])
or categories or getattr(args, "category", [])
or tags or getattr(args, "tag", [])
or string_filter or getattr(args, "string", "")
) )
def _select_repo_for_current_directory( def _select_repo_for_current_directory(ctx: CLIContext) -> List[Dict[str, Any]]:
ctx: CLIContext,
) -> List[Dict[str, Any]]:
"""
Heuristic: find the repository whose local directory matches the
current working directory or is the closest parent.
Example:
- Repo directory: /home/kevin/Repositories/foo
- CWD: /home/kevin/Repositories/foo/subdir
'foo' is selected.
"""
cwd = os.path.abspath(os.getcwd()) cwd = os.path.abspath(os.getcwd())
candidates: List[tuple[str, Dict[str, Any]]] = [] matches = []
for repo in ctx.all_repositories: for repo in ctx.all_repositories:
repo_dir = repo.get("directory") repo_dir = repo.get("directory")
@@ -65,33 +43,24 @@ def _select_repo_for_current_directory(
try: try:
repo_dir = get_repo_dir(ctx.repositories_base_dir, repo) repo_dir = get_repo_dir(ctx.repositories_base_dir, repo)
except Exception: except Exception:
repo_dir = None continue
if not repo_dir:
continue
repo_dir_abs = os.path.abspath(os.path.expanduser(repo_dir)) repo_dir = os.path.abspath(os.path.expanduser(repo_dir))
if cwd == repo_dir_abs or cwd.startswith(repo_dir_abs + os.sep): if cwd == repo_dir or cwd.startswith(repo_dir + os.sep):
candidates.append((repo_dir_abs, repo)) matches.append((repo_dir, repo))
if not candidates: if not matches:
return [] return []
# Pick the repo with the longest (most specific) path. matches.sort(key=lambda x: len(x[0]), reverse=True)
candidates.sort(key=lambda item: len(item[0]), reverse=True) return [matches[0][1]]
return [candidates[0][1]]
def dispatch_command(args, ctx: CLIContext) -> None: def dispatch_command(args, ctx: CLIContext) -> None:
"""
Dispatch the parsed arguments to the appropriate command handler.
"""
# First: proxy commands (git / docker / docker compose / make wrapper etc.)
if maybe_handle_proxy(args, ctx): if maybe_handle_proxy(args, ctx):
return return
# Commands that operate on repository selections commands_with_selection = {
commands_with_selection: List[str] = [
"install", "install",
"update", "update",
"deinstall", "deinstall",
@@ -103,31 +72,25 @@ def dispatch_command(args, ctx: CLIContext) -> None:
"list", "list",
"make", "make",
"release", "release",
"publish",
"version", "version",
"changelog", "changelog",
"explore", "explore",
"terminal", "terminal",
"code", "code",
"mirror", "mirror",
] }
if getattr(args, "command", None) in commands_with_selection: if args.command in commands_with_selection:
if _has_explicit_selection(args): selected = (
# Classic selection logic (identifiers / --all / filters) get_selected_repos(args, ctx.all_repositories)
selected = get_selected_repos(args, ctx.all_repositories) if _has_explicit_selection(args)
else: else _select_repo_for_current_directory(ctx)
# Default per help text: repository of current folder. )
selected = _select_repo_for_current_directory(ctx)
# If none is found, leave 'selected' empty.
# Individual handlers will then emit a clear message instead
# of silently picking an unrelated repository.
else: else:
selected = [] selected = []
# ------------------------------------------------------------------ # if args.command in {
# Repos-related commands
# ------------------------------------------------------------------ #
if args.command in (
"install", "install",
"deinstall", "deinstall",
"delete", "delete",
@@ -136,15 +99,13 @@ def dispatch_command(args, ctx: CLIContext) -> None:
"shell", "shell",
"create", "create",
"list", "list",
): }:
handle_repos_command(args, ctx, selected) handle_repos_command(args, ctx, selected)
return return
# ------------------------------------------------------------
# update
# ------------------------------------------------------------
if args.command == "update": if args.command == "update":
from pkgmgr.actions.update import UpdateManager from pkgmgr.actions.update import UpdateManager
UpdateManager().run( UpdateManager().run(
selected_repos=selected, selected_repos=selected,
repositories_base_dir=ctx.repositories_base_dir, repositories_base_dir=ctx.repositories_base_dir,
@@ -156,25 +117,23 @@ def dispatch_command(args, ctx: CLIContext) -> None:
quiet=args.quiet, quiet=args.quiet,
update_dependencies=args.dependencies, update_dependencies=args.dependencies,
clone_mode=args.clone_mode, clone_mode=args.clone_mode,
silent=getattr(args, "silent", False),
force_update=True, force_update=True,
) )
return return
# ------------------------------------------------------------------ #
# Tools (explore / terminal / code)
# ------------------------------------------------------------------ #
if args.command in ("explore", "terminal", "code"): if args.command in ("explore", "terminal", "code"):
handle_tools_command(args, ctx, selected) handle_tools_command(args, ctx, selected)
return return
# ------------------------------------------------------------------ #
# Release / Version / Changelog / Config / Make / Branch
# ------------------------------------------------------------------ #
if args.command == "release": if args.command == "release":
handle_release(args, ctx, selected) handle_release(args, ctx, selected)
return return
if args.command == "publish":
handle_publish(args, ctx, selected)
return
if args.command == "version": if args.command == "version":
handle_version(args, ctx, selected) handle_version(args, ctx, selected)
return return

View File

@@ -1,68 +1,73 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations from __future__ import annotations
import argparse import argparse
from pkgmgr.cli.proxy import register_proxy_commands from pkgmgr.cli.proxy import register_proxy_commands
from .common import SortedSubParsersAction
from .install_update import add_install_update_subparsers
from .config_cmd import add_config_subparsers
from .navigation_cmd import add_navigation_subparsers
from .branch_cmd import add_branch_subparsers from .branch_cmd import add_branch_subparsers
from .release_cmd import add_release_subparser
from .version_cmd import add_version_subparser
from .changelog_cmd import add_changelog_subparser from .changelog_cmd import add_changelog_subparser
from .common import SortedSubParsersAction
from .config_cmd import add_config_subparsers
from .install_update import add_install_update_subparsers
from .list_cmd import add_list_subparser from .list_cmd import add_list_subparser
from .make_cmd import add_make_subparsers from .make_cmd import add_make_subparsers
from .mirror_cmd import add_mirror_subparsers from .mirror_cmd import add_mirror_subparsers
from .navigation_cmd import add_navigation_subparsers
from .publish_cmd import add_publish_subparser
from .release_cmd import add_release_subparser
from .version_cmd import add_version_subparser
def create_parser(description_text: str) -> argparse.ArgumentParser: def create_parser(description_text: str) -> argparse.ArgumentParser:
"""
Create the top-level argument parser for pkgmgr.
"""
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description=description_text, description=description_text,
formatter_class=argparse.RawTextHelpFormatter, formatter_class=argparse.RawTextHelpFormatter,
) )
subparsers = parser.add_subparsers( subparsers = parser.add_subparsers(
dest="command", dest="command",
help="Subcommands", help="Subcommands",
action=SortedSubParsersAction, action=SortedSubParsersAction,
) )
# Core repo operations # create
p_create = subparsers.add_parser(
"create",
help="Create a new repository (scaffold + config).",
)
p_create.add_argument(
"identifiers",
nargs="+",
help="Repository identifier(s): URL or 'provider(:port)/owner/repo'.",
)
p_create.add_argument(
"--remote",
action="store_true",
help="Also push an initial commit to the remote (main/master).",
)
p_create.add_argument(
"--preview",
action="store_true",
help="Print actions without writing files or executing commands.",
)
add_install_update_subparsers(subparsers) add_install_update_subparsers(subparsers)
add_config_subparsers(subparsers) add_config_subparsers(subparsers)
# Navigation / tooling around repos
add_navigation_subparsers(subparsers) add_navigation_subparsers(subparsers)
# Branch & release workflow
add_branch_subparsers(subparsers) add_branch_subparsers(subparsers)
add_release_subparser(subparsers) add_release_subparser(subparsers)
add_publish_subparser(subparsers)
# Info commands
add_version_subparser(subparsers) add_version_subparser(subparsers)
add_changelog_subparser(subparsers) add_changelog_subparser(subparsers)
add_list_subparser(subparsers) add_list_subparser(subparsers)
# Make wrapper
add_make_subparsers(subparsers) add_make_subparsers(subparsers)
# Mirror management
add_mirror_subparsers(subparsers) add_mirror_subparsers(subparsers)
# Proxy commands (git, docker, docker compose, ...)
register_proxy_commands(subparsers) register_proxy_commands(subparsers)
return parser return parser
__all__ = [ __all__ = ["create_parser", "SortedSubParsersAction"]
"create_parser",
"SortedSubParsersAction",
]

View File

@@ -168,3 +168,10 @@ def add_install_update_arguments(subparser: argparse.ArgumentParser) -> None:
default="ssh", default="ssh",
help="Specify clone mode (default: ssh).", help="Specify clone mode (default: ssh).",
) )
_add_option_if_missing(
subparser,
"--silent",
action="store_true",
help="Continue with other repositories if one fails; downgrade errors to warnings.",
)

View File

@@ -0,0 +1,19 @@
from __future__ import annotations
import argparse
from .common import add_identifier_arguments
def add_publish_subparser(subparsers: argparse._SubParsersAction) -> None:
    """Register the `publish` subcommand on the CLI subparsers action."""
    publish_parser = subparsers.add_parser(
        "publish",
        help="Publish repository artifacts (e.g. PyPI) based on MIRRORS.",
    )
    # Generic repository selection (identifiers / --all / filters / --preview).
    add_identifier_arguments(publish_parser)
    publish_parser.add_argument(
        "--non-interactive",
        action="store_true",
        help="Disable interactive credential prompts (CI mode).",
    )

View File

@@ -21,22 +21,22 @@ def add_release_subparser(
"and updating the changelog." "and updating the changelog."
), ),
) )
release_parser.add_argument( release_parser.add_argument(
"release_type", "release_type",
choices=["major", "minor", "patch"], choices=["major", "minor", "patch"],
help="Type of version increment for the release (major, minor, patch).", help="Type of version increment for the release (major, minor, patch).",
) )
release_parser.add_argument( release_parser.add_argument(
"-m", "-m",
"--message", "--message",
default=None, default=None,
help=( help="Optional release message to add to the changelog and tag.",
"Optional release message to add to the changelog and tag."
),
) )
# Generic selection / preview / list / extra_args
add_identifier_arguments(release_parser) add_identifier_arguments(release_parser)
# Close current branch after successful release
release_parser.add_argument( release_parser.add_argument(
"--close", "--close",
action="store_true", action="store_true",
@@ -45,7 +45,7 @@ def add_release_subparser(
"repository, if it is not main/master." "repository, if it is not main/master."
), ),
) )
# Force: skip preview+confirmation and run release directly
release_parser.add_argument( release_parser.add_argument(
"-f", "-f",
"--force", "--force",
@@ -55,3 +55,9 @@ def add_release_subparser(
"release directly." "release directly."
), ),
) )
release_parser.add_argument(
"--no-publish",
action="store_true",
help="Do not run publish automatically after a successful release.",
)

View File

@@ -9,15 +9,33 @@ from ..types import KeyringUnavailableError, TokenRequest, TokenResult
def _import_keyring(): def _import_keyring():
"""
Import python-keyring.
Raises:
KeyringUnavailableError if:
- library is missing
- no backend is configured / usable
- import fails for any reason
"""
try: try:
import keyring # type: ignore import keyring # type: ignore
return keyring
except Exception as exc: # noqa: BLE001 except Exception as exc: # noqa: BLE001
raise KeyringUnavailableError( raise KeyringUnavailableError(
"python-keyring is not available or no backend is configured." "python-keyring is not installed."
) from exc ) from exc
# Some environments have keyring installed but no usable backend.
# We do a lightweight "backend sanity check" by attempting to read the backend.
try:
_ = keyring.get_keyring()
except Exception as exc: # noqa: BLE001
raise KeyringUnavailableError(
"python-keyring is installed but no usable keyring backend is configured."
) from exc
return keyring
@dataclass(frozen=True) @dataclass(frozen=True)
class KeyringTokenProvider: class KeyringTokenProvider:

View File

@@ -9,6 +9,37 @@ from typing import Optional
from ..types import TokenRequest, TokenResult from ..types import TokenRequest, TokenResult
def _token_help_url(provider_kind: str, host: str) -> Optional[str]:
"""
Return a provider-specific URL where a user can create/get an API token.
Keep this conservative and stable:
- GitHub: official token settings URL
- Gitea/Forgejo: common settings path on the given host
- GitLab: common personal access token path
"""
kind = (provider_kind or "").strip().lower()
h = (host or "").strip()
# GitHub (cloud)
if kind == "github":
return "https://github.com/settings/tokens"
# Gitea / Forgejo (self-hosted)
if kind in ("gitea", "forgejo"):
# Typical UI path: Settings -> Applications -> Access Tokens
# In many installations this is available at /user/settings/applications
base = f"https://{h}".rstrip("/")
return f"{base}/user/settings/applications"
# GitLab (cloud or self-hosted)
if kind == "gitlab":
base = "https://gitlab.com" if not h else f"https://{h}".rstrip("/")
return f"{base}/-/profile/personal_access_tokens"
return None
@dataclass(frozen=True) @dataclass(frozen=True)
class PromptTokenProvider: class PromptTokenProvider:
"""Interactively prompt for a token. """Interactively prompt for a token.
@@ -25,6 +56,11 @@ class PromptTokenProvider:
return None return None
owner_info = f" (owner: {request.owner})" if request.owner else "" owner_info = f" (owner: {request.owner})" if request.owner else ""
help_url = _token_help_url(request.provider_kind, request.host)
if help_url:
print(f"[INFO] Create/get your token here: {help_url}")
prompt = f"Enter API token for {request.provider_kind} on {request.host}{owner_info}: " prompt = f"Enter API token for {request.provider_kind} on {request.host}{owner_info}: "
token = (getpass(prompt) or "").strip() token = (getpass(prompt) or "").strip()
if not token: if not token:

View File

@@ -1,13 +1,14 @@
# src/pkgmgr/core/credentials/resolver.py # src/pkgmgr/core/credentials/resolver.py
from __future__ import annotations from __future__ import annotations
import sys
from dataclasses import dataclass from dataclasses import dataclass
from typing import Optional from typing import Optional
from .providers.env import EnvTokenProvider from .providers.env import EnvTokenProvider
from .providers.keyring import KeyringTokenProvider from .providers.keyring import KeyringTokenProvider
from .providers.prompt import PromptTokenProvider from .providers.prompt import PromptTokenProvider
from .types import NoCredentialsError, TokenRequest, TokenResult from .types import KeyringUnavailableError, NoCredentialsError, TokenRequest, TokenResult
@dataclass(frozen=True) @dataclass(frozen=True)
@@ -26,6 +27,26 @@ class TokenResolver:
self._env = EnvTokenProvider() self._env = EnvTokenProvider()
self._keyring = KeyringTokenProvider() self._keyring = KeyringTokenProvider()
self._prompt = PromptTokenProvider() self._prompt = PromptTokenProvider()
self._warned_keyring: bool = False
def _warn_keyring_unavailable(self, exc: Exception) -> None:
if self._warned_keyring:
return
self._warned_keyring = True
msg = str(exc).strip() or "Keyring is unavailable."
print("[WARN] Keyring support is not available.", file=sys.stderr)
print(f" {msg}", file=sys.stderr)
print(" Tokens will NOT be persisted securely.", file=sys.stderr)
print("", file=sys.stderr)
print(" To enable secure token storage, install python-keyring:", file=sys.stderr)
print(" pip install keyring", file=sys.stderr)
print("", file=sys.stderr)
print(" Or install via system packages:", file=sys.stderr)
print(" sudo apt install python3-keyring", file=sys.stderr)
print(" sudo pacman -S python-keyring", file=sys.stderr)
print(" sudo dnf install python3-keyring", file=sys.stderr)
print("", file=sys.stderr)
def get_token( def get_token(
self, self,
@@ -47,9 +68,11 @@ class TokenResolver:
kr_res = self._keyring.get(request) kr_res = self._keyring.get(request)
if kr_res: if kr_res:
return kr_res return kr_res
except KeyringUnavailableError as exc:
# Show a helpful warning once, then continue (prompt fallback).
self._warn_keyring_unavailable(exc)
except Exception: except Exception:
# Keyring missing/unavailable: ignore to allow prompt (workstations) # Unknown keyring errors: do not block prompting; still avoid hard crash.
# or to fail cleanly below (headless CI without prompt).
pass pass
# 3) Prompt (optional) # 3) Prompt (optional)
@@ -59,6 +82,8 @@ class TokenResolver:
if opts.save_prompt_token_to_keyring: if opts.save_prompt_token_to_keyring:
try: try:
self._keyring.set(request, prompt_res.token) self._keyring.set(request, prompt_res.token)
except KeyringUnavailableError as exc:
self._warn_keyring_unavailable(exc)
except Exception: except Exception:
# If keyring cannot store, still use token for this run. # If keyring cannot store, still use token for this run.
pass pass

View File

@@ -64,10 +64,12 @@ def ensure_remote_repo(
provider = reg.resolve(spec.host) provider = reg.resolve(spec.host)
if provider_hint and provider_hint.kind: if provider_hint and provider_hint.kind:
forced = provider_hint.kind.strip().lower() forced = provider_hint.kind.strip().lower()
provider = next( forced_provider = next(
(p for p in reg.providers if getattr(p, "kind", "").lower() == forced), (p for p in reg.providers if getattr(p, "kind", "").lower() == forced),
None, None,
) )
if forced_provider is not None:
provider = forced_provider
if provider is None: if provider is None:
raise UnsupportedProviderError(f"No provider matched host: {spec.host}") raise UnsupportedProviderError(f"No provider matched host: {spec.host}")

View File

@@ -0,0 +1,5 @@
.venv/
dist/
build/
__pycache__/
*.pyc

View File

@@ -0,0 +1 @@
{{ license_text }}

View File

@@ -0,0 +1,6 @@
# {{ repository }}
Homepage: {{ homepage }}
## Author
{{ author_name }} <{{ author_email }}>

View File

@@ -0,0 +1,11 @@
{
description = "{{ repository }}";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
outputs = { self, nixpkgs }:
let system = "x86_64-linux"; pkgs = import nixpkgs { inherit system; };
in {
devShells.${system}.default = pkgs.mkShell {
packages = with pkgs; [ python312 python312Packages.pytest python312Packages.ruff ];
};
};
}

View File

@@ -0,0 +1,21 @@
[build-system]
requires = ["setuptools>=68", "wheel"]
build-backend = "setuptools.build_meta"
[project]
name = "{{ repository }}"
version = "0.1.0"
description = ""
readme = "README.md"
requires-python = ">=3.10"
authors = [{ name = "{{ author_name }}", email = "{{ author_email }}" }]
license = { text = "{{ license_text }}" }
urls = { Homepage = "{{ homepage }}" }
dependencies = []
[tool.setuptools]
package-dir = {"" = "src"}
[tool.setuptools.packages.find]
where = ["src"]

View File

@@ -1,164 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
E2E integration tests for the `pkgmgr mirror` command family.
Covered commands:
- pkgmgr mirror --help
- pkgmgr mirror list --preview --all
- pkgmgr mirror diff --preview --all
- pkgmgr mirror merge config file --preview --all
- pkgmgr mirror setup --preview --all
- pkgmgr mirror check --preview --all
- pkgmgr mirror provision --preview --all
All commands are executed via the real CLI entry point (main module).
With --preview enabled, all operations are non-destructive and safe
to run inside CI containers.
"""
import io
import runpy
import sys
import unittest
from contextlib import redirect_stdout, redirect_stderr
class TestIntegrationMirrorCommands(unittest.TestCase):
    """End-to-end checks for the `pkgmgr mirror` command family.

    Every command is driven through the real CLI entry point (the
    `pkgmgr` module executed as `__main__`) with --preview enabled,
    so all operations stay non-destructive and CI-safe.
    """

    # ------------------------------------------------------------
    # Helpers
    # ------------------------------------------------------------
    def _run_pkgmgr(self, args):
        """Invoke `pkgmgr <args>` in-process and return captured output.

        SystemExit(0) and SystemExit(None) count as success; any other
        exit code fails the test with the captured output attached.
        """
        saved_argv = list(sys.argv)
        captured = io.StringIO()
        cmd_repr = "pkgmgr " + " ".join(args)
        try:
            sys.argv = ["pkgmgr"] + list(args)
            try:
                with redirect_stdout(captured), redirect_stderr(captured):
                    runpy.run_module("pkgmgr", run_name="__main__")
            except SystemExit as exc:
                code = exc.code if isinstance(exc.code, int) else None
                if code not in (0, None):
                    raise AssertionError(
                        "%r failed with exit code %r.\n\nOutput:\n%s"
                        % (cmd_repr, exc.code, captured.getvalue())
                    )
            return captured.getvalue()
        finally:
            sys.argv = saved_argv

    def _assert_preview_output(self, args, message):
        """Run a preview-mode command and require non-empty output."""
        output = self._run_pkgmgr(args)
        self.assertTrue(output.strip(), message)

    # ------------------------------------------------------------
    # Tests
    # ------------------------------------------------------------
    def test_mirror_help(self):
        """`pkgmgr mirror --help` exits cleanly and prints usage text."""
        output = self._run_pkgmgr(["mirror", "--help"])
        self.assertIn("usage:", output)
        self.assertIn("pkgmgr mirror", output)

    def test_mirror_list_preview_all(self):
        """`pkgmgr mirror list --preview --all` produces output."""
        self._assert_preview_output(
            ["mirror", "list", "--preview", "--all"],
            "Expected output from mirror list",
        )

    def test_mirror_diff_preview_all(self):
        """`pkgmgr mirror diff --preview --all` produces output."""
        self._assert_preview_output(
            ["mirror", "diff", "--preview", "--all"],
            "Expected output from mirror diff",
        )

    def test_mirror_merge_config_to_file_preview_all(self):
        """`pkgmgr mirror merge config file --preview --all` produces output."""
        self._assert_preview_output(
            ["mirror", "merge", "config", "file", "--preview", "--all"],
            "Expected output from mirror merge (config -> file)",
        )

    def test_mirror_setup_preview_all(self):
        """`pkgmgr mirror setup --preview --all` produces output."""
        self._assert_preview_output(
            ["mirror", "setup", "--preview", "--all"],
            "Expected output from mirror setup",
        )

    def test_mirror_check_preview_all(self):
        """`pkgmgr mirror check --preview --all`.

        Performs non-destructive remote checks (git ls-remote).
        """
        self._assert_preview_output(
            ["mirror", "check", "--preview", "--all"],
            "Expected output from mirror check",
        )

    def test_mirror_provision_preview_all(self):
        """`pkgmgr mirror provision --preview --all`.

        In preview mode this MUST NOT create remote repositories.
        """
        self._assert_preview_output(
            ["mirror", "provision", "--preview", "--all"],
            "Expected output from mirror provision (preview)",
        )
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,70 @@
from __future__ import annotations
import os
import shutil
import subprocess
import unittest
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
def _run_help(cmd: list[str], label: str) -> str:
print(f"\n[TEST] Running ({label}): {' '.join(cmd)}")
proc = subprocess.run(
cmd,
cwd=PROJECT_ROOT,
text=True,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
check=False,
env=os.environ.copy(),
)
print(proc.stdout.rstrip())
# For --help we expect success (0). Anything else is an error.
if proc.returncode != 0:
raise AssertionError(
f"[TEST] Help command failed ({label}).\n"
f"Command: {' '.join(cmd)}\n"
f"Exit code: {proc.returncode}\n"
f"--- output ---\n{proc.stdout}\n"
)
return proc.stdout
class TestPublishHelpE2E(unittest.TestCase):
    """Smoke tests: the `publish` subcommand is wired into the CLI help."""

    def _require_nix(self) -> None:
        # Nix-based invocations only make sense where `nix` is installed.
        if shutil.which("nix") is None:
            self.skipTest("nix is not available in this environment")

    def test_pkgmgr_publish_help(self) -> None:
        output = _run_help(["pkgmgr", "publish", "--help"], "pkgmgr publish --help")
        self.assertIn("usage:", output)
        self.assertIn("publish", output)

    def test_pkgmgr_help_mentions_publish(self) -> None:
        output = _run_help(["pkgmgr", "--help"], "pkgmgr --help")
        self.assertIn("publish", output)

    def test_nix_run_pkgmgr_publish_help(self) -> None:
        self._require_nix()
        output = _run_help(
            ["nix", "run", ".#pkgmgr", "--", "publish", "--help"],
            "nix run .#pkgmgr -- publish --help",
        )
        self.assertIn("usage:", output)
        self.assertIn("publish", output)

    def test_nix_run_pkgmgr_help_mentions_publish(self) -> None:
        self._require_nix()
        output = _run_help(
            ["nix", "run", ".#pkgmgr", "--", "--help"],
            "nix run .#pkgmgr -- --help",
        )
        self.assertIn("publish", output)
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,42 @@
from __future__ import annotations
import io
import unittest
from contextlib import redirect_stdout
from unittest.mock import patch
from pkgmgr.actions.repository.create import create_repo
class TestE2ECreateRepoPreviewOutput(unittest.TestCase):
    """Verifies that create_repo() in preview mode prints its planned steps."""

    def test_create_repo_preview_prints_expected_steps(self) -> None:
        # Minimal merged-config shape consumed by create_repo().
        cfg = {"directories": {"repositories": "/tmp/Repositories"}, "repositories": []}
        out = io.StringIO()
        # Patch out every filesystem/process side effect so only the preview
        # messages written to stdout are exercised.
        with (
            redirect_stdout(out),
            patch("pkgmgr.actions.repository.create.os.path.exists", return_value=False),
            patch("pkgmgr.actions.repository.create.generate_alias", return_value="repo"),
            patch("pkgmgr.actions.repository.create.save_user_config"),
            patch("pkgmgr.actions.repository.create.os.makedirs"),
            patch("pkgmgr.actions.repository.create.render_default_templates"),
            patch("pkgmgr.actions.repository.create.write_mirrors_file"),
            patch("pkgmgr.actions.repository.create.setup_mirrors"),
            patch("pkgmgr.actions.repository.create.subprocess.run"),
        ):
            create_repo(
                "github.com/acme/repo",
                cfg,
                "/tmp/user.yml",
                "/tmp/bin",
                remote=False,
                preview=True,
            )
        s = out.getvalue()
        # Preview mode must announce, not perform, the config save and mkdir.
        self.assertIn("[Preview] Would save user config:", s)
        self.assertIn("[Preview] Would ensure directory exists:", s)
if __name__ == "__main__":
    # Allow running this test module directly (outside pytest/CI).
    unittest.main()

View File

@@ -96,6 +96,7 @@ class TestIntegrationUpdateAllshallowNoSystem(unittest.TestCase):
"--clone-mode", "--clone-mode",
"shallow", "shallow",
"--no-verification", "--no-verification",
"--silent",
] ]
self._run_cmd(["pkgmgr", *args], label="pkgmgr", env=env) self._run_cmd(["pkgmgr", *args], label="pkgmgr", env=env)
pkgmgr_help_debug() pkgmgr_help_debug()
@@ -110,6 +111,7 @@ class TestIntegrationUpdateAllshallowNoSystem(unittest.TestCase):
"--clone-mode", "--clone-mode",
"shallow", "shallow",
"--no-verification", "--no-verification",
"--silent",
] ]
self._run_cmd( self._run_cmd(
["nix", "run", ".#pkgmgr", "--", *args], ["nix", "run", ".#pkgmgr", "--", *args],

View File

@@ -0,0 +1,172 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
CLI integration tests for `pkgmgr mirror`.
These tests validate:
- CLI argument parsing
- command dispatch
- command orchestration
All side effects (git, network, remote provisioning, filesystem writes)
are patched to keep tests deterministic and CI-safe.
"""
from __future__ import annotations
import importlib
import io
import os
import runpy
import sys
import unittest
from contextlib import ExitStack, redirect_stderr, redirect_stdout
from typing import Dict, List, Optional
from unittest.mock import MagicMock, PropertyMock, patch
class TestIntegrationMirrorCommands(unittest.TestCase):
    """
    Integration tests for `pkgmgr mirror` commands.

    Each test drives the real CLI entry point via runpy while all
    network/destructive collaborators are patched, so only argument
    parsing, dispatch, and orchestration are exercised.
    """

    def _run_pkgmgr(self, args: List[str], extra_env: Optional[Dict[str, str]] = None) -> str:
        """
        Execute pkgmgr with the given arguments and return captured output.
        - Treat SystemExit(0) or SystemExit(None) as success.
        - Any other exit code is considered a test failure.
        - Mirror commands are patched to avoid network/destructive operations.
        """
        # Snapshot process-global state so it can be restored in `finally`.
        original_argv = list(sys.argv)
        original_env = dict(os.environ)
        buffer = io.StringIO()
        cmd_repr = "pkgmgr " + " ".join(args)
        # Shared dummy context used by multiple mirror commands
        dummy_ctx = MagicMock()
        dummy_ctx.identifier = "dummy-repo"
        dummy_ctx.repo_dir = "/tmp/dummy-repo"
        dummy_ctx.config_mirrors = {"origin": "git@github.com:alice/repo.git"}
        dummy_ctx.file_mirrors = {"backup": "ssh://git@git.example:2201/alice/repo.git"}
        # resolved_mirrors is a property on the real context, so it must be
        # mocked on the type, not the instance.
        type(dummy_ctx).resolved_mirrors = PropertyMock(
            return_value={
                "origin": "git@github.com:alice/repo.git",
                "backup": "ssh://git@git.example:2201/alice/repo.git",
            }
        )
        # Helper: patch with create=True so missing symbols don't explode.
        # IMPORTANT: patch() does not auto-import submodules when resolving dotted names.
        def _p(target: str, **kwargs):
            module_name = target.rsplit(".", 1)[0]
            try:
                importlib.import_module(module_name)
            except ModuleNotFoundError:
                # If the module truly doesn't exist, create=True may still allow patching
                # in some cases, but dotted resolution can still fail. Best-effort.
                pass
            return patch(target, create=True, **kwargs)
        # Fake result for remote provisioning (preview-safe)
        def _fake_ensure_remote_repo(spec, provider_hint=None, options=None):
            # Safety: E2E should only ever call this in preview mode
            if options is not None and getattr(options, "preview", False) is not True:
                raise AssertionError(
                    f"{cmd_repr} attempted ensure_remote_repo without preview=True in E2E."
                )
            r = MagicMock()
            r.status = "preview"
            r.message = "Preview mode (E2E patched): no remote provisioning performed."
            r.url = None
            return r
        try:
            sys.argv = ["pkgmgr"] + list(args)
            if extra_env:
                os.environ.update(extra_env)
            with ExitStack() as stack:
                # build_context is imported directly in these modules:
                stack.enter_context(_p("pkgmgr.actions.mirror.list_cmd.build_context", return_value=dummy_ctx))
                stack.enter_context(_p("pkgmgr.actions.mirror.diff_cmd.build_context", return_value=dummy_ctx))
                stack.enter_context(_p("pkgmgr.actions.mirror.merge_cmd.build_context", return_value=dummy_ctx))
                stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.build_context", return_value=dummy_ctx))
                stack.enter_context(_p("pkgmgr.actions.mirror.remote_provision.build_context", return_value=dummy_ctx))
                # Deterministic remote probing (covers setup + likely check implementations)
                stack.enter_context(_p("pkgmgr.actions.mirror.remote_check.probe_mirror", return_value=(True, "")))
                stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.probe_mirror", return_value=(True, "")))
                stack.enter_context(_p("pkgmgr.actions.mirror.git_remote.is_remote_reachable", return_value=True))
                # setup_cmd imports ensure_origin_remote directly:
                stack.enter_context(_p("pkgmgr.actions.mirror.setup_cmd.ensure_origin_remote", return_value=None))
                # Extra safety: if any code calls git_remote.ensure_origin_remote directly
                stack.enter_context(_p("pkgmgr.actions.mirror.git_remote.ensure_origin_remote", return_value=None))
                # remote provisioning: remote_provision imports ensure_remote_repo directly from core:
                stack.enter_context(
                    _p(
                        "pkgmgr.actions.mirror.remote_provision.ensure_remote_repo",
                        side_effect=_fake_ensure_remote_repo,
                    )
                )
                # Extra safety: if anything calls remote_check.run_git directly, make it inert
                stack.enter_context(_p("pkgmgr.actions.mirror.remote_check.run_git", return_value="dummy"))
                with redirect_stdout(buffer), redirect_stderr(buffer):
                    try:
                        runpy.run_module("pkgmgr", run_name="__main__")
                    except SystemExit as exc:
                        # NOTE(review): non-integer exit payloads (e.g. sys.exit("msg"))
                        # collapse to None and count as success here — confirm intended.
                        code = exc.code if isinstance(exc.code, int) else None
                        if code not in (0, None):
                            raise AssertionError(
                                "%r failed with exit code %r.\n\nOutput:\n%s"
                                % (cmd_repr, exc.code, buffer.getvalue())
                            )
            return buffer.getvalue()
        finally:
            # Always restore argv and the full environment, even on failure.
            sys.argv = original_argv
            os.environ.clear()
            os.environ.update(original_env)

    # ------------------------------------------------------------
    # Tests
    # ------------------------------------------------------------
    def test_mirror_help(self) -> None:
        output = self._run_pkgmgr(["mirror", "--help"])
        self.assertIn("usage:", output.lower())
        self.assertIn("mirror", output.lower())

    def test_mirror_list_preview_all(self) -> None:
        output = self._run_pkgmgr(["mirror", "list", "--preview", "--all"])
        self.assertTrue(output.strip(), "Expected output from mirror list")

    def test_mirror_diff_preview_all(self) -> None:
        output = self._run_pkgmgr(["mirror", "diff", "--preview", "--all"])
        self.assertTrue(output.strip(), "Expected output from mirror diff")

    def test_mirror_merge_config_to_file_preview_all(self) -> None:
        output = self._run_pkgmgr(["mirror", "merge", "config", "file", "--preview", "--all"])
        self.assertTrue(output.strip(), "Expected output from mirror merge (config -> file)")

    def test_mirror_setup_preview_all(self) -> None:
        output = self._run_pkgmgr(["mirror", "setup", "--preview", "--all"])
        self.assertTrue(output.strip(), "Expected output from mirror setup")

    def test_mirror_check_preview_all(self) -> None:
        output = self._run_pkgmgr(["mirror", "check", "--preview", "--all"])
        self.assertTrue(output.strip(), "Expected output from mirror check")

    def test_mirror_provision_preview_all(self) -> None:
        output = self._run_pkgmgr(["mirror", "provision", "--preview", "--all"])
        self.assertTrue(output.strip(), "Expected output from mirror provision (preview)")
if __name__ == "__main__":
    # Allow running this test module directly (outside pytest/CI).
    unittest.main()

View File

@@ -0,0 +1,63 @@
from __future__ import annotations
import json
import unittest
from dataclasses import dataclass
@dataclass
class FakeRunResult:
    """
    Structured stand-in for a command runner result: exit code plus
    captured stdout/stderr, mirroring the real runner's result object.
    """
    returncode: int
    stdout: str
    stderr: str = ""
class FakeRunner:
    """
    Minimal runner stub: always answers with the single preconfigured result.
    """

    def __init__(self, result):
        # The canned value handed back for every run() call.
        self._canned = result

    def run(self, ctx, cmd: str, allow_failure: bool = False):
        """Ignore *ctx*, *cmd*, and *allow_failure*; return the canned result."""
        return self._canned
class TestE2ENixProfileListJsonParsing(unittest.TestCase):
    """
    Verifies that NixProfileInspector parses `nix profile list --json` output
    whether the CommandRunner hands back a raw stdout string or a
    RunResult-like object exposing a `.stdout` attribute.
    """

    _PAYLOAD = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}

    def _assert_parses(self, runner_result) -> None:
        """Feed *runner_result* through the inspector and check the attrPath."""
        from pkgmgr.actions.install.installers.nix.profile import NixProfileInspector
        inspector = NixProfileInspector()
        parsed = inspector.list_json(ctx=None, runner=FakeRunner(runner_result))
        self.assertEqual(parsed["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")

    def test_list_json_accepts_raw_string(self) -> None:
        self._assert_parses(json.dumps(self._PAYLOAD))

    def test_list_json_accepts_runresult_object(self) -> None:
        self._assert_parses(FakeRunResult(returncode=0, stdout=json.dumps(self._PAYLOAD)))
if __name__ == "__main__":
    # Allow running this test module directly (outside pytest/CI).
    unittest.main()

View File

@@ -0,0 +1,119 @@
from __future__ import annotations
import io
import os
import shutil
import subprocess
import tempfile
import unittest
from contextlib import redirect_stdout
from types import SimpleNamespace
from pkgmgr.cli.commands.publish import handle_publish
def _run(cmd: list[str], cwd: str) -> None:
subprocess.run(
cmd,
cwd=cwd,
check=True,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
class TestIntegrationPublish(unittest.TestCase):
    """
    Integration tests for handle_publish() against a throwaway git repository.

    setUp builds a real repo with one commit and annotated tag v1.2.3; only
    preview mode is exercised, so nothing is built or uploaded.
    """

    def setUp(self) -> None:
        # git is a hard requirement for constructing the fixture repository.
        if shutil.which("git") is None:
            self.skipTest("git is required for this integration test")
        self.tmp = tempfile.TemporaryDirectory()
        self.repo_dir = self.tmp.name
        # Initialize git repository
        _run(["git", "init"], cwd=self.repo_dir)
        _run(["git", "config", "user.email", "ci@example.invalid"], cwd=self.repo_dir)
        _run(["git", "config", "user.name", "CI"], cwd=self.repo_dir)
        with open(os.path.join(self.repo_dir, "README.md"), "w", encoding="utf-8") as f:
            f.write("test\n")
        _run(["git", "add", "README.md"], cwd=self.repo_dir)
        _run(["git", "commit", "-m", "init"], cwd=self.repo_dir)
        _run(["git", "tag", "-a", "v1.2.3", "-m", "v1.2.3"], cwd=self.repo_dir)
        # Create MIRRORS file with PyPI target
        with open(os.path.join(self.repo_dir, "MIRRORS"), "w", encoding="utf-8") as f:
            f.write("https://pypi.org/project/pkgmgr/\n")

    def tearDown(self) -> None:
        # Remove the fixture repository.
        self.tmp.cleanup()

    def test_publish_preview_end_to_end(self) -> None:
        # ctx/selected are shaped like the objects the CLI layer would pass.
        ctx = SimpleNamespace(
            repositories_base_dir=self.repo_dir,
            all_repositories=[
                {
                    "name": "pkgmgr",
                    "directory": self.repo_dir,
                }
            ],
        )
        selected = [
            {
                "name": "pkgmgr",
                "directory": self.repo_dir,
            }
        ]
        args = SimpleNamespace(
            preview=True,
            non_interactive=False,
        )
        buf = io.StringIO()
        with redirect_stdout(buf):
            handle_publish(args=args, ctx=ctx, selected=selected)
        out = buf.getvalue()
        self.assertIn("[pkgmgr] Publishing repository", out)
        self.assertIn("[INFO] Publishing pkgmgr for tag v1.2.3", out)
        self.assertIn("[PREVIEW] Would build and upload to PyPI.", out)
        # Preview must not create dist/
        self.assertFalse(os.path.isdir(os.path.join(self.repo_dir, "dist")))

    def test_publish_skips_without_pypi_mirror(self) -> None:
        # Replace the MIRRORS file with a non-PyPI target only.
        with open(os.path.join(self.repo_dir, "MIRRORS"), "w", encoding="utf-8") as f:
            f.write("git@github.com:example/example.git\n")
        ctx = SimpleNamespace(
            repositories_base_dir=self.repo_dir,
            all_repositories=[
                {
                    "name": "pkgmgr",
                    "directory": self.repo_dir,
                }
            ],
        )
        selected = [
            {
                "name": "pkgmgr",
                "directory": self.repo_dir,
            }
        ]
        args = SimpleNamespace(
            preview=True,
            non_interactive=False,
        )
        buf = io.StringIO()
        with redirect_stdout(buf):
            handle_publish(args=args, ctx=ctx, selected=selected)
        out = buf.getvalue()
        self.assertIn("[INFO] No PyPI mirror found. Skipping publish.", out)

View File

@@ -0,0 +1,66 @@
from __future__ import annotations
import tempfile
import unittest
from types import SimpleNamespace
from unittest.mock import patch
class TestIntegrationReleasePublishHook(unittest.TestCase):
    """Integration tests for the publish hook triggered by `pkgmgr release`."""

    def _ctx(self) -> SimpleNamespace:
        """Minimal CLIContext-shaped namespace accepted by handle_release()."""
        return SimpleNamespace(
            repositories_base_dir="/tmp",
            all_repositories=[],
        )

    def _parse(self, argv: list[str]):
        """Route argv through the real parser so the CLI surface stays honest."""
        from pkgmgr.cli.parser import create_parser
        return create_parser("pkgmgr test").parse_args(argv)

    def test_release_runs_publish_by_default_and_respects_tty(self) -> None:
        from pkgmgr.cli.commands.release import handle_release
        with tempfile.TemporaryDirectory() as repo_dir:
            selected = [{"directory": repo_dir}]
            args = self._parse(["release", "patch"])
            with (
                patch("pkgmgr.cli.commands.release.run_release") as release_mock,
                patch("pkgmgr.cli.commands.release.run_publish") as publish_mock,
                patch("pkgmgr.cli.commands.release.sys.stdin.isatty", return_value=False),
            ):
                handle_release(args=args, ctx=self._ctx(), selected=selected)
            release_mock.assert_called_once()
            publish_mock.assert_called_once()
            kwargs = publish_mock.call_args.kwargs
            self.assertEqual(kwargs["repo"], selected[0])
            self.assertEqual(kwargs["repo_dir"], repo_dir)
            self.assertFalse(kwargs["interactive"])
            self.assertFalse(kwargs["allow_prompt"])

    def test_release_skips_publish_when_no_publish_flag_set(self) -> None:
        from pkgmgr.cli.commands.release import handle_release
        with tempfile.TemporaryDirectory() as repo_dir:
            selected = [{"directory": repo_dir}]
            args = self._parse(["release", "patch", "--no-publish"])
            with (
                patch("pkgmgr.cli.commands.release.run_release") as release_mock,
                patch("pkgmgr.cli.commands.release.run_publish") as publish_mock,
            ):
                handle_release(args=args, ctx=self._ctx(), selected=selected)
            release_mock.assert_called_once()
            publish_mock.assert_not_called()
if __name__ == "__main__":
    # Allow running this test module directly (outside pytest/CI).
    unittest.main()

View File

@@ -0,0 +1,53 @@
from __future__ import annotations
import importlib
import io
import unittest
from contextlib import redirect_stdout
from types import SimpleNamespace
from unittest.mock import patch
class TestIntegrationReposCreatePreview(unittest.TestCase):
    """Checks that the repos CLI dispatches `create` through to create_repo()."""

    def test_repos_create_preview_wires_create_repo(self) -> None:
        # Import lazily so a missing or renamed CLI module skips, not errors.
        try:
            cli_module = importlib.import_module("pkgmgr.cli.commands.repos")
        except Exception as exc:
            self.skipTest(f"CLI module not available: {exc}")
        handler = getattr(cli_module, "handle_repos_command", None)
        if handler is None:
            self.skipTest("handle_repos_command not found in pkgmgr.cli.commands.repos")
        ctx = SimpleNamespace(
            repositories_base_dir="/tmp/Repositories",
            binaries_dir="/tmp/bin",
            all_repositories=[],
            config_merged={"directories": {"repositories": "/tmp/Repositories"}, "repositories": []},
            user_config_path="/tmp/user.yml",
        )
        args = SimpleNamespace(
            command="create",
            identifiers=["github.com/acme/repo"],
            remote=False,
            preview=True,
        )
        captured = io.StringIO()
        with (
            redirect_stdout(captured),
            patch("pkgmgr.cli.commands.repos.create_repo") as create_repo_mock,
        ):
            handler(args, ctx, selected=[])
        create_repo_mock.assert_called_once()
        passed_kwargs = create_repo_mock.call_args.kwargs
        self.assertEqual(passed_kwargs["remote"], False)
        self.assertEqual(passed_kwargs["preview"], True)
        self.assertEqual(create_repo_mock.call_args.args[0], "github.com/acme/repo")
if __name__ == "__main__":
    # Allow running this test module directly (outside pytest/CI).
    unittest.main()

View File

@@ -0,0 +1,110 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import unittest
from unittest.mock import patch
from pkgmgr.actions.update.manager import UpdateManager
class TestUpdateSilentContinues(unittest.TestCase):
    """Continue-on-failure and --silent exit-code semantics of UpdateManager.run()."""

    def test_update_continues_on_failures_and_silent_controls_exit_code(self) -> None:
        """
        Integration test for UpdateManager:
        - pull failure on repo A should not stop repo B/C
        - install failure on repo B should not stop repo C
        - without silent -> SystemExit(1) at end if any failures
        - with silent -> no SystemExit even if there are failures
        """
        repos = [
            {"provider": "github", "account": "example", "repository": "repo-a"},
            {"provider": "github", "account": "example", "repository": "repo-b"},
            {"provider": "github", "account": "example", "repository": "repo-c"},
        ]
        # We patch the internal calls used by UpdateManager:
        # - pull_with_verification is called once per repo
        # - install_repos is called once per repo that successfully pulled
        #
        # We simulate:
        #   repo-a: pull fails
        #   repo-b: pull ok, install fails
        #   repo-c: pull ok, install ok
        pull_calls = []
        install_calls = []

        def pull_side_effect(selected_repos, *_args, **_kwargs):
            # selected_repos is a list with exactly one repo in our implementation.
            repo = selected_repos[0]
            pull_calls.append(repo["repository"])
            if repo["repository"] == "repo-a":
                raise SystemExit(2)
            return None

        def install_side_effect(selected_repos, *_args, **kwargs):
            repo = selected_repos[0]
            # Record (name, silent flag, emit_summary flag) for later assertions.
            install_calls.append((repo["repository"], kwargs.get("silent"), kwargs.get("emit_summary")))
            if repo["repository"] == "repo-b":
                raise SystemExit(3)
            return None

        # Patch at the exact import locations used inside UpdateManager.run()
        with patch("pkgmgr.actions.repository.pull.pull_with_verification", side_effect=pull_side_effect), patch(
            "pkgmgr.actions.install.install_repos", side_effect=install_side_effect
        ):
            # 1) silent=True: should NOT raise (even though failures happened)
            UpdateManager().run(
                selected_repos=repos,
                repositories_base_dir="/tmp/repos",
                bin_dir="/tmp/bin",
                all_repos=repos,
                no_verification=True,
                system_update=False,
                preview=True,
                quiet=True,
                update_dependencies=False,
                clone_mode="shallow",
                silent=True,
                force_update=True,
            )
            # Ensure it tried all pulls, and installs happened for B and C only.
            self.assertEqual(pull_calls, ["repo-a", "repo-b", "repo-c"])
            self.assertEqual([r for r, _silent, _emit in install_calls], ["repo-b", "repo-c"])
            # Ensure UpdateManager suppressed install summary spam by passing emit_summary=False.
            for _repo_name, _silent, emit_summary in install_calls:
                self.assertFalse(emit_summary)
            # Reset tracking for the non-silent run
            pull_calls.clear()
            install_calls.clear()
            # 2) silent=False: should raise SystemExit(1) at end due to failures
            with self.assertRaises(SystemExit) as cm:
                UpdateManager().run(
                    selected_repos=repos,
                    repositories_base_dir="/tmp/repos",
                    bin_dir="/tmp/bin",
                    all_repos=repos,
                    no_verification=True,
                    system_update=False,
                    preview=True,
                    quiet=True,
                    update_dependencies=False,
                    clone_mode="shallow",
                    silent=False,
                    force_update=True,
                )
            self.assertEqual(cm.exception.code, 1)
            # Still must have processed all repos (continue-on-failure behavior).
            self.assertEqual(pull_calls, ["repo-a", "repo-b", "repo-c"])
            self.assertEqual([r for r, _silent, _emit in install_calls], ["repo-b", "repo-c"])
if __name__ == "__main__":
    # Allow running this test module directly (outside pytest/CI).
    unittest.main()

View File

@@ -0,0 +1,44 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Optional
@dataclass
class FakeRunResult:
    """Structured stand-in for a completed command: exit code plus captured output."""
    returncode: int
    stdout: str = ""
    stderr: str = ""
class FakeRunner:
    """
    Minimal runner stub compatible with:
    - CommandRunner.run(ctx, cmd, allow_failure=...)
    - Generic runner.run(ctx, cmd, allow_failure=...)

    Every invocation is recorded in ``calls`` so tests can assert on the
    exact commands that were issued.
    """

    def __init__(self, mapping: Optional[dict[str, Any]] = None, default: Any = None):
        # Per-command canned results; unknown commands fall back to `default`.
        self.mapping = mapping if mapping else {}
        self.default = FakeRunResult(0, "", "") if default is None else default
        self.calls: list[tuple[Any, str, bool]] = []

    def run(self, ctx, cmd: str, allow_failure: bool = False):
        """Record the call, then answer from the mapping (or the default)."""
        self.calls.append((ctx, cmd, allow_failure))
        return self.mapping.get(cmd, self.default)
class FakeRetry:
    """
    Mimics GitHubRateLimitRetry.run_with_retry(ctx, runner, cmd)

    Hands out preconfigured results one at a time; once the queue is
    exhausted it keeps returning an empty success result.
    """

    def __init__(self, results: list[FakeRunResult]):
        self._pending = list(results)
        self.calls: list[str] = []

    def run_with_retry(self, ctx, runner, cmd: str):
        """Record *cmd* and return the next queued result (or a success stub)."""
        self.calls.append(cmd)
        if not self._pending:
            return FakeRunResult(0, "", "")
        return self._pending.pop(0)

View File

@@ -0,0 +1,58 @@
from __future__ import annotations
import unittest
from pkgmgr.actions.install.installers.nix.conflicts import NixConflictResolver
from ._fakes import FakeRunResult, FakeRunner, FakeRetry
class DummyCtx:
    """Minimal context stand-in; exposes only the ``quiet`` flag."""
    # Keep resolver output quiet during tests.
    quiet = True
class TestNixConflictResolver(unittest.TestCase):
    """Behavioral tests for NixConflictResolver.resolve() recovery strategies."""

    def test_resolve_removes_tokens_and_retries_success(self) -> None:
        # Profile lookup yields a removable token; after removing it, the
        # retried install succeeds, so resolve() must report True.
        ctx = DummyCtx()
        install_cmd = "nix profile install /repo#default"
        # stderr mimicking a nix file-conflict error for the pkgmgr binary.
        stderr = '''
        error: An existing package already provides the following file:
          /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr/bin/pkgmgr
        '''
        runner = FakeRunner(mapping={
            "nix profile remove pkgmgr": FakeRunResult(0, "", ""),
        })
        retry = FakeRetry(results=[FakeRunResult(0, "", "")])
        class FakeProfile:
            # Store-prefix lookup finds nothing; output-name lookup matches.
            def find_remove_tokens_for_store_prefixes(self, ctx, runner, prefixes):
                return []
            def find_remove_tokens_for_output(self, ctx, runner, output):
                return ["pkgmgr"]
        resolver = NixConflictResolver(runner=runner, retry=retry, profile=FakeProfile())
        ok = resolver.resolve(ctx, install_cmd, stdout="", stderr=stderr, output="pkgmgr", max_rounds=2)
        self.assertTrue(ok)
        self.assertIn("nix profile remove pkgmgr", [c[1] for c in runner.calls])

    def test_resolve_uses_textual_remove_tokens_last_resort(self) -> None:
        # Both profile lookups come back empty, so resolve() must fall back to
        # the `nix profile remove '...'` hint embedded in stderr.
        ctx = DummyCtx()
        install_cmd = "nix profile install /repo#default"
        stderr = "hint: try:\n nix profile remove 'pkgmgr-1'\n"
        runner = FakeRunner(mapping={
            "nix profile remove pkgmgr-1": FakeRunResult(0, "", ""),
        })
        retry = FakeRetry(results=[FakeRunResult(0, "", "")])
        class FakeProfile:
            def find_remove_tokens_for_store_prefixes(self, ctx, runner, prefixes):
                return []
            def find_remove_tokens_for_output(self, ctx, runner, output):
                return []
        resolver = NixConflictResolver(runner=runner, retry=retry, profile=FakeProfile())
        ok = resolver.resolve(ctx, install_cmd, stdout="", stderr=stderr, output="pkgmgr", max_rounds=2)
        self.assertTrue(ok)
        self.assertIn("nix profile remove pkgmgr-1", [c[1] for c in runner.calls])

View File

@@ -0,0 +1,62 @@
from __future__ import annotations
import json
import unittest
from pkgmgr.actions.install.installers.nix.profile import NixProfileInspector
from ._fakes import FakeRunResult, FakeRunner
class TestNixProfileInspector(unittest.TestCase):
    """Unit tests for NixProfileInspector JSON parsing and remove-token lookup."""

    @staticmethod
    def _runner_for(payload) -> FakeRunner:
        """Return a FakeRunner whose default result carries *payload* as JSON stdout."""
        return FakeRunner(default=FakeRunResult(0, stdout=json.dumps(payload)))

    def test_list_json_accepts_raw_string(self) -> None:
        payload = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}
        # Runner hands back the raw stdout string directly.
        runner = FakeRunner(default=json.dumps(payload))
        data = NixProfileInspector().list_json(ctx=None, runner=runner)
        self.assertEqual(data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")

    def test_list_json_accepts_result_object(self) -> None:
        payload = {"elements": {"pkgmgr-1": {"attrPath": "packages.x86_64-linux.pkgmgr"}}}
        data = NixProfileInspector().list_json(ctx=None, runner=self._runner_for(payload))
        self.assertEqual(data["elements"]["pkgmgr-1"]["attrPath"], "packages.x86_64-linux.pkgmgr")

    def test_find_remove_tokens_for_output_includes_output_first(self) -> None:
        payload = {
            "elements": {
                "pkgmgr-1": {"name": "pkgmgr-1", "attrPath": "packages.x86_64-linux.pkgmgr"},
                "default-1": {"name": "default-1", "attrPath": "packages.x86_64-linux.default"},
            }
        }
        tokens = NixProfileInspector().find_remove_tokens_for_output(
            ctx=None, runner=self._runner_for(payload), output="pkgmgr"
        )
        self.assertEqual(tokens[0], "pkgmgr")
        self.assertIn("pkgmgr-1", tokens)

    def test_find_remove_tokens_for_store_prefixes(self) -> None:
        payload = {
            "elements": {
                "pkgmgr-1": {
                    "name": "pkgmgr-1",
                    "attrPath": "packages.x86_64-linux.pkgmgr",
                    "storePaths": ["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
                },
                "something": {
                    "name": "other",
                    "attrPath": "packages.x86_64-linux.other",
                    "storePaths": ["/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-other"],
                },
            }
        }
        tokens = NixProfileInspector().find_remove_tokens_for_store_prefixes(
            ctx=None, runner=self._runner_for(payload), prefixes=["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"]
        )
        self.assertIn("pkgmgr-1", tokens)

View File

@@ -0,0 +1,88 @@
from __future__ import annotations
import unittest
from unittest.mock import MagicMock
from pkgmgr.actions.install.installers.nix.installer import NixFlakeInstaller
from ._fakes import FakeRunResult
class DummyCtx:
    """Lightweight install-context stand-in carrying the attributes read in these tests."""

    def __init__(self, identifier: str = "x", repo_dir: str = "/repo", quiet: bool = True, force_update: bool = False):
        self.identifier = identifier      # repository identifier passed to the installer
        self.repo_dir = repo_dir          # local checkout directory of the flake
        self.quiet = quiet                # suppress installer chatter during tests
        self.force_update = force_update  # whether to force reinstallation
class TestNixFlakeInstallerCore(unittest.TestCase):
    """Core control-flow tests for NixFlakeInstaller.run() with all collaborators mocked."""

    @staticmethod
    def _mocked_installer() -> NixFlakeInstaller:
        """Build an installer whose collaborators are all replaced by MagicMocks."""
        installer = NixFlakeInstaller()
        installer.supports = MagicMock(return_value=True)
        installer._retry = MagicMock()
        installer._conflicts = MagicMock()
        installer._profile = MagicMock()
        installer._runner = MagicMock()
        return installer

    def test_install_only_success_returns(self) -> None:
        installer = self._mocked_installer()
        installer._retry.run_with_retry.return_value = FakeRunResult(0, "", "")
        ctx = DummyCtx(identifier="lib", repo_dir="/repo", quiet=True)
        installer.run(ctx)
        installer._retry.run_with_retry.assert_called()

    def test_conflict_resolver_success_short_circuits(self) -> None:
        installer = self._mocked_installer()
        installer._retry.run_with_retry.return_value = FakeRunResult(1, "out", "err")
        installer._conflicts.resolve.return_value = True
        ctx = DummyCtx(identifier="lib", repo_dir="/repo", quiet=True)
        installer.run(ctx)
        installer._conflicts.resolve.assert_called()

    def test_mandatory_failure_raises_systemexit(self) -> None:
        installer = self._mocked_installer()
        installer._retry.run_with_retry.return_value = FakeRunResult(2, "", "no")
        installer._conflicts.resolve.return_value = False
        installer._profile.find_installed_indices_for_output.return_value = []
        installer._runner.run.return_value = FakeRunResult(2, "", "")
        ctx = DummyCtx(identifier="lib", repo_dir="/repo", quiet=True)
        with self.assertRaises(SystemExit) as cm:
            installer.run(ctx)
        self.assertEqual(cm.exception.code, 2)

    def test_optional_failure_does_not_raise(self) -> None:
        installer = self._mocked_installer()
        # First output succeeds, second fails; only the failure is optional.
        outcomes = [FakeRunResult(0, "", ""), FakeRunResult(2, "", "")]
        installer._retry.run_with_retry.side_effect = lambda ctx, runner, cmd: outcomes.pop(0)
        installer._conflicts.resolve.return_value = False
        installer._profile.find_installed_indices_for_output.return_value = []
        installer._runner.run.return_value = FakeRunResult(2, "", "")
        ctx = DummyCtx(identifier="pkgmgr", repo_dir="/repo", quiet=True)
        installer.run(ctx)  # must not raise

View File

@@ -115,105 +115,7 @@ class TestNixFlakeInstaller(unittest.TestCase):
install_cmds = self._install_cmds_from_calls(subproc_mock.call_args_list) install_cmds = self._install_cmds_from_calls(subproc_mock.call_args_list)
self.assertEqual(install_cmds, [f"nix profile install {self.repo_dir}#default"]) self.assertEqual(install_cmds, [f"nix profile install {self.repo_dir}#default"])
def test_nix_flake_run_mandatory_failure_raises(self) -> None:
"""
For a generic repository, 'default' is mandatory.
A non-zero return code must raise SystemExit with that code.
"""
ctx = DummyCtx(identifier="some-lib", repo_dir=self.repo_dir)
installer = NixFlakeInstaller()
# retry layer does one attempt (non-403), then fallback does final attempt => 2 installs
install_results = [self._cp(1), self._cp(1)]
def fake_subprocess_run(cmd, *args, **kwargs):
if isinstance(cmd, str) and cmd.startswith("nix profile list --json"):
return self._cp(0, stdout='{"elements": []}', stderr="")
if isinstance(cmd, str) and cmd.startswith("nix profile install "):
return install_results.pop(0)
return self._cp(0)
buf = io.StringIO()
with patch("pkgmgr.actions.install.installers.nix.installer.shutil.which") as which_mock, patch(
"pkgmgr.actions.install.installers.nix.installer.os.path.exists", return_value=True
), patch(
"pkgmgr.actions.install.installers.nix.runner.subprocess.run", side_effect=fake_subprocess_run
) as subproc_mock, redirect_stdout(buf):
self._enable_nix_in_module(which_mock)
self.assertTrue(installer.supports(ctx))
with self.assertRaises(SystemExit) as cm:
installer.run(ctx)
self.assertEqual(cm.exception.code, 1)
out = buf.getvalue()
self.assertIn("[nix] install: nix profile install", out)
self.assertIn("[ERROR] Failed to install Nix flake output 'default' (exit 1)", out)
install_cmds = self._install_cmds_from_calls(subproc_mock.call_args_list)
self.assertEqual(
install_cmds,
[
f"nix profile install {self.repo_dir}#default",
f"nix profile install {self.repo_dir}#default",
],
)
def test_nix_flake_run_optional_failure_does_not_raise(self) -> None:
"""
For pkgmgr/package-manager repositories:
- 'pkgmgr' output is mandatory
- 'default' output is optional
Failure of optional output must not raise.
"""
ctx = DummyCtx(identifier="pkgmgr", repo_dir=self.repo_dir)
installer = NixFlakeInstaller()
# pkgmgr success (1 call), default fails (2 calls: attempt + final)
install_results = [self._cp(0), self._cp(1), self._cp(1)]
def fake_subprocess_run(cmd, *args, **kwargs):
if isinstance(cmd, str) and cmd.startswith("nix profile list --json"):
return self._cp(0, stdout='{"elements": []}', stderr="")
if isinstance(cmd, str) and cmd.startswith("nix profile install "):
return install_results.pop(0)
return self._cp(0)
buf = io.StringIO()
with patch("pkgmgr.actions.install.installers.nix.installer.shutil.which") as which_mock, patch(
"pkgmgr.actions.install.installers.nix.installer.os.path.exists", return_value=True
), patch(
"pkgmgr.actions.install.installers.nix.runner.subprocess.run", side_effect=fake_subprocess_run
) as subproc_mock, redirect_stdout(buf):
self._enable_nix_in_module(which_mock)
self.assertTrue(installer.supports(ctx))
installer.run(ctx) # must NOT raise
out = buf.getvalue()
# Should announce both outputs
self.assertIn("ensuring outputs: pkgmgr, default", out)
# First output ok
self.assertIn("[nix] output 'pkgmgr' successfully installed.", out)
# Second output failed but no raise
self.assertIn("[ERROR] Failed to install Nix flake output 'default' (exit 1)", out)
self.assertIn("[WARNING] Continuing despite failure of optional output 'default'.", out)
install_cmds = self._install_cmds_from_calls(subproc_mock.call_args_list)
self.assertEqual(
install_cmds,
[
f"nix profile install {self.repo_dir}#pkgmgr",
f"nix profile install {self.repo_dir}#default",
f"nix profile install {self.repo_dir}#default",
],
)
def test_nix_flake_supports_respects_disable_env(self) -> None: def test_nix_flake_supports_respects_disable_env(self) -> None:
""" """
PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 must disable the installer, PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 must disable the installer,

View File

@@ -0,0 +1,37 @@
from __future__ import annotations
import unittest
from pkgmgr.actions.install.installers.nix.profile.models import NixProfileEntry
from pkgmgr.actions.install.installers.nix.profile.matcher import entry_matches_output, entry_matches_store_path
class TestMatcher(unittest.TestCase):
    """Tests for entry_matches_output() / entry_matches_store_path()."""

    @staticmethod
    def _entry(name: str, attr: str) -> NixProfileEntry:
        """Build a profile entry with the given name/attrPath and a fixed store path."""
        return NixProfileEntry(
            key="pkgmgr-1",
            index=None,
            name=name,
            attr_path=attr,
            store_paths=["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
        )

    def test_matches_direct_name(self) -> None:
        self.assertTrue(entry_matches_output(self._entry("pkgmgr", ""), "pkgmgr"))

    def test_matches_attrpath_hash(self) -> None:
        self.assertTrue(entry_matches_output(self._entry("", "github:me/repo#pkgmgr"), "pkgmgr"))

    def test_matches_attrpath_dot_suffix(self) -> None:
        self.assertTrue(entry_matches_output(self._entry("", "packages.x86_64-linux.pkgmgr"), "pkgmgr"))

    def test_matches_name_with_suffix_number(self) -> None:
        self.assertTrue(entry_matches_output(self._entry("pkgmgr-1", ""), "pkgmgr"))

    def test_package_manager_special_case(self) -> None:
        self.assertTrue(entry_matches_output(self._entry("package-manager-2", ""), "pkgmgr"))

    def test_store_path_match(self) -> None:
        probe = self._entry("pkgmgr-1", "")
        self.assertTrue(entry_matches_store_path(probe, "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"))
        self.assertFalse(entry_matches_store_path(probe, "/nix/store/cccccccccccccccccccccccccccccccc-zzz"))

View File

@@ -0,0 +1,39 @@
from __future__ import annotations
import unittest
from pkgmgr.actions.install.installers.nix.profile.normalizer import coerce_index, normalize_elements
class TestNormalizer(unittest.TestCase):
    """Tests for profile-list index coercion and element normalisation."""

    def test_coerce_index_numeric_key(self) -> None:
        # A purely numeric key is taken as the index directly.
        self.assertEqual(coerce_index("3", {"name": "x"}), 3)

    def test_coerce_index_explicit_field(self) -> None:
        # Explicit "index"/"id" fields take precedence over the key text.
        self.assertEqual(coerce_index("pkgmgr-1", {"index": 7}), 7)
        self.assertEqual(coerce_index("pkgmgr-1", {"id": "8"}), 8)

    def test_coerce_index_trailing_number(self) -> None:
        # Without an explicit field, a trailing number in the key is used.
        self.assertEqual(coerce_index("pkgmgr-42", {"name": "x"}), 42)

    def test_normalize_elements_handles_missing_elements(self) -> None:
        self.assertEqual(normalize_elements({}), [])

    def test_normalize_elements_collects_store_paths(self) -> None:
        # One element uses "storePaths" (list), the other "storePath" (scalar);
        # both must survive normalisation.
        payload = {
            "elements": {
                "pkgmgr-1": {
                    "name": "pkgmgr-1",
                    "attrPath": "packages.x86_64-linux.pkgmgr",
                    "storePaths": ["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
                },
                "2": {
                    "name": "foo",
                    "attrPath": "packages.x86_64-linux.default",
                    "storePath": "/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-foo",
                },
            }
        }
        normalized = normalize_elements(payload)
        self.assertEqual(len(normalized), 2)
        self.assertTrue(normalized[0].store_paths)

View File

@@ -0,0 +1,18 @@
from __future__ import annotations
import json
import unittest
from pkgmgr.actions.install.installers.nix.profile.parser import parse_profile_list_json
class TestParseProfileListJson(unittest.TestCase):
    """Tests for parsing `nix profile list --json` output."""

    def test_parses_valid_json(self) -> None:
        raw = json.dumps({"elements": {"0": {"name": "pkgmgr"}}})
        parsed = parse_profile_list_json(raw)
        self.assertEqual(parsed["elements"]["0"]["name"], "pkgmgr")

    def test_raises_systemexit_on_invalid_json(self) -> None:
        # Malformed JSON must abort with SystemExit carrying a parse hint.
        with self.assertRaises(SystemExit) as caught:
            parse_profile_list_json("{not json")
        self.assertIn("Failed to parse", str(caught.exception))

View File

@@ -0,0 +1,29 @@
from __future__ import annotations
import unittest
from pkgmgr.actions.install.installers.nix.profile_list import NixProfileListReader
from ._fakes import FakeRunResult, FakeRunner
class TestNixProfileListReader(unittest.TestCase):
    """Tests for NixProfileListReader's plain-text `nix profile list` parsing."""
    def test_entries_parses_indices_and_store_prefixes(self) -> None:
        # Fixture output: one "<index> <attr> <store-path>" record per line.
        # NOTE(review): the exact leading whitespace of this literal matters to
        # the parser under test — confirm against the committed file.
        out = '''
0 something /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr
1 something /nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-foo
'''
        runner = FakeRunner(default=FakeRunResult(0, stdout=out))
        reader = NixProfileListReader(runner=runner)
        entries = reader.entries(ctx=None)
        # Entries are indexable pairs: position 0 holds the numeric index,
        # position 1 the store path (as demonstrated by these assertions).
        self.assertEqual(entries[0][0], 0)
        self.assertTrue(entries[0][1].startswith("/nix/store/"))
    def test_indices_matching_store_prefixes(self) -> None:
        # Leading whitespace before the index must be tolerated by the reader.
        out = " 7 x /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr\n"
        runner = FakeRunner(default=FakeRunResult(0, stdout=out))
        reader = NixProfileListReader(runner=runner)
        hits = reader.indices_matching_store_prefixes(
            ctx=None,
            prefixes=["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr"],
        )
        # Only the matching line's index is reported.
        self.assertEqual(hits, [7])

View File

@@ -0,0 +1,29 @@
from __future__ import annotations
import unittest
from pkgmgr.actions.install.installers.nix.profile.result import extract_stdout_text
class TestExtractStdoutText(unittest.TestCase):
    """Tests for the tolerant stdout-extraction helper."""

    def test_accepts_string(self) -> None:
        self.assertEqual(extract_stdout_text("hello"), "hello")

    def test_accepts_bytes(self) -> None:
        self.assertEqual(extract_stdout_text(b"hi"), "hi")

    def test_accepts_object_with_stdout_str(self) -> None:
        class Result:
            stdout = "ok"

        self.assertEqual(extract_stdout_text(Result()), "ok")

    def test_accepts_object_with_stdout_bytes(self) -> None:
        class Result:
            stdout = b"ok"

        self.assertEqual(extract_stdout_text(Result()), "ok")

    def test_fallback_str(self) -> None:
        # No .stdout attribute at all: the helper falls back to str(obj).
        class Plain:
            def __str__(self) -> str:
                return "repr"

        self.assertEqual(extract_stdout_text(Plain()), "repr")

View File

@@ -0,0 +1,30 @@
from __future__ import annotations
import unittest
from pkgmgr.actions.install.installers.nix.textparse import NixConflictTextParser
class TestNixConflictTextParser(unittest.TestCase):
    """Tests for parsing `nix profile` conflict error text."""
    def test_remove_tokens_parses_unquoted_and_quoted(self) -> None:
        t = NixConflictTextParser()
        # Covers bare, single-quoted and double-quoted token forms.
        text = '''
nix profile remove pkgmgr
nix profile remove 'pkgmgr-1'
nix profile remove "default-2"
'''
        tokens = t.remove_tokens(text)
        # Quotes are stripped; order of appearance is preserved.
        self.assertEqual(tokens, ["pkgmgr", "pkgmgr-1", "default-2"])
    def test_existing_store_prefixes_extracts_existing_section_only(self) -> None:
        t = NixConflictTextParser()
        # Paths listed after the "conflicting file from the new package" marker
        # must NOT be reported — only the "existing package" section counts.
        text = '''
error: An existing package already provides the following file:
  /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pkgmgr/bin/pkgmgr
  /nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-pkgmgr/share/doc
This is the conflicting file from the new package:
  /nix/store/cccccccccccccccccccccccccccccccc-pkgmgr/bin/pkgmgr
'''
        prefixes = t.existing_store_prefixes(text)
        self.assertEqual(len(prefixes), 2)
        self.assertTrue(prefixes[0].startswith("/nix/store/"))

View File

@@ -0,0 +1 @@
# Unit test package for pkgmgr.actions.mirror

View File

@@ -0,0 +1,51 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import unittest
from unittest.mock import patch
from pkgmgr.actions.mirror.context import build_context
class TestMirrorContext(unittest.TestCase):
    """
    Unit tests for building RepoMirrorContext from repo + filesystem.
    """

    @patch("pkgmgr.actions.mirror.context.read_mirrors_file")
    @patch("pkgmgr.actions.mirror.context.load_config_mirrors")
    @patch("pkgmgr.actions.mirror.context.get_repo_dir")
    @patch("pkgmgr.actions.mirror.context.get_repo_identifier")
    def test_build_context_bundles_config_and_file_mirrors(
        self,
        m_identifier,
        m_repo_dir,
        m_config,
        m_file,
    ) -> None:
        # @patch decorators apply bottom-up: identifier mock arrives first.
        m_identifier.return_value = "id"
        m_repo_dir.return_value = "/tmp/repo"
        m_config.return_value = {"origin": "git@github.com:alice/repo.git"}
        m_file.return_value = {"backup": "ssh://git@backup/alice/repo.git"}
        repo_entry = {"provider": "github.com", "account": "alice", "repository": "repo"}
        ctx = build_context(repo_entry, repositories_base_dir="/base", all_repos=[repo_entry])
        self.assertEqual(ctx.identifier, "id")
        self.assertEqual(ctx.repo_dir, "/tmp/repo")
        self.assertEqual(ctx.config_mirrors, {"origin": "git@github.com:alice/repo.git"})
        self.assertEqual(ctx.file_mirrors, {"backup": "ssh://git@backup/alice/repo.git"})
        # Resolution merges both sources; file entries extend config entries.
        expected_resolved = {
            "origin": "git@github.com:alice/repo.git",
            "backup": "ssh://git@backup/alice/repo.git",
        }
        self.assertEqual(ctx.resolved_mirrors, expected_resolved)


if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,77 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import io
import unittest
from contextlib import redirect_stdout
from unittest.mock import MagicMock, PropertyMock, patch
from pkgmgr.actions.mirror.diff_cmd import diff_mirrors
class TestDiffCmd(unittest.TestCase):
    """
    Unit tests for mirror diff output.

    All tests stub build_context with a MagicMock; resolved_mirrors is set via
    PropertyMock on type(ctx) because a plain instance attribute would not
    shadow a property defined on the real context class.
    """
    @patch("pkgmgr.actions.mirror.diff_cmd.build_context")
    def test_diff_mirrors_reports_only_in_config_and_only_in_file(self, mock_build_context) -> None:
        # "cfgonly" exists only in config, "fileonly" only in the MIRRORS file.
        ctx = MagicMock()
        ctx.identifier = "id"
        ctx.repo_dir = "/tmp/repo"
        ctx.config_mirrors = {"origin": "a", "cfgonly": "b"}
        ctx.file_mirrors = {"origin": "a", "fileonly": "c"}
        type(ctx).resolved_mirrors = PropertyMock(
            return_value={"origin": "a", "cfgonly": "b", "fileonly": "c"}
        )
        mock_build_context.return_value = ctx
        buf = io.StringIO()
        with redirect_stdout(buf):
            diff_mirrors(selected_repos=[{}], repositories_base_dir="/base", all_repos=[])
        out = buf.getvalue()
        # Each one-sided entry is reported under its own tag.
        self.assertIn("[ONLY IN CONFIG] cfgonly: b", out)
        self.assertIn("[ONLY IN FILE] fileonly: c", out)
    @patch("pkgmgr.actions.mirror.diff_cmd.build_context")
    def test_diff_mirrors_reports_url_mismatch(self, mock_build_context) -> None:
        # Same mirror name on both sides but with different URLs.
        ctx = MagicMock()
        ctx.identifier = "id"
        ctx.repo_dir = "/tmp/repo"
        ctx.config_mirrors = {"origin": "a"}
        ctx.file_mirrors = {"origin": "different"}
        type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "different"})
        mock_build_context.return_value = ctx
        buf = io.StringIO()
        with redirect_stdout(buf):
            diff_mirrors(selected_repos=[{}], repositories_base_dir="/base", all_repos=[])
        out = buf.getvalue()
        # Both conflicting URLs must appear in the mismatch report.
        self.assertIn("[URL MISMATCH]", out)
        self.assertIn("config: a", out)
        self.assertIn("file: different", out)
    @patch("pkgmgr.actions.mirror.diff_cmd.build_context")
    def test_diff_mirrors_reports_in_sync(self, mock_build_context) -> None:
        # Identical mappings on both sides -> a single OK line.
        ctx = MagicMock()
        ctx.identifier = "id"
        ctx.repo_dir = "/tmp/repo"
        ctx.config_mirrors = {"origin": "a"}
        ctx.file_mirrors = {"origin": "a"}
        type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "a"})
        mock_build_context.return_value = ctx
        buf = io.StringIO()
        with redirect_stdout(buf):
            diff_mirrors(selected_repos=[{}], repositories_base_dir="/base", all_repos=[])
        out = buf.getvalue()
        self.assertIn("[OK] Mirrors in config and MIRRORS file are in sync.", out)
if __name__ == "__main__":
    unittest.main()

View File

@@ -1,110 +1,66 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations from __future__ import annotations
import unittest import unittest
from unittest.mock import patch
from pkgmgr.actions.mirror.git_remote import ( from pkgmgr.actions.mirror.git_remote import (
build_default_ssh_url, build_default_ssh_url,
determine_primary_remote_url, determine_primary_remote_url,
has_origin_remote,
) )
from pkgmgr.actions.mirror.types import MirrorMap, Repository from pkgmgr.actions.mirror.types import RepoMirrorContext
class TestMirrorGitRemote(unittest.TestCase): class TestMirrorGitRemote(unittest.TestCase):
""" def _ctx(self, *, file=None, config=None) -> RepoMirrorContext:
Unit tests for SSH URL and primary remote selection logic. return RepoMirrorContext(
""" identifier="repo",
repo_dir="/tmp/repo",
def test_build_default_ssh_url_without_port(self) -> None: config_mirrors=config or {},
repo: Repository = { file_mirrors=file or {},
"provider": "github.com",
"account": "kevinveenbirkenbach",
"repository": "package-manager",
}
url = build_default_ssh_url(repo)
self.assertEqual(
url,
"git@github.com:kevinveenbirkenbach/package-manager.git",
) )
def test_build_default_ssh_url_with_port(self) -> None: def test_build_default_ssh_url(self) -> None:
repo: Repository = { repo = {
"provider": "code.cymais.cloud", "provider": "github.com",
"account": "kevinveenbirkenbach", "account": "alice",
"repository": "pkgmgr", "repository": "repo",
"port": 2201,
} }
url = build_default_ssh_url(repo)
self.assertEqual( self.assertEqual(
url, build_default_ssh_url(repo),
"ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git", "git@github.com:alice/repo.git",
) )
def test_build_default_ssh_url_missing_fields_returns_none(self) -> None: def test_determine_primary_prefers_origin(self) -> None:
repo: Repository = { repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
"provider": "github.com", ctx = self._ctx(config={"origin": "git@github.com:alice/repo.git"})
"account": "kevinveenbirkenbach",
# "repository" fehlt absichtlich
}
url = build_default_ssh_url(repo)
self.assertIsNone(url)
def test_determine_primary_remote_url_prefers_origin_in_resolved_mirrors(
self,
) -> None:
repo: Repository = {
"provider": "github.com",
"account": "kevinveenbirkenbach",
"repository": "package-manager",
}
mirrors: MirrorMap = {
"origin": "git@github.com:kevinveenbirkenbach/package-manager.git",
"backup": "ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git",
}
url = determine_primary_remote_url(repo, mirrors)
self.assertEqual( self.assertEqual(
url, determine_primary_remote_url(repo, ctx),
"git@github.com:kevinveenbirkenbach/package-manager.git", "git@github.com:alice/repo.git",
) )
def test_determine_primary_remote_url_uses_any_mirror_if_no_origin(self) -> None: def test_determine_primary_uses_file_order(self) -> None:
repo: Repository = { repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
"provider": "github.com", ctx = self._ctx(
"account": "kevinveenbirkenbach", file={
"repository": "package-manager", "first": "git@a/first.git",
} "second": "git@a/second.git",
mirrors: MirrorMap = { }
"backup": "ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git", )
"mirror2": "ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git",
}
url = determine_primary_remote_url(repo, mirrors)
# Alphabetisch sortiert: backup, mirror2 → backup gewinnt
self.assertEqual( self.assertEqual(
url, determine_primary_remote_url(repo, ctx),
"ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git", "git@a/first.git",
) )
def test_determine_primary_remote_url_falls_back_to_default_ssh(self) -> None: def test_determine_primary_fallback_default(self) -> None:
repo: Repository = { repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
"provider": "github.com", ctx = self._ctx()
"account": "kevinveenbirkenbach",
"repository": "package-manager",
}
mirrors: MirrorMap = {}
url = determine_primary_remote_url(repo, mirrors)
self.assertEqual( self.assertEqual(
url, determine_primary_remote_url(repo, ctx),
"git@github.com:kevinveenbirkenbach/package-manager.git", "git@github.com:alice/repo.git",
) )
@patch("pkgmgr.actions.mirror.git_remote._safe_git_output")
if __name__ == "__main__": def test_has_origin_remote(self, m_out) -> None:
unittest.main() m_out.return_value = "origin\nupstream\n"
self.assertTrue(has_origin_remote("/tmp/repo"))

View File

@@ -0,0 +1,50 @@
from __future__ import annotations
import unittest
from unittest.mock import patch
from pkgmgr.actions.mirror.git_remote import ensure_origin_remote
from pkgmgr.actions.mirror.types import RepoMirrorContext
class TestGitRemotePrimaryPush(unittest.TestCase):
    """Tests that ensure_origin_remote wires origin + extra push URLs."""

    def test_origin_created_and_extra_push_added(self) -> None:
        repo = {"provider": "github.com", "account": "alice", "repository": "repo"}
        ctx = RepoMirrorContext(
            identifier="repo",
            repo_dir="/tmp/repo",
            config_mirrors={},
            file_mirrors={
                "primary": "git@github.com:alice/repo.git",
                "backup": "git@github.com:alice/repo-backup.git",
            },
        )

        commands: list[str] = []

        def record_command(cmd: str, cwd: str, preview: bool) -> None:
            # Capture every shell command instead of executing it.
            commands.append(cmd)

        # Canned `git` output: no remotes yet; push URLs already contain primary.
        git_responses = {
            ("remote",): "",
            ("remote", "get-url", "--push", "--all", "origin"): "git@github.com:alice/repo.git\n",
        }

        def stub_git_output(args, cwd):
            return git_responses.get(tuple(args), "")

        with patch("os.path.isdir", return_value=True), patch(
            "pkgmgr.actions.mirror.git_remote.run_command", side_effect=record_command
        ), patch(
            "pkgmgr.actions.mirror.git_remote._safe_git_output", side_effect=stub_git_output
        ):
            ensure_origin_remote(repo, ctx, preview=False)

        # origin is created and pointed at the primary mirror; the backup
        # mirror is appended as an additional push URL only.
        self.assertEqual(
            commands,
            [
                "git remote add origin git@github.com:alice/repo.git",
                "git remote set-url origin git@github.com:alice/repo.git",
                "git remote set-url --push origin git@github.com:alice/repo.git",
                "git remote set-url --add --push origin git@github.com:alice/repo-backup.git",
            ],
        )

View File

@@ -7,10 +7,7 @@ import os
import tempfile import tempfile
import unittest import unittest
from pkgmgr.actions.mirror.io import ( from pkgmgr.actions.mirror.io import load_config_mirrors, read_mirrors_file, write_mirrors_file
load_config_mirrors,
read_mirrors_file,
)
class TestMirrorIO(unittest.TestCase): class TestMirrorIO(unittest.TestCase):
@@ -18,117 +15,96 @@ class TestMirrorIO(unittest.TestCase):
Unit tests for pkgmgr.actions.mirror.io helpers. Unit tests for pkgmgr.actions.mirror.io helpers.
""" """
# ------------------------------------------------------------------ def test_load_config_mirrors_from_dict_filters_empty(self) -> None:
# load_config_mirrors
# ------------------------------------------------------------------
def test_load_config_mirrors_from_dict(self) -> None:
repo = { repo = {
"mirrors": { "mirrors": {
"origin": "ssh://git@example.com/account/repo.git", "origin": "ssh://git@example.com/account/repo.git",
"backup": "ssh://git@backup/account/repo.git", "backup": "",
"empty": "", "invalid": None,
"none": None,
} }
} }
mirrors = load_config_mirrors(repo) mirrors = load_config_mirrors(repo)
self.assertEqual(mirrors, {"origin": "ssh://git@example.com/account/repo.git"})
self.assertEqual( def test_load_config_mirrors_from_list_filters_invalid_entries(self) -> None:
mirrors,
{
"origin": "ssh://git@example.com/account/repo.git",
"backup": "ssh://git@backup/account/repo.git",
},
)
def test_load_config_mirrors_from_list(self) -> None:
repo = { repo = {
"mirrors": [ "mirrors": [
{"name": "origin", "url": "ssh://git@example.com/account/repo.git"}, {"name": "origin", "url": "ssh://git@example.com/account/repo.git"},
{"name": "backup", "url": "ssh://git@backup/account/repo.git"}, {"name": "backup", "url": ""},
{"name": "", "url": "ssh://git@invalid/ignored.git"}, {"name": "", "url": "ssh://git@example.com/empty-name.git"},
{"name": "missing-url"}, {"url": "ssh://git@example.com/missing-name.git"},
"not-a-dict",
] ]
} }
mirrors = load_config_mirrors(repo) mirrors = load_config_mirrors(repo)
self.assertEqual(mirrors, {"origin": "ssh://git@example.com/account/repo.git"})
self.assertEqual(
mirrors,
{
"origin": "ssh://git@example.com/account/repo.git",
"backup": "ssh://git@backup/account/repo.git",
},
)
def test_load_config_mirrors_empty_when_missing(self) -> None: def test_load_config_mirrors_empty_when_missing(self) -> None:
repo = {} self.assertEqual(load_config_mirrors({}), {})
mirrors = load_config_mirrors(repo)
self.assertEqual(mirrors, {})
# ------------------------------------------------------------------ def test_read_mirrors_file_parses_named_entries(self) -> None:
# read_mirrors_file
# ------------------------------------------------------------------
def test_read_mirrors_file_with_named_and_url_only_entries(self) -> None:
"""
Ensure that the MIRRORS file format is parsed correctly:
- 'name url' → exact name
- 'url' → auto name derived from netloc (host[:port]),
with numeric suffix if duplicated.
"""
with tempfile.TemporaryDirectory() as tmpdir: with tempfile.TemporaryDirectory() as tmpdir:
mirrors_path = os.path.join(tmpdir, "MIRRORS") p = os.path.join(tmpdir, "MIRRORS")
content = "\n".join( with open(p, "w", encoding="utf-8") as fh:
[ fh.write("origin ssh://git@example.com/account/repo.git\n")
"# comment",
"",
"origin ssh://git@example.com/account/repo.git",
"https://github.com/kevinveenbirkenbach/package-manager",
"https://github.com/kevinveenbirkenbach/another-repo",
"ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git",
]
)
with open(mirrors_path, "w", encoding="utf-8") as fh:
fh.write(content + "\n")
mirrors = read_mirrors_file(tmpdir) mirrors = read_mirrors_file(tmpdir)
# 'origin' is preserved as given self.assertEqual(mirrors, {"origin": "ssh://git@example.com/account/repo.git"})
self.assertIn("origin", mirrors)
self.assertEqual(
mirrors["origin"],
"ssh://git@example.com/account/repo.git",
)
# Two GitHub URLs → auto names: github.com, github.com2 def test_read_mirrors_file_url_only_uses_netloc_basename_and_suffix(self) -> None:
github_urls = { with tempfile.TemporaryDirectory() as tmpdir:
mirrors.get("github.com"), p = os.path.join(tmpdir, "MIRRORS")
mirrors.get("github.com2"), with open(p, "w", encoding="utf-8") as fh:
} fh.write(
self.assertIn( "\n".join(
"https://github.com/kevinveenbirkenbach/package-manager", [
github_urls, "https://github.com/alice/repo1",
) "https://github.com/alice/repo2",
self.assertIn( "ssh://git@git.veen.world:2201/alice/repo3.git",
"https://github.com/kevinveenbirkenbach/another-repo", ]
github_urls, )
) + "\n"
)
mirrors = read_mirrors_file(tmpdir)
self.assertIn("github.com", mirrors)
self.assertIn("github.com2", mirrors)
self.assertEqual(mirrors["github.com"], "https://github.com/alice/repo1")
self.assertEqual(mirrors["github.com2"], "https://github.com/alice/repo2")
# SSH-URL mit User-Teil → netloc ist "git@git.veen.world:2201"
# → host = "git@git.veen.world"
self.assertIn("git@git.veen.world", mirrors) self.assertIn("git@git.veen.world", mirrors)
self.assertEqual( self.assertEqual(mirrors["git@git.veen.world"], "ssh://git@git.veen.world:2201/alice/repo3.git")
mirrors["git@git.veen.world"],
"ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git",
)
def test_read_mirrors_file_missing_returns_empty(self) -> None: def test_read_mirrors_file_missing_returns_empty(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir: with tempfile.TemporaryDirectory() as tmpdir:
mirrors = read_mirrors_file(tmpdir) # no MIRRORS file self.assertEqual(read_mirrors_file(tmpdir), {})
self.assertEqual(mirrors, {})
def test_write_mirrors_file_writes_sorted_lines(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
mirrors = {
"b": "ssh://b.example/repo.git",
"a": "ssh://a.example/repo.git",
}
write_mirrors_file(tmpdir, mirrors, preview=False)
p = os.path.join(tmpdir, "MIRRORS")
self.assertTrue(os.path.exists(p))
with open(p, "r", encoding="utf-8") as fh:
content = fh.read()
self.assertEqual(content, "a ssh://a.example/repo.git\nb ssh://b.example/repo.git\n")
def test_write_mirrors_file_preview_does_not_create_file(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
mirrors = {"a": "ssh://a.example/repo.git"}
write_mirrors_file(tmpdir, mirrors, preview=True)
p = os.path.join(tmpdir, "MIRRORS")
self.assertFalse(os.path.exists(p))
if __name__ == "__main__": if __name__ == "__main__":

View File

@@ -0,0 +1,72 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import io
import unittest
from contextlib import redirect_stdout
from unittest.mock import MagicMock, PropertyMock, patch
from pkgmgr.actions.mirror.list_cmd import list_mirrors
class TestListCmd(unittest.TestCase):
    """
    Unit tests for mirror list output.

    build_context is stubbed with a MagicMock; resolved_mirrors is provided
    through PropertyMock on type(ctx) so the property lookup is intercepted.
    """
    @patch("pkgmgr.actions.mirror.list_cmd.build_context")
    def test_list_mirrors_all_sources_prints_sections(self, mock_build_context) -> None:
        # source="all" must print all three sections.
        ctx = MagicMock()
        ctx.identifier = "id"
        ctx.repo_dir = "/tmp/repo"
        ctx.config_mirrors = {"origin": "a"}
        ctx.file_mirrors = {"backup": "b"}
        type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "a", "backup": "b"})
        mock_build_context.return_value = ctx
        buf = io.StringIO()
        with redirect_stdout(buf):
            list_mirrors(
                selected_repos=[{}],
                repositories_base_dir="/base",
                all_repos=[],
                source="all",
            )
        out = buf.getvalue()
        self.assertIn("[config mirrors]", out)
        self.assertIn("[MIRRORS file]", out)
        self.assertIn("[resolved mirrors]", out)
        self.assertIn("origin: a", out)
        self.assertIn("backup: b", out)
    @patch("pkgmgr.actions.mirror.list_cmd.build_context")
    def test_list_mirrors_config_only(self, mock_build_context) -> None:
        # source="config" must suppress the file and resolved sections.
        ctx = MagicMock()
        ctx.identifier = "id"
        ctx.repo_dir = "/tmp/repo"
        ctx.config_mirrors = {"origin": "a"}
        ctx.file_mirrors = {"backup": "b"}
        type(ctx).resolved_mirrors = PropertyMock(return_value={"origin": "a", "backup": "b"})
        mock_build_context.return_value = ctx
        buf = io.StringIO()
        with redirect_stdout(buf):
            list_mirrors(
                selected_repos=[{}],
                repositories_base_dir="/base",
                all_repos=[],
                source="config",
            )
        out = buf.getvalue()
        self.assertIn("[config mirrors]", out)
        self.assertIn("origin: a", out)
        self.assertNotIn("[MIRRORS file]", out)
        self.assertNotIn("[resolved mirrors]", out)
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,52 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import unittest
from unittest.mock import patch
from pkgmgr.actions.mirror.remote_check import probe_mirror
from pkgmgr.core.git import GitError
class TestRemoteCheck(unittest.TestCase):
    """
    Unit tests for non-destructive remote probing (git ls-remote).
    """

    # Shared fixture URL probed by both tests.
    _URL = "ssh://git@code.example.org:2201/alice/repo.git"

    @patch("pkgmgr.actions.mirror.remote_check.run_git")
    def test_probe_mirror_success_returns_true_and_empty_message(self, m_run_git) -> None:
        m_run_git.return_value = "dummy-output"
        ok, message = probe_mirror(self._URL, "/tmp/some-repo")
        self.assertTrue(ok)
        self.assertEqual(message, "")
        m_run_git.assert_called_once_with(
            ["ls-remote", self._URL],
            cwd="/tmp/some-repo",
        )

    @patch("pkgmgr.actions.mirror.remote_check.run_git")
    def test_probe_mirror_failure_returns_false_and_error_message(self, m_run_git) -> None:
        # A GitError must be caught and surfaced as (False, <message>).
        m_run_git.side_effect = GitError("Git command failed (simulated)")
        ok, message = probe_mirror(self._URL, "/tmp/some-repo")
        self.assertFalse(ok)
        self.assertIn("Git command failed", message)
        m_run_git.assert_called_once_with(
            ["ls-remote", self._URL],
            cwd="/tmp/some-repo",
        )


if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,114 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import unittest
from unittest.mock import MagicMock, PropertyMock, patch
from pkgmgr.actions.mirror.remote_provision import ensure_remote_repository
class TestRemoteProvision(unittest.TestCase):
    """
    Unit tests for remote provisioning wrapper logic (action layer).

    The three collaborators (build_context, determine_primary_remote_url,
    ensure_remote_repo) are patched; decorators apply bottom-up, so the
    mock arguments arrive in that order.
    """
    @patch("pkgmgr.actions.mirror.remote_provision.ensure_remote_repo")
    @patch("pkgmgr.actions.mirror.remote_provision.determine_primary_remote_url")
    @patch("pkgmgr.actions.mirror.remote_provision.build_context")
    def test_ensure_remote_repository_builds_spec_from_url_and_calls_core(
        self,
        mock_build_context,
        mock_determine_primary,
        mock_ensure_remote_repo,
    ) -> None:
        # The spec (host/owner/name) must be derived from the primary URL,
        # not from the repo dict fields.
        ctx = MagicMock()
        type(ctx).resolved_mirrors = PropertyMock(
            return_value={"origin": "ssh://git@git.veen.world:2201/alice/repo.git"}
        )
        ctx.identifier = "repo-id"
        mock_build_context.return_value = ctx
        mock_determine_primary.return_value = "ssh://git@git.veen.world:2201/alice/repo.git"
        result = MagicMock()
        result.status = "created"
        result.message = "Repository created (user)."
        result.url = "https://git.veen.world/alice/repo"
        mock_ensure_remote_repo.return_value = result
        # Sentinel values prove the URL is authoritative over the repo dict.
        repo = {
            "provider": "gitea",
            "account": "SHOULD_NOT_BE_USED_ANYMORE",
            "repository": "SHOULD_NOT_BE_USED_ANYMORE",
            "private": True,
            "description": "desc",
        }
        ensure_remote_repository(
            repo=repo,
            repositories_base_dir="/base",
            all_repos=[],
            preview=False,
        )
        self.assertTrue(mock_ensure_remote_repo.called)
        # First positional argument to the core call is the built spec.
        called_spec = mock_ensure_remote_repo.call_args[0][0]
        self.assertEqual(called_spec.host, "git.veen.world")
        self.assertEqual(called_spec.owner, "alice")
        self.assertEqual(called_spec.name, "repo")
    @patch("pkgmgr.actions.mirror.remote_provision.ensure_remote_repo")
    @patch("pkgmgr.actions.mirror.remote_provision.determine_primary_remote_url")
    @patch("pkgmgr.actions.mirror.remote_provision.build_context")
    def test_ensure_remote_repository_skips_when_no_primary_url(
        self,
        mock_build_context,
        mock_determine_primary,
        mock_ensure_remote_repo,
    ) -> None:
        # No primary URL resolvable -> provisioning must be skipped entirely.
        ctx = MagicMock()
        type(ctx).resolved_mirrors = PropertyMock(return_value={})
        ctx.identifier = "repo-id"
        mock_build_context.return_value = ctx
        mock_determine_primary.return_value = None
        ensure_remote_repository(
            repo={"provider": "gitea"},
            repositories_base_dir="/base",
            all_repos=[],
            preview=False,
        )
        mock_ensure_remote_repo.assert_not_called()
    @patch("pkgmgr.actions.mirror.remote_provision.ensure_remote_repo")
    @patch("pkgmgr.actions.mirror.remote_provision.determine_primary_remote_url")
    @patch("pkgmgr.actions.mirror.remote_provision.build_context")
    def test_ensure_remote_repository_skips_when_url_not_parseable(
        self,
        mock_build_context,
        mock_determine_primary,
        mock_ensure_remote_repo,
    ) -> None:
        # URL lacking an owner/name path -> spec cannot be built -> skip.
        ctx = MagicMock()
        type(ctx).resolved_mirrors = PropertyMock(
            return_value={"origin": "ssh://git@host:2201/not-enough-parts"}
        )
        ctx.identifier = "repo-id"
        mock_build_context.return_value = ctx
        mock_determine_primary.return_value = "ssh://git@host:2201/not-enough-parts"
        ensure_remote_repository(
            repo={"provider": "gitea"},
            repositories_base_dir="/base",
            all_repos=[],
            preview=False,
        )
        mock_ensure_remote_repo.assert_not_called()
if __name__ == "__main__":
    unittest.main()

View File

@@ -1,58 +1,101 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations from __future__ import annotations
import unittest import unittest
from unittest.mock import patch from unittest.mock import patch
from pkgmgr.actions.mirror.setup_cmd import _probe_mirror from pkgmgr.actions.mirror.setup_cmd import setup_mirrors
from pkgmgr.core.git import GitError from pkgmgr.actions.mirror.types import RepoMirrorContext
class TestMirrorSetupCmd(unittest.TestCase): class TestMirrorSetupCmd(unittest.TestCase):
""" def _ctx(
Unit tests for the non-destructive remote probing logic in setup_cmd.
"""
@patch("pkgmgr.actions.mirror.setup_cmd.run_git")
def test_probe_mirror_success_returns_true_and_empty_message(
self, self,
mock_run_git, *,
) -> None: repo_dir: str = "/tmp/repo",
""" resolved: dict[str, str] | None = None,
If run_git returns successfully, _probe_mirror must report (True, ""). ) -> RepoMirrorContext:
""" # RepoMirrorContext derives resolved via property (config + file)
mock_run_git.return_value = "dummy-output" # We feed mirrors via file_mirrors to keep insertion order realistic.
return RepoMirrorContext(
ok, message = _probe_mirror( identifier="repo-id",
"ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git", repo_dir=repo_dir,
"/tmp/some-repo", config_mirrors={},
file_mirrors=resolved or {},
) )
self.assertTrue(ok) @patch("pkgmgr.actions.mirror.setup_cmd.build_context")
self.assertEqual(message, "") @patch("pkgmgr.actions.mirror.setup_cmd.ensure_origin_remote")
mock_run_git.assert_called_once() def test_setup_mirrors_local_calls_ensure_origin_remote(self, m_ensure, m_ctx) -> None:
m_ctx.return_value = self._ctx(repo_dir="/tmp/repo", resolved={"primary": "git@x/y.git"})
@patch("pkgmgr.actions.mirror.setup_cmd.run_git") repos = [{"provider": "github.com", "account": "alice", "repository": "repo"}]
def test_probe_mirror_failure_returns_false_and_error_message( setup_mirrors(
self, selected_repos=repos,
mock_run_git, repositories_base_dir="/tmp",
) -> None: all_repos=repos,
""" preview=True,
If run_git raises GitError, _probe_mirror must report (False, <message>), local=True,
and not re-raise the exception. remote=False,
""" ensure_remote=False,
mock_run_git.side_effect = GitError("Git command failed (simulated)")
ok, message = _probe_mirror(
"ssh://git@code.cymais.cloud:2201/kevinveenbirkenbach/pkgmgr.git",
"/tmp/some-repo",
) )
self.assertFalse(ok) self.assertEqual(m_ensure.call_count, 1)
self.assertIn("Git command failed", message) args, kwargs = m_ensure.call_args
mock_run_git.assert_called_once()
# ensure_origin_remote(repo, ctx, preview) may be positional or kw.
# Accept both to avoid coupling tests to call style.
if "preview" in kwargs:
self.assertTrue(kwargs["preview"])
else:
# args: (repo, ctx, preview)
self.assertTrue(args[2])
@patch("pkgmgr.actions.mirror.setup_cmd.build_context")
@patch("pkgmgr.actions.mirror.setup_cmd.probe_mirror")
@patch("pkgmgr.actions.mirror.setup_cmd.determine_primary_remote_url")
def test_setup_mirrors_remote_no_mirrors_probes_primary(self, m_primary, m_probe, m_ctx) -> None:
    """When no mirrors are resolved, the remote pass falls back to probing the primary URL."""
    primary_url = "git@github.com:alice/repo.git"
    m_ctx.return_value = self._ctx(repo_dir="/tmp/repo", resolved={})
    m_primary.return_value = primary_url
    m_probe.return_value = (True, "")
    repo_list = [{"provider": "github.com", "account": "alice", "repository": "repo"}]
    setup_mirrors(
        selected_repos=repo_list,
        repositories_base_dir="/tmp",
        all_repos=repo_list,
        preview=True,
        local=False,
        remote=True,
        ensure_remote=False,
    )
    # The primary URL must have been looked up and then probed against the repo dir.
    m_primary.assert_called()
    m_probe.assert_called_with(primary_url, "/tmp/repo")
@patch("pkgmgr.actions.mirror.setup_cmd.build_context")
@patch("pkgmgr.actions.mirror.setup_cmd.probe_mirror")
def test_setup_mirrors_remote_with_mirrors_probes_each(self, m_probe, m_ctx) -> None:
    """Every resolved mirror URL is probed once during the remote pass."""
    mirror_map = {
        "origin": "git@github.com:alice/repo.git",
        "backup": "ssh://git@git.veen.world:2201/alice/repo.git",
    }
    m_ctx.return_value = self._ctx(repo_dir="/tmp/repo", resolved=mirror_map)
    m_probe.return_value = (True, "")
    repo_list = [{"provider": "github.com", "account": "alice", "repository": "repo"}]
    setup_mirrors(
        selected_repos=repo_list,
        repositories_base_dir="/tmp",
        all_repos=repo_list,
        preview=True,
        local=False,
        remote=True,
        ensure_remote=False,
    )
    # One probe per configured mirror (here: 2).
    self.assertEqual(m_probe.call_count, len(mirror_map))
if __name__ == "__main__": if __name__ == "__main__":

View File

@@ -0,0 +1,77 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import unittest
from pkgmgr.actions.mirror.url_utils import hostport_from_git_url, normalize_provider_host, parse_repo_from_git_url
class TestUrlUtils(unittest.TestCase):
    """
    Unit tests for the URL parsing helpers that back mirror setup/provisioning.

    Each helper is exercised across the URL shapes pkgmgr accepts:
    ssh:// URLs (with and without a port), https:// URLs, scp-like
    ``user@host:path`` forms, and degenerate/empty input.
    """

    def test_hostport_from_git_url_ssh_url_with_port(self) -> None:
        result = hostport_from_git_url("ssh://git@code.example.org:2201/alice/repo.git")
        self.assertEqual(result, ("code.example.org", "2201"))

    def test_hostport_from_git_url_https_url_no_port(self) -> None:
        result = hostport_from_git_url("https://github.com/alice/repo.git")
        self.assertEqual(result, ("github.com", None))

    def test_hostport_from_git_url_scp_like(self) -> None:
        # scp-like syntax: the colon separates host from path, not a port.
        result = hostport_from_git_url("git@github.com:alice/repo.git")
        self.assertEqual(result, ("github.com", None))

    def test_hostport_from_git_url_empty(self) -> None:
        result = hostport_from_git_url("")
        self.assertEqual(result, ("", None))

    def test_normalize_provider_host_strips_port_and_lowercases(self) -> None:
        self.assertEqual(normalize_provider_host("GIT.VEEN.WORLD:2201"), "git.veen.world")

    def test_normalize_provider_host_ipv6_brackets(self) -> None:
        # Bracketed IPv6 literals lose their brackets on normalization.
        self.assertEqual(normalize_provider_host("[::1]"), "::1")

    def test_normalize_provider_host_empty(self) -> None:
        self.assertEqual(normalize_provider_host(""), "")

    def test_parse_repo_from_git_url_ssh_url(self) -> None:
        parsed = parse_repo_from_git_url("ssh://git@code.example.org:2201/alice/repo.git")
        self.assertEqual(parsed, ("code.example.org", "alice", "repo"))

    def test_parse_repo_from_git_url_https_url(self) -> None:
        parsed = parse_repo_from_git_url("https://github.com/alice/repo.git")
        self.assertEqual(parsed, ("github.com", "alice", "repo"))

    def test_parse_repo_from_git_url_scp_like(self) -> None:
        parsed = parse_repo_from_git_url("git@github.com:alice/repo.git")
        self.assertEqual(parsed, ("github.com", "alice", "repo"))

    def test_parse_repo_from_git_url_best_effort_host_owner_repo(self) -> None:
        # No scheme and no user@ prefix: parsed best-effort as host/owner/repo.
        parsed = parse_repo_from_git_url("git.veen.world/alice/repo.git")
        self.assertEqual(parsed, ("git.veen.world", "alice", "repo"))

    def test_parse_repo_from_git_url_missing_owner_repo_returns_none(self) -> None:
        host, owner, name = parse_repo_from_git_url("https://github.com/")
        self.assertEqual(host, "github.com")
        self.assertIsNone(owner)
        self.assertIsNone(name)


if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,20 @@
import unittest
from unittest.mock import patch
from pkgmgr.actions.publish.git_tags import head_semver_tags
class TestHeadSemverTags(unittest.TestCase):
    """Tests for head_semver_tags() with the underlying git invocation mocked out."""

    @patch("pkgmgr.actions.publish.git_tags.run_git")
    def test_no_tags(self, mock_run_git):
        # Empty git output means no tags point at HEAD.
        mock_run_git.return_value = ""
        self.assertEqual(head_semver_tags(), [])

    @patch("pkgmgr.actions.publish.git_tags.run_git")
    def test_filters_and_sorts_semver(self, mock_run_git):
        # Non-semver entries ("foo") are dropped; semver tags are returned in order.
        mock_run_git.return_value = "v1.0.0\nv2.0.0\nfoo\n"
        expected = ["v1.0.0", "v2.0.0"]
        self.assertEqual(expected, head_semver_tags())

View File

@@ -0,0 +1,13 @@
import unittest
from pkgmgr.actions.publish.pypi_url import parse_pypi_project_url
class TestParsePyPIUrl(unittest.TestCase):
    """Tests for parse_pypi_project_url()."""

    def test_valid_pypi_url(self):
        # A canonical pypi.org project URL yields a parse result with the project name.
        parsed = parse_pypi_project_url("https://pypi.org/project/example/")
        self.assertIsNotNone(parsed)
        self.assertEqual(parsed.project, "example")

    def test_invalid_url(self):
        # Anything outside pypi.org/project/... is rejected with None.
        result = parse_pypi_project_url("https://example.com/foo")
        self.assertIsNone(result)

View File

@@ -0,0 +1,21 @@
import unittest
from unittest.mock import patch
from pkgmgr.actions.publish.workflow import publish
class TestPublishWorkflowPreview(unittest.TestCase):
    """Smoke test: publish() in preview mode must not attempt a real build/upload."""

    @patch("pkgmgr.actions.publish.workflow.read_mirrors_file")
    @patch("pkgmgr.actions.publish.workflow.head_semver_tags")
    def test_preview_does_not_build(self, mock_tags, mock_mirrors):
        mock_mirrors.return_value = {"pypi": "https://pypi.org/project/example/"}
        mock_tags.return_value = ["v1.0.0"]
        # Must return without raising; a real build would shell out and fail here.
        publish(
            repo={},
            repo_dir=".",
            preview=True,
        )

View File

@@ -0,0 +1,62 @@
from __future__ import annotations
import unittest
from pkgmgr.actions.repository.create import (
RepoParts,
_parse_identifier,
_parse_git_url,
_strip_git_suffix,
_split_host_port,
)
class TestRepositoryCreateParsing(unittest.TestCase):
    """Tests for the identifier/URL parsing helpers in repository.create."""

    def _check_parts(self, parts, host, port, owner, name) -> None:
        # Shared helper: compare all four RepoParts fields in one assertion.
        self.assertEqual(
            (parts.host, parts.port, parts.owner, parts.name),
            (host, port, owner, name),
        )

    def test_strip_git_suffix(self) -> None:
        for raw, expected in (("repo.git", "repo"), ("repo", "repo")):
            self.assertEqual(_strip_git_suffix(raw), expected)

    def test_split_host_port(self) -> None:
        cases = {
            "example.com": ("example.com", None),
            "example.com:2222": ("example.com", "2222"),
            # A trailing colon with no digits is treated as "no port".
            "example.com:": ("example.com", None),
        }
        for raw, expected in cases.items():
            self.assertEqual(_split_host_port(raw), expected)

    def test_parse_identifier_plain(self) -> None:
        parts = _parse_identifier("github.com/owner/repo")
        self.assertIsInstance(parts, RepoParts)
        self._check_parts(parts, "github.com", None, "owner", "repo")

    def test_parse_identifier_with_port(self) -> None:
        parts = _parse_identifier("gitea.example.com:2222/org/repo")
        self._check_parts(parts, "gitea.example.com", "2222", "org", "repo")

    def test_parse_git_url_scp_style(self) -> None:
        parts = _parse_git_url("git@github.com:owner/repo.git")
        self._check_parts(parts, "github.com", None, "owner", "repo")

    def test_parse_git_url_https(self) -> None:
        parts = _parse_git_url("https://github.com/owner/repo.git")
        self._check_parts(parts, "github.com", None, "owner", "repo")

    def test_parse_git_url_ssh_with_port(self) -> None:
        parts = _parse_git_url("ssh://git@gitea.example.com:2222/org/repo.git")
        self._check_parts(parts, "gitea.example.com", "2222", "org", "repo")


if __name__ == "__main__":
    unittest.main()

Some files were not shown because too many files have changed in this diff Show More