Compare commits

23 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 94b998741f |  |
|  | 172c734866 |  |
|  | 1b483e178d |  |
|  | 78693225f1 |  |
|  | ca08c84789 |  |
|  | e930b422e5 |  |
|  | 0833d04376 |  |
|  | 55f36d76ec |  |
|  | 6a838ee84f |  |
|  | 4285bf4a54 |  |
|  | 640b1042c2 |  |
|  | 9357c4632e |  |
|  | ca5d0d22f3 |  |
|  | 3875338fb7 |  |
|  | 196f55c58e |  |
|  | 9a149715f6 |  |
|  | bf40533469 |  |
|  | 7bc7259988 |  |
|  | 66b96ac3a5 |  |
|  | f974e0b14a |  |
|  | de8c3f768d |  |
|  | 05ff250251 |  |
|  | ab52d37467 |  |
.github/workflows/test-container.yml (vendored, 2 changes)

@@ -1,4 +1,4 @@
-name: Test Distribution Containers
+name: Test OS Containers
 
 on:
   push:
.github/workflows/test-e2e.yml (vendored, 2 changes)

@@ -1,4 +1,4 @@
-name: Test package-manager (e2e)
+name: Test End-To-End
 
 on:
   push:
.github/workflows/test-integration.yml (vendored, 8 changes)

@@ -1,4 +1,4 @@
-name: Test package-manager (integration)
+name: Test Code Integration
 
 on:
   push:

@@ -21,9 +21,5 @@ jobs:
       - name: Show Docker version
         run: docker version
 
-      # Build Arch test image (same as used in test-unit and test-e2e)
-      - name: Build test images
-        run: make build
-
       - name: Run integration tests via make (Arch container)
-        run: make test-integration
+        run: make test-integration DISTROS="arch"
.github/workflows/test-unit.yml (vendored, 4 changes)

@@ -1,4 +1,4 @@
-name: Test package-manager (unit)
+name: Test Units
 
 on:
   push:

@@ -22,4 +22,4 @@ jobs:
         run: docker version
 
       - name: Run unit tests via make (Arch container)
-        run: make test-unit
+        run: make test-unit DISTROS="arch"
CHANGELOG.md (50 changes)

@@ -1,3 +1,53 @@
+## [0.7.11] - 2025-12-09
+
+* test: fix installer unit tests for OS packages and Nix dev shell
+
+
+## [0.7.10] - 2025-12-09
+
+* Fixed test_install_pkgmgr_shallow.py
+
+
+## [0.7.9] - 2025-12-09
+
+* 'main' and 'master' are now both accepted as branches for branch close merge
+
+
+## [0.7.8] - 2025-12-09
+
+* Missing pyproject.toml doesn't lead to an error during release
+
+
+## [0.7.7] - 2025-12-09
+
+* Added TEST_PATTERN parameter to execute dedicated tests
+
+
+## [0.7.6] - 2025-12-09
+
+* Fixed pull --preview bug in e2e test
+
+
+## [0.7.5] - 2025-12-09
+
+* Fixed wrong directory permissions for nix
+
+
+## [0.7.4] - 2025-12-09
+
+* Fixed missing build in test workflow -> Tests pass now
+
+
+## [0.7.3] - 2025-12-09
+
+* Fixed bug: Ignored packages are now ignored
+
+
+## [0.7.2] - 2025-12-09
+
+* Implemented Changelog Support for Fedora and Debian
+
+
 ## [0.7.1] - 2025-12-09
 
 * Fix floating 'latest' tag logic: dereference annotated target (vX.Y.Z^{}), add tag message to avoid Git errors, ensure best-effort update without blocking releases, and update unit tests (see ChatGPT conversation: https://chatgpt.com/share/69383024-efa4-800f-a875-129b81fa40ff).
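The 0.7.1 entry above describes the floating 'latest' tag fix. The implementation itself is not part of this compare view, so the following is only a hedged sketch of what a best-effort 'latest' tag update along those lines could look like (all names and commands here are illustrative, not the project's actual code):

# Sketch (assumption): re-point a floating 'latest' tag at the annotated
# release tag, without ever failing the release if the update does not work.
import subprocess

def update_latest_tag(release_tag: str) -> None:
    try:
        # Dereference the annotated tag object to its commit (vX.Y.Z^{}).
        target = subprocess.run(
            ["git", "rev-parse", f"{release_tag}^{{}}"],
            capture_output=True, text=True, check=True,
        ).stdout.strip()
        # Recreate 'latest' as an annotated tag with a message, so tag
        # creation cannot fail for lack of one.
        subprocess.run(
            ["git", "tag", "-f", "-a", "latest", "-m", f"latest -> {release_tag}", target],
            check=True,
        )
        subprocess.run(["git", "push", "--force", "origin", "latest"], check=True)
    except subprocess.CalledProcessError as exc:
        # Best effort only: a failed 'latest' update must not block the release.
        print(f"[WARN] Could not update 'latest' tag: {exc}")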
Makefile (16 changes)

@@ -12,7 +12,7 @@ NIX_CACHE_VOLUME := pkgmgr_nix_cache
 # Distro list and base images
 # (kept for documentation/reference; actual build logic is in scripts/build)
 # ------------------------------------------------------------
-DISTROS := arch debian ubuntu fedora centos
+DISTROS := arch debian ubuntu fedora centos
 BASE_IMAGE_ARCH := archlinux:latest
 BASE_IMAGE_DEBIAN := debian:stable-slim
 BASE_IMAGE_UBUNTU := ubuntu:latest

@@ -27,6 +27,10 @@ export BASE_IMAGE_UBUNTU
 export BASE_IMAGE_FEDORA
 export BASE_IMAGE_CENTOS
 
+# PYthon Unittest Pattern
+TEST_PATTERN := test_*.py
+export TEST_PATTERN
+
 # ------------------------------------------------------------
 # PKGMGR setup (developer wrapper -> scripts/installation/main.sh)
 # ------------------------------------------------------------

@@ -46,16 +50,16 @@ build:
 # Test targets (delegated to scripts/test)
 # ------------------------------------------------------------
 
-test-unit:
+test-unit: build-missing
 	@bash scripts/test/test-unit.sh
 
-test-integration:
+test-integration: build-missing
 	@bash scripts/test/test-integration.sh
 
-test-e2e:
+test-e2e: build-missing
 	@bash scripts/test/test-e2e.sh
 
-test-container:
+test-container: build-missing
 	@bash scripts/test/test-container.sh
 
 # ------------------------------------------------------------

@@ -65,7 +69,7 @@ build-missing:
 	@bash scripts/build/build-image-missing.sh
 
 # Combined test target for local + CI (unit + e2e + integration)
-test: build-missing test-container test-unit test-e2e test-integration
+test: test-container test-unit test-e2e test-integration
 
 # ------------------------------------------------------------
 # System install (native packages, calls scripts/installation/run-package.sh)
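The TEST_PATTERN variable introduced above defaults to the common test_*.py discovery pattern and is exported to the test scripts, matching the 0.7.7 changelog entry about running dedicated tests. The actual consumption happens in scripts/test/*.sh, which are not part of this diff, so the following is only a minimal sketch of how a runner inside a test container could honour the variable:

# Minimal sketch (assumption): a unittest runner that respects the exported
# TEST_PATTERN variable; the "tests" directory name is also an assumption.
import os
import sys
import unittest

pattern = os.environ.get("TEST_PATTERN", "test_*.py")
suite = unittest.defaultTestLoader.discover("tests", pattern=pattern)
result = unittest.TextTestRunner(verbosity=2).run(suite)
sys.exit(0 if result.wasSuccessful() else 1)

On the Make side this would correspond to invocations such as make test-unit TEST_PATTERN="test_install_*.py" DISTROS="arch", overriding the default pattern for a single run.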
PKGBUILD (2 changes)

@@ -1,7 +1,7 @@
 # Maintainer: Kevin Veen-Birkenbach <info@veen.world>
 
 pkgname=package-manager
-pkgver=0.7.1
+pkgver=0.7.11
 pkgrel=1
 pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
 arch=('any')
debian/changelog (vendored, 60 changes)

@@ -1,3 +1,63 @@
+package-manager (0.7.11-1) unstable; urgency=medium
+
+  * test: fix installer unit tests for OS packages and Nix dev shell
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Tue, 09 Dec 2025 23:16:46 +0100
+
+package-manager (0.7.10-1) unstable; urgency=medium
+
+  * Fixed test_install_pkgmgr_shallow.py
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Tue, 09 Dec 2025 22:57:08 +0100
+
+package-manager (0.7.9-1) unstable; urgency=medium
+
+  * 'main' and 'master' are now both accepted as branches for branch close merge
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Tue, 09 Dec 2025 21:19:13 +0100
+
+package-manager (0.7.8-1) unstable; urgency=medium
+
+  * Missing pyproject.toml doesn't lead to an error during release
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Tue, 09 Dec 2025 21:03:24 +0100
+
+package-manager (0.7.7-1) unstable; urgency=medium
+
+  * Added TEST_PATTERN parameter to execute dedicated tests
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Tue, 09 Dec 2025 17:54:38 +0100
+
+package-manager (0.7.6-1) unstable; urgency=medium
+
+  * Fixed pull --preview bug in e2e test
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Tue, 09 Dec 2025 17:14:19 +0100
+
+package-manager (0.7.5-1) unstable; urgency=medium
+
+  * Fixed wrong directory permissions for nix
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Tue, 09 Dec 2025 16:45:42 +0100
+
+package-manager (0.7.4-1) unstable; urgency=medium
+
+  * Fixed missing build in test workflow -> Tests pass now
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Tue, 09 Dec 2025 16:22:00 +0100
+
+package-manager (0.7.3-1) unstable; urgency=medium
+
+  * Fixed bug: Ignored packages are now ignored
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Tue, 09 Dec 2025 16:08:31 +0100
+
+package-manager (0.7.2-1) unstable; urgency=medium
+
+  * Implemented Changelog Support for Fedora and Debian
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Tue, 09 Dec 2025 15:48:58 +0100
+
 package-manager (0.7.1-1) unstable; urgency=medium
 
   * Fix floating 'latest' tag logic: dereference annotated target (vX.Y.Z^{}), add tag message to avoid Git errors, ensure best-effort update without blocking releases, and update unit tests (see ChatGPT conversation: https://chatgpt.com/share/69383024-efa4-800f-a875-129b81fa40ff).
flake.nix

@@ -31,7 +31,7 @@
 rec {
   pkgmgr = pyPkgs.buildPythonApplication {
     pname = "package-manager";
-    version = "0.7.1";
+    version = "0.7.11";
 
     # Use the git repo as source
     src = ./.;
package-manager.spec

@@ -1,5 +1,5 @@
 Name: package-manager
-Version: 0.7.1
+Version: 0.7.11
 Release: 1%{?dist}
 Summary: Wrapper that runs Kevin's package-manager via Nix flake
 

@@ -77,5 +77,35 @@ echo ">>> package-manager removed. Nix itself was not removed."
 /usr/lib/package-manager/
 
 %changelog
+* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.11-1
+- test: fix installer unit tests for OS packages and Nix dev shell
+
+* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.10-1
+- Fixed test_install_pkgmgr_shallow.py
+
+* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.9-1
+- 'main' and 'master' are now both accepted as branches for branch close merge
+
+* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.8-1
+- Missing pyproject.toml doesn't lead to an error during release
+
+* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.7-1
+- Added TEST_PATTERN parameter to execute dedicated tests
+
+* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.6-1
+- Fixed pull --preview bug in e2e test
+
+* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.5-1
+- Fixed wrong directory permissions for nix
+
+* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.4-1
+- Fixed missing build in test workflow -> Tests pass now
+
+* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.3-1
+- Fixed bug: Ignored packages are now ignored
+
+* Tue Dec 09 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 0.7.2-1
+- Implemented Changelog Support for Fedora and Debian
+
 * Sat Dec 06 2025 Kevin Veen-Birkenbach <info@veen.world> - 0.1.1-1
 - Initial RPM packaging for package-manager
@@ -1,4 +1,4 @@
-# pkgmgr/branch_commands.py
+# pkgmgr/actions/branch/__init__.py
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
@@ -16,30 +16,43 @@ from typing import Optional
|
||||
from pkgmgr.core.git import run_git, GitError, get_current_branch
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Branch creation (open)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def open_branch(
|
||||
name: Optional[str],
|
||||
base_branch: str = "main",
|
||||
fallback_base: str = "master",
|
||||
cwd: str = ".",
|
||||
) -> None:
|
||||
"""
|
||||
Create and push a new feature branch on top of `base_branch`.
|
||||
Create and push a new feature branch on top of a base branch.
|
||||
|
||||
The base branch is resolved by:
|
||||
1. Trying 'base_branch' (default: 'main')
|
||||
2. Falling back to 'fallback_base' (default: 'master')
|
||||
|
||||
Steps:
|
||||
1) git fetch origin
|
||||
2) git checkout <base_branch>
|
||||
3) git pull origin <base_branch>
|
||||
4) git checkout -b <name>
|
||||
5) git push -u origin <name>
|
||||
1) git fetch origin
|
||||
2) git checkout <resolved_base>
|
||||
3) git pull origin <resolved_base>
|
||||
4) git checkout -b <name>
|
||||
5) git push -u origin <name>
|
||||
|
||||
If `name` is None or empty, the user is prompted on stdin.
|
||||
If `name` is None or empty, the user is prompted to enter one.
|
||||
"""
|
||||
|
||||
# Request name interactively if not provided
|
||||
if not name:
|
||||
name = input("Enter new branch name: ").strip()
|
||||
|
||||
if not name:
|
||||
raise RuntimeError("Branch name must not be empty.")
|
||||
|
||||
# Resolve which base branch to use (main or master)
|
||||
resolved_base = _resolve_base_branch(base_branch, fallback_base, cwd=cwd)
|
||||
|
||||
# 1) Fetch from origin
|
||||
try:
|
||||
run_git(["fetch", "origin"], cwd=cwd)
|
||||
@@ -50,18 +63,18 @@ def open_branch(
|
||||
|
||||
# 2) Checkout base branch
|
||||
try:
|
||||
run_git(["checkout", base_branch], cwd=cwd)
|
||||
run_git(["checkout", resolved_base], cwd=cwd)
|
||||
except GitError as exc:
|
||||
raise RuntimeError(
|
||||
f"Failed to checkout base branch {base_branch!r}: {exc}"
|
||||
f"Failed to checkout base branch {resolved_base!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 3) Pull latest changes on base
|
||||
# 3) Pull latest changes for base branch
|
||||
try:
|
||||
run_git(["pull", "origin", base_branch], cwd=cwd)
|
||||
run_git(["pull", "origin", resolved_base], cwd=cwd)
|
||||
except GitError as exc:
|
||||
raise RuntimeError(
|
||||
f"Failed to pull latest changes for base branch {base_branch!r}: {exc}"
|
||||
f"Failed to pull latest changes for base branch {resolved_base!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 4) Create new branch
|
||||
@@ -69,10 +82,10 @@ def open_branch(
|
||||
run_git(["checkout", "-b", name], cwd=cwd)
|
||||
except GitError as exc:
|
||||
raise RuntimeError(
|
||||
f"Failed to create new branch {name!r} from base {base_branch!r}: {exc}"
|
||||
f"Failed to create new branch {name!r} from base {resolved_base!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 5) Push and set upstream
|
||||
# 5) Push new branch to origin
|
||||
try:
|
||||
run_git(["push", "-u", "origin", name], cwd=cwd)
|
||||
except GitError as exc:
|
||||
@@ -81,15 +94,21 @@ def open_branch(
|
||||
) from exc
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Base branch resolver (shared by open/close)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _resolve_base_branch(
|
||||
preferred: str,
|
||||
fallback: str,
|
||||
cwd: str,
|
||||
) -> str:
|
||||
"""
|
||||
Resolve the base branch to use for merging.
|
||||
Resolve the base branch to use.
|
||||
|
||||
Try `preferred` first (default: main),
|
||||
fall back to `fallback` (default: master).
|
||||
|
||||
Try `preferred` (default: main) first, then `fallback` (default: master).
|
||||
Raise RuntimeError if neither exists.
|
||||
"""
|
||||
for candidate in (preferred, fallback):
|
||||
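The per-candidate existence check inside this loop falls outside the lines shown in this hunk. Purely as an illustrative sketch (not the project's actual code), such a resolver could verify each candidate with the run_git/GitError helpers imported at the top of this module:

# Illustrative sketch only: how a main/master resolver could verify that a
# candidate base branch exists (uses the run_git/GitError helpers imported above).
def resolve_base_branch_sketch(preferred: str, fallback: str, cwd: str = ".") -> str:
    for candidate in (preferred, fallback):
        try:
            # 'git rev-parse --verify <branch>' fails if the branch is unknown.
            run_git(["rev-parse", "--verify", candidate], cwd=cwd)
            return candidate
        except GitError:
            continue
    raise RuntimeError(
        f"Neither {preferred!r} nor {fallback!r} exists in this repository."
    )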
@@ -104,6 +123,10 @@ def _resolve_base_branch(
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Branch closing (merge + deletion)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def close_branch(
|
||||
name: Optional[str],
|
||||
base_branch: str = "main",
|
||||
@@ -111,23 +134,22 @@ def close_branch(
|
||||
cwd: str = ".",
|
||||
) -> None:
|
||||
"""
|
||||
Merge a feature branch into the main/master branch and optionally delete it.
|
||||
Merge a feature branch into the base branch and delete it afterwards.
|
||||
|
||||
Steps:
|
||||
1) Determine branch name (argument or current branch)
|
||||
2) Resolve base branch (prefers `base_branch`, falls back to `fallback_base`)
|
||||
3) Ask for confirmation (y/N)
|
||||
4) git fetch origin
|
||||
5) git checkout <base>
|
||||
6) git pull origin <base>
|
||||
7) git merge --no-ff <name>
|
||||
8) git push origin <base>
|
||||
9) Delete branch locally and on origin
|
||||
|
||||
If the user does not confirm with 'y', the operation is aborted.
|
||||
1) Determine the branch name (argument or current branch)
|
||||
2) Resolve base branch (main/master)
|
||||
3) Ask for confirmation
|
||||
4) git fetch origin
|
||||
5) git checkout <base>
|
||||
6) git pull origin <base>
|
||||
7) git merge --no-ff <name>
|
||||
8) git push origin <base>
|
||||
9) Delete branch locally
|
||||
10) Delete branch on origin (best effort)
|
||||
"""
|
||||
|
||||
# 1) Determine which branch to close
|
||||
# 1) Determine which branch should be closed
|
||||
if not name:
|
||||
try:
|
||||
name = get_current_branch(cwd=cwd)
|
||||
@@ -137,7 +159,7 @@ def close_branch(
|
||||
if not name:
|
||||
raise RuntimeError("Branch name must not be empty.")
|
||||
|
||||
# 2) Resolve base branch (main/master)
|
||||
# 2) Resolve base branch
|
||||
target_base = _resolve_base_branch(base_branch, fallback_base, cwd=cwd)
|
||||
|
||||
if name == target_base:
|
||||
@@ -146,7 +168,7 @@ def close_branch(
|
||||
"Please specify a feature branch."
|
||||
)
|
||||
|
||||
# 3) Confirmation prompt
|
||||
# 3) Ask user for confirmation
|
||||
prompt = (
|
||||
f"Merge branch '{name}' into '{target_base}' and delete it afterwards? "
|
||||
"(y/N): "
|
||||
@@ -164,7 +186,7 @@ def close_branch(
|
||||
f"Failed to fetch from origin before closing branch {name!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 5) Checkout base branch
|
||||
# 5) Checkout base
|
||||
try:
|
||||
run_git(["checkout", target_base], cwd=cwd)
|
||||
except GitError as exc:
|
||||
@@ -172,7 +194,7 @@ def close_branch(
|
||||
f"Failed to checkout base branch {target_base!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 6) Pull latest base
|
||||
# 6) Pull latest base state
|
||||
try:
|
||||
run_git(["pull", "origin", target_base], cwd=cwd)
|
||||
except GitError as exc:
|
||||
@@ -180,7 +202,7 @@ def close_branch(
|
||||
f"Failed to pull latest changes for base branch {target_base!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 7) Merge feature branch into base
|
||||
# 7) Merge the feature branch
|
||||
try:
|
||||
run_git(["merge", "--no-ff", name], cwd=cwd)
|
||||
except GitError as exc:
|
||||
@@ -193,22 +215,21 @@ def close_branch(
|
||||
run_git(["push", "origin", target_base], cwd=cwd)
|
||||
except GitError as exc:
|
||||
raise RuntimeError(
|
||||
f"Failed to push base branch {target_base!r} to origin after merge: {exc}"
|
||||
f"Failed to push base branch {target_base!r} after merge: {exc}"
|
||||
) from exc
|
||||
|
||||
# 9) Delete feature branch locally
|
||||
# 9) Delete branch locally
|
||||
try:
|
||||
run_git(["branch", "-d", name], cwd=cwd)
|
||||
except GitError as exc:
|
||||
raise RuntimeError(
|
||||
f"Failed to delete local branch {name!r} after merge: {exc}"
|
||||
f"Failed to delete local branch {name!r}: {exc}"
|
||||
) from exc
|
||||
|
||||
# 10) Delete feature branch on origin (best effort)
|
||||
# 10) Delete branch on origin (best effort)
|
||||
try:
|
||||
run_git(["push", "origin", "--delete", name], cwd=cwd)
|
||||
except GitError as exc:
|
||||
# Remote delete is nice-to-have; surface as RuntimeError for clarity.
|
||||
raise RuntimeError(
|
||||
f"Branch {name!r} was deleted locally, but remote deletion failed: {exc}"
|
||||
) from exc
|
||||
|
||||
pkgmgr/release.py (the entire file, 761 lines, is removed in this diff; the removed content follows)

@@ -1,761 +0,0 @@
|
||||
# pkgmgr/release.py
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
pkgmgr/release.py
|
||||
|
||||
Release helper for pkgmgr.
|
||||
|
||||
Responsibilities (Milestone 7):
|
||||
- Determine the next semantic version based on existing Git tags.
|
||||
- Update pyproject.toml with the new version.
|
||||
- Update additional packaging files (flake.nix, PKGBUILD,
|
||||
debian/changelog, RPM spec) where present.
|
||||
- Prepend a basic entry to CHANGELOG.md.
|
||||
- Commit, tag, and push the release on the current branch.
|
||||
|
||||
Additional behaviour:
|
||||
- If `preview=True` (from --preview), no files are written and no
|
||||
Git commands are executed. Instead, a detailed summary of the
|
||||
planned changes and commands is printed.
|
||||
- If `preview=False` and not forced, the release is executed in two
|
||||
phases:
|
||||
1) Preview-only run (dry-run).
|
||||
2) Interactive confirmation, then real release if confirmed.
|
||||
This confirmation can be skipped with the `force=True` flag.
|
||||
- If `close=True` is used and the current branch is not main/master,
|
||||
the branch will be closed via branch_commands.close_branch() after
|
||||
a successful release.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from datetime import date, datetime
|
||||
from typing import Optional, Tuple
|
||||
|
||||
from pkgmgr.core.git import get_tags, get_current_branch, GitError
|
||||
from pkgmgr.actions.branch import close_branch
|
||||
from pkgmgr.core.version.semver import (
|
||||
SemVer,
|
||||
find_latest_version,
|
||||
bump_major,
|
||||
bump_minor,
|
||||
bump_patch,
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers for Git + version discovery
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _determine_current_version() -> SemVer:
|
||||
"""
|
||||
Determine the current semantic version from Git tags.
|
||||
|
||||
Behaviour:
|
||||
- If there are no tags or no SemVer-compatible tags, return 0.0.0.
|
||||
- Otherwise, use the latest SemVer tag as current version.
|
||||
"""
|
||||
tags = get_tags()
|
||||
if not tags:
|
||||
return SemVer(0, 0, 0)
|
||||
|
||||
latest = find_latest_version(tags)
|
||||
if latest is None:
|
||||
return SemVer(0, 0, 0)
|
||||
|
||||
_tag, ver = latest
|
||||
return ver
|
||||
|
||||
|
||||
def _bump_semver(current: SemVer, release_type: str) -> SemVer:
|
||||
"""
|
||||
Bump the given SemVer according to the release type.
|
||||
|
||||
release_type must be one of: "major", "minor", "patch".
|
||||
"""
|
||||
if release_type == "major":
|
||||
return bump_major(current)
|
||||
if release_type == "minor":
|
||||
return bump_minor(current)
|
||||
if release_type == "patch":
|
||||
return bump_patch(current)
|
||||
|
||||
raise ValueError(f"Unknown release type: {release_type!r}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Low-level Git command helper
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _run_git_command(cmd: str) -> None:
|
||||
"""
|
||||
Run a Git (or shell) command with basic error reporting.
|
||||
|
||||
The command is executed via the shell, primarily for readability
|
||||
when printed (as in 'git commit -am "msg"').
|
||||
"""
|
||||
print(f"[GIT] {cmd}")
|
||||
try:
|
||||
subprocess.run(cmd, shell=True, check=True)
|
||||
except subprocess.CalledProcessError as exc:
|
||||
print(f"[ERROR] Git command failed: {cmd}")
|
||||
print(f" Exit code: {exc.returncode}")
|
||||
if exc.stdout:
|
||||
print("--- stdout ---")
|
||||
print(exc.stdout)
|
||||
if exc.stderr:
|
||||
print("--- stderr ---")
|
||||
print(exc.stderr)
|
||||
raise GitError(f"Git command failed: {cmd}") from exc
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Editor helper for interactive changelog messages
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
|
||||
"""
|
||||
Open $EDITOR (fallback 'nano') so the user can enter a changelog message.
|
||||
|
||||
The temporary file is pre-filled with commented instructions and an
|
||||
optional initial_message. Lines starting with '#' are ignored when the
|
||||
message is read back.
|
||||
|
||||
Returns the final message (may be empty string if user leaves it blank).
|
||||
"""
|
||||
editor = os.environ.get("EDITOR", "nano")
|
||||
|
||||
with tempfile.NamedTemporaryFile(
|
||||
mode="w+",
|
||||
delete=False,
|
||||
encoding="utf-8",
|
||||
) as tmp:
|
||||
tmp_path = tmp.name
|
||||
tmp.write(
|
||||
"# Write the changelog entry for this release.\n"
|
||||
"# Lines starting with '#' will be ignored.\n"
|
||||
"# Empty result will fall back to a generic message.\n\n"
|
||||
)
|
||||
if initial_message:
|
||||
tmp.write(initial_message.strip() + "\n")
|
||||
tmp.flush()
|
||||
|
||||
try:
|
||||
subprocess.call([editor, tmp_path])
|
||||
except FileNotFoundError:
|
||||
print(
|
||||
f"[WARN] Editor {editor!r} not found; proceeding without "
|
||||
"interactive changelog message."
|
||||
)
|
||||
|
||||
try:
|
||||
with open(tmp_path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
finally:
|
||||
try:
|
||||
os.remove(tmp_path)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
lines = [
|
||||
line for line in content.splitlines()
|
||||
if not line.strip().startswith("#")
|
||||
]
|
||||
return "\n".join(lines).strip()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# File update helpers (pyproject + extra packaging + changelog)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def update_pyproject_version(
|
||||
pyproject_path: str,
|
||||
new_version: str,
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Update the version in pyproject.toml with the new version.
|
||||
|
||||
The function looks for a line matching:
|
||||
|
||||
version = "X.Y.Z"
|
||||
|
||||
and replaces the version part with the given new_version string.
|
||||
"""
|
||||
try:
|
||||
with open(pyproject_path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
except FileNotFoundError:
|
||||
print(f"[ERROR] pyproject.toml not found at: {pyproject_path}")
|
||||
sys.exit(1)
|
||||
|
||||
pattern = r'^(version\s*=\s*")([^"]+)(")'
|
||||
new_content, count = re.subn(
|
||||
pattern,
|
||||
lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
|
||||
content,
|
||||
flags=re.MULTILINE,
|
||||
)
|
||||
|
||||
if count == 0:
|
||||
print("[ERROR] Could not find version line in pyproject.toml")
|
||||
sys.exit(1)
|
||||
|
||||
if preview:
|
||||
print(f"[PREVIEW] Would update pyproject.toml version to {new_version}")
|
||||
return
|
||||
|
||||
with open(pyproject_path, "w", encoding="utf-8") as f:
|
||||
f.write(new_content)
|
||||
|
||||
print(f"Updated pyproject.toml version to {new_version}")
|
||||
|
||||
|
||||
def update_flake_version(
|
||||
flake_path: str,
|
||||
new_version: str,
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Update the version in flake.nix, if present.
|
||||
"""
|
||||
if not os.path.exists(flake_path):
|
||||
print("[INFO] flake.nix not found, skipping.")
|
||||
return
|
||||
|
||||
try:
|
||||
with open(flake_path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Could not read flake.nix: {exc}")
|
||||
return
|
||||
|
||||
pattern = r'(version\s*=\s*")([^"]+)(")'
|
||||
new_content, count = re.subn(
|
||||
pattern,
|
||||
lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
|
||||
content,
|
||||
)
|
||||
|
||||
if count == 0:
|
||||
print("[WARN] No version assignment found in flake.nix, skipping.")
|
||||
return
|
||||
|
||||
if preview:
|
||||
print(f"[PREVIEW] Would update flake.nix version to {new_version}")
|
||||
return
|
||||
|
||||
with open(flake_path, "w", encoding="utf-8") as f:
|
||||
f.write(new_content)
|
||||
|
||||
print(f"Updated flake.nix version to {new_version}")
|
||||
|
||||
|
||||
def update_pkgbuild_version(
|
||||
pkgbuild_path: str,
|
||||
new_version: str,
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Update the version in PKGBUILD, if present.
|
||||
|
||||
Expects:
|
||||
pkgver=1.2.3
|
||||
pkgrel=1
|
||||
"""
|
||||
if not os.path.exists(pkgbuild_path):
|
||||
print("[INFO] PKGBUILD not found, skipping.")
|
||||
return
|
||||
|
||||
try:
|
||||
with open(pkgbuild_path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Could not read PKGBUILD: {exc}")
|
||||
return
|
||||
|
||||
ver_pattern = r"^(pkgver\s*=\s*)(.+)$"
|
||||
new_content, ver_count = re.subn(
|
||||
ver_pattern,
|
||||
lambda m: f"{m.group(1)}{new_version}",
|
||||
content,
|
||||
flags=re.MULTILINE,
|
||||
)
|
||||
|
||||
if ver_count == 0:
|
||||
print("[WARN] No pkgver line found in PKGBUILD.")
|
||||
new_content = content
|
||||
|
||||
rel_pattern = r"^(pkgrel\s*=\s*)(.+)$"
|
||||
new_content, rel_count = re.subn(
|
||||
rel_pattern,
|
||||
lambda m: f"{m.group(1)}1",
|
||||
new_content,
|
||||
flags=re.MULTILINE,
|
||||
)
|
||||
|
||||
if rel_count == 0:
|
||||
print("[WARN] No pkgrel line found in PKGBUILD.")
|
||||
|
||||
if preview:
|
||||
print(f"[PREVIEW] Would update PKGBUILD to pkgver={new_version}, pkgrel=1")
|
||||
return
|
||||
|
||||
with open(pkgbuild_path, "w", encoding="utf-8") as f:
|
||||
f.write(new_content)
|
||||
|
||||
print(f"Updated PKGBUILD to pkgver={new_version}, pkgrel=1")
|
||||
|
||||
|
||||
def update_spec_version(
|
||||
spec_path: str,
|
||||
new_version: str,
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Update the version in an RPM spec file, if present.
|
||||
"""
|
||||
if not os.path.exists(spec_path):
|
||||
print("[INFO] RPM spec file not found, skipping.")
|
||||
return
|
||||
|
||||
try:
|
||||
with open(spec_path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Could not read spec file: {exc}")
|
||||
return
|
||||
|
||||
ver_pattern = r"^(Version:\s*)(.+)$"
|
||||
new_content, ver_count = re.subn(
|
||||
ver_pattern,
|
||||
lambda m: f"{m.group(1)}{new_version}",
|
||||
content,
|
||||
flags=re.MULTILINE,
|
||||
)
|
||||
|
||||
if ver_count == 0:
|
||||
print("[WARN] No 'Version:' line found in spec file.")
|
||||
|
||||
rel_pattern = r"^(Release:\s*)(.+)$"
|
||||
|
||||
def _release_repl(m: re.Match[str]) -> str: # type: ignore[name-defined]
|
||||
rest = m.group(2).strip()
|
||||
match = re.match(r"^(\d+)(.*)$", rest)
|
||||
if match:
|
||||
suffix = match.group(2)
|
||||
else:
|
||||
suffix = ""
|
||||
return f"{m.group(1)}1{suffix}"
|
||||
|
||||
new_content, rel_count = re.subn(
|
||||
rel_pattern,
|
||||
_release_repl,
|
||||
new_content,
|
||||
flags=re.MULTILINE,
|
||||
)
|
||||
|
||||
if rel_count == 0:
|
||||
print("[WARN] No 'Release:' line found in spec file.")
|
||||
|
||||
if preview:
|
||||
print(
|
||||
f"[PREVIEW] Would update spec file "
|
||||
f"{os.path.basename(spec_path)} to Version: {new_version}, Release: 1..."
|
||||
)
|
||||
return
|
||||
|
||||
with open(spec_path, "w", encoding="utf-8") as f:
|
||||
f.write(new_content)
|
||||
|
||||
print(
|
||||
f"Updated spec file {os.path.basename(spec_path)} "
|
||||
f"to Version: {new_version}, Release: 1..."
|
||||
)
|
||||
|
||||
|
||||
def update_changelog(
|
||||
changelog_path: str,
|
||||
new_version: str,
|
||||
message: Optional[str] = None,
|
||||
preview: bool = False,
|
||||
) -> str:
|
||||
"""
|
||||
Prepend a new release section to CHANGELOG.md with the new version,
|
||||
current date, and a message.
|
||||
"""
|
||||
today = date.today().isoformat()
|
||||
|
||||
if message is None:
|
||||
if preview:
|
||||
message = "Automated release."
|
||||
else:
|
||||
print(
|
||||
"\n[INFO] No release message provided, opening editor for "
|
||||
"changelog entry...\n"
|
||||
)
|
||||
editor_message = _open_editor_for_changelog()
|
||||
if not editor_message:
|
||||
message = "Automated release."
|
||||
else:
|
||||
message = editor_message
|
||||
|
||||
header = f"## [{new_version}] - {today}\n"
|
||||
header += f"\n* {message}\n\n"
|
||||
|
||||
if os.path.exists(changelog_path):
|
||||
try:
|
||||
with open(changelog_path, "r", encoding="utf-8") as f:
|
||||
changelog = f.read()
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Could not read existing CHANGELOG.md: {exc}")
|
||||
changelog = ""
|
||||
else:
|
||||
changelog = ""
|
||||
|
||||
new_changelog = header + "\n" + changelog if changelog else header
|
||||
|
||||
print("\n================ CHANGELOG ENTRY ================")
|
||||
print(header.rstrip())
|
||||
print("=================================================\n")
|
||||
|
||||
if preview:
|
||||
print(f"[PREVIEW] Would prepend new entry for {new_version} to CHANGELOG.md")
|
||||
return message
|
||||
|
||||
with open(changelog_path, "w", encoding="utf-8") as f:
|
||||
f.write(new_changelog)
|
||||
|
||||
print(f"Updated CHANGELOG.md with version {new_version}")
|
||||
|
||||
return message
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Debian changelog helpers (with Git config fallback for maintainer)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _get_git_config_value(key: str) -> Optional[str]:
|
||||
"""
|
||||
Try to read a value from `git config --get <key>`.
|
||||
"""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["git", "config", "--get", key],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=False,
|
||||
)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
value = result.stdout.strip()
|
||||
return value or None
|
||||
|
||||
|
||||
def _get_debian_author() -> Tuple[str, str]:
|
||||
"""
|
||||
Determine the maintainer name/email for debian/changelog entries.
|
||||
"""
|
||||
name = os.environ.get("DEBFULLNAME")
|
||||
email = os.environ.get("DEBEMAIL")
|
||||
|
||||
if not name:
|
||||
name = os.environ.get("GIT_AUTHOR_NAME")
|
||||
if not email:
|
||||
email = os.environ.get("GIT_AUTHOR_EMAIL")
|
||||
|
||||
if not name:
|
||||
name = _get_git_config_value("user.name")
|
||||
if not email:
|
||||
email = _get_git_config_value("user.email")
|
||||
|
||||
if not name:
|
||||
name = "Unknown Maintainer"
|
||||
if not email:
|
||||
email = "unknown@example.com"
|
||||
|
||||
return name, email
|
||||
|
||||
|
||||
def update_debian_changelog(
|
||||
debian_changelog_path: str,
|
||||
package_name: str,
|
||||
new_version: str,
|
||||
message: Optional[str] = None,
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Prepend a new entry to debian/changelog, if it exists.
|
||||
"""
|
||||
if not os.path.exists(debian_changelog_path):
|
||||
print("[INFO] debian/changelog not found, skipping.")
|
||||
return
|
||||
|
||||
debian_version = f"{new_version}-1"
|
||||
now = datetime.now().astimezone()
|
||||
date_str = now.strftime("%a, %d %b %Y %H:%M:%S %z")
|
||||
|
||||
author_name, author_email = _get_debian_author()
|
||||
|
||||
first_line = f"{package_name} ({debian_version}) unstable; urgency=medium"
|
||||
body_line = message.strip() if message else f"Automated release {new_version}."
|
||||
stanza = (
|
||||
f"{first_line}\n\n"
|
||||
f" * {body_line}\n\n"
|
||||
f" -- {author_name} <{author_email}> {date_str}\n\n"
|
||||
)
|
||||
|
||||
if preview:
|
||||
print(
|
||||
"[PREVIEW] Would prepend the following stanza to debian/changelog:\n"
|
||||
f"{stanza}"
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
with open(debian_changelog_path, "r", encoding="utf-8") as f:
|
||||
existing = f.read()
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Could not read debian/changelog: {exc}")
|
||||
existing = ""
|
||||
|
||||
new_content = stanza + existing
|
||||
|
||||
with open(debian_changelog_path, "w", encoding="utf-8") as f:
|
||||
f.write(new_content)
|
||||
|
||||
print(f"Updated debian/changelog with version {debian_version}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Internal implementation (single-phase, preview or real)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _release_impl(
|
||||
pyproject_path: str = "pyproject.toml",
|
||||
changelog_path: str = "CHANGELOG.md",
|
||||
release_type: str = "patch",
|
||||
message: Optional[str] = None,
|
||||
preview: bool = False,
|
||||
close: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Internal implementation that performs a single-phase release.
|
||||
"""
|
||||
current_ver = _determine_current_version()
|
||||
new_ver = _bump_semver(current_ver, release_type)
|
||||
new_ver_str = str(new_ver)
|
||||
new_tag = new_ver.to_tag(with_prefix=True)
|
||||
|
||||
mode = "PREVIEW" if preview else "REAL"
|
||||
print(f"Release mode: {mode}")
|
||||
print(f"Current version: {current_ver}")
|
||||
print(f"New version: {new_ver_str} ({release_type})")
|
||||
|
||||
repo_root = os.path.dirname(os.path.abspath(pyproject_path))
|
||||
|
||||
update_pyproject_version(pyproject_path, new_ver_str, preview=preview)
|
||||
changelog_message = update_changelog(
|
||||
changelog_path,
|
||||
new_ver_str,
|
||||
message=message,
|
||||
preview=preview,
|
||||
)
|
||||
|
||||
flake_path = os.path.join(repo_root, "flake.nix")
|
||||
update_flake_version(flake_path, new_ver_str, preview=preview)
|
||||
|
||||
pkgbuild_path = os.path.join(repo_root, "PKGBUILD")
|
||||
update_pkgbuild_version(pkgbuild_path, new_ver_str, preview=preview)
|
||||
|
||||
spec_path = os.path.join(repo_root, "package-manager.spec")
|
||||
update_spec_version(spec_path, new_ver_str, preview=preview)
|
||||
|
||||
effective_message: Optional[str] = message
|
||||
if effective_message is None and isinstance(changelog_message, str):
|
||||
if changelog_message.strip():
|
||||
effective_message = changelog_message.strip()
|
||||
|
||||
debian_changelog_path = os.path.join(repo_root, "debian", "changelog")
|
||||
package_name = os.path.basename(repo_root) or "package-manager"
|
||||
update_debian_changelog(
|
||||
debian_changelog_path,
|
||||
package_name=package_name,
|
||||
new_version=new_ver_str,
|
||||
message=effective_message,
|
||||
preview=preview,
|
||||
)
|
||||
|
||||
commit_msg = f"Release version {new_ver_str}"
|
||||
tag_msg = effective_message or commit_msg
|
||||
|
||||
try:
|
||||
branch = get_current_branch() or "main"
|
||||
except GitError:
|
||||
branch = "main"
|
||||
print(f"Releasing on branch: {branch}")
|
||||
|
||||
files_to_add = [
|
||||
pyproject_path,
|
||||
changelog_path,
|
||||
flake_path,
|
||||
pkgbuild_path,
|
||||
spec_path,
|
||||
debian_changelog_path,
|
||||
]
|
||||
existing_files = [p for p in files_to_add if p and os.path.exists(p)]
|
||||
|
||||
if preview:
|
||||
for path in existing_files:
|
||||
print(f"[PREVIEW] Would run: git add {path}")
|
||||
print(f'[PREVIEW] Would run: git commit -am "{commit_msg}"')
|
||||
print(f'[PREVIEW] Would run: git tag -a {new_tag} -m "{tag_msg}"')
|
||||
print(f"[PREVIEW] Would run: git push origin {branch}")
|
||||
print("[PREVIEW] Would run: git push origin --tags")
|
||||
|
||||
if close and branch not in ("main", "master"):
|
||||
print(
|
||||
f"[PREVIEW] Would also close branch {branch} after the release "
|
||||
"(close=True and branch is not main/master)."
|
||||
)
|
||||
elif close:
|
||||
print(
|
||||
f"[PREVIEW] close=True but current branch is {branch}; "
|
||||
"no branch would be closed."
|
||||
)
|
||||
|
||||
print("Preview completed. No changes were made.")
|
||||
return
|
||||
|
||||
for path in existing_files:
|
||||
_run_git_command(f"git add {path}")
|
||||
|
||||
_run_git_command(f'git commit -am "{commit_msg}"')
|
||||
_run_git_command(f'git tag -a {new_tag} -m "{tag_msg}"')
|
||||
_run_git_command(f"git push origin {branch}")
|
||||
_run_git_command("git push origin --tags")
|
||||
|
||||
print(f"Release {new_ver_str} completed.")
|
||||
|
||||
if close:
|
||||
if branch in ("main", "master"):
|
||||
print(
|
||||
f"[INFO] close=True but current branch is {branch}; "
|
||||
"nothing to close."
|
||||
)
|
||||
return
|
||||
|
||||
print(
|
||||
f"[INFO] Closing branch {branch} after successful release "
|
||||
"(close=True and branch is not main/master)..."
|
||||
)
|
||||
try:
|
||||
close_branch(name=branch, base_branch="main", cwd=".")
|
||||
except Exception as exc: # pragma: no cover
|
||||
print(f"[WARN] Failed to close branch {branch} automatically: {exc}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public release entry point
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def release(
|
||||
pyproject_path: str = "pyproject.toml",
|
||||
changelog_path: str = "CHANGELOG.md",
|
||||
release_type: str = "patch",
|
||||
message: Optional[str] = None,
|
||||
preview: bool = False,
|
||||
force: bool = False,
|
||||
close: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
High-level release entry point.
|
||||
|
||||
Modes:
|
||||
|
||||
- preview=True:
|
||||
* Single-phase PREVIEW only.
|
||||
|
||||
- preview=False, force=True:
|
||||
* Single-phase REAL release, no interactive preview.
|
||||
|
||||
- preview=False, force=False:
|
||||
* Two-phase flow (intended default for interactive CLI use).
|
||||
"""
|
||||
if preview:
|
||||
_release_impl(
|
||||
pyproject_path=pyproject_path,
|
||||
changelog_path=changelog_path,
|
||||
release_type=release_type,
|
||||
message=message,
|
||||
preview=True,
|
||||
close=close,
|
||||
)
|
||||
return
|
||||
|
||||
if force:
|
||||
_release_impl(
|
||||
pyproject_path=pyproject_path,
|
||||
changelog_path=changelog_path,
|
||||
release_type=release_type,
|
||||
message=message,
|
||||
preview=False,
|
||||
close=close,
|
||||
)
|
||||
return
|
||||
|
||||
if not sys.stdin.isatty():
|
||||
_release_impl(
|
||||
pyproject_path=pyproject_path,
|
||||
changelog_path=changelog_path,
|
||||
release_type=release_type,
|
||||
message=message,
|
||||
preview=False,
|
||||
close=close,
|
||||
)
|
||||
return
|
||||
|
||||
print("[INFO] Running preview before actual release...\n")
|
||||
_release_impl(
|
||||
pyproject_path=pyproject_path,
|
||||
changelog_path=changelog_path,
|
||||
release_type=release_type,
|
||||
message=message,
|
||||
preview=True,
|
||||
close=close,
|
||||
)
|
||||
|
||||
try:
|
||||
answer = input("Proceed with the actual release? [y/N]: ").strip().lower()
|
||||
except (EOFError, KeyboardInterrupt):
|
||||
print("\n[INFO] Release aborted (no confirmation).")
|
||||
return
|
||||
|
||||
if answer not in ("y", "yes"):
|
||||
print("Release aborted by user. No changes were made.")
|
||||
return
|
||||
|
||||
print("\n[INFO] Running REAL release...\n")
|
||||
_release_impl(
|
||||
pyproject_path=pyproject_path,
|
||||
changelog_path=changelog_path,
|
||||
release_type=release_type,
|
||||
message=message,
|
||||
preview=False,
|
||||
close=close,
|
||||
)
|
||||
@@ -49,6 +49,7 @@ from .files import (
|
||||
update_spec_version,
|
||||
update_changelog,
|
||||
update_debian_changelog,
|
||||
update_spec_changelog,
|
||||
)
|
||||
|
||||
|
||||
@@ -98,6 +99,8 @@ def _release_impl(
|
||||
spec_path = os.path.join(repo_root, "package-manager.spec")
|
||||
update_spec_version(spec_path, new_ver_str, preview=preview)
|
||||
|
||||
# Determine a single effective_message to be reused across all
|
||||
# changelog targets (project, Debian, Fedora).
|
||||
effective_message: Optional[str] = message
|
||||
if effective_message is None and isinstance(changelog_message, str):
|
||||
if changelog_message.strip():
|
||||
@@ -105,6 +108,8 @@ def _release_impl(
|
||||
|
||||
debian_changelog_path = os.path.join(repo_root, "debian", "changelog")
|
||||
package_name = os.path.basename(repo_root) or "package-manager"
|
||||
|
||||
# Debian changelog
|
||||
update_debian_changelog(
|
||||
debian_changelog_path,
|
||||
package_name=package_name,
|
||||
@@ -113,6 +118,15 @@ def _release_impl(
|
||||
preview=preview,
|
||||
)
|
||||
|
||||
# Fedora / RPM %changelog
|
||||
update_spec_changelog(
|
||||
spec_path=spec_path,
|
||||
package_name=package_name,
|
||||
new_version=new_ver_str,
|
||||
message=effective_message,
|
||||
preview=preview,
|
||||
)
|
||||
|
||||
commit_msg = f"Release version {new_ver_str}"
|
||||
tag_msg = effective_message or commit_msg
|
||||
|
||||
|
||||
@@ -8,7 +8,10 @@ Responsibilities:
|
||||
- Update pyproject.toml with the new version.
|
||||
- Update flake.nix, PKGBUILD, RPM spec files where present.
|
||||
- Prepend release entries to CHANGELOG.md.
|
||||
- Maintain debian/changelog entries, including maintainer metadata.
|
||||
- Maintain distribution-specific changelog files:
|
||||
* debian/changelog
|
||||
* RPM spec %changelog section
|
||||
including maintainer metadata where applicable.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
@@ -82,7 +85,6 @@ def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
|
||||
# File update helpers (pyproject + extra packaging + changelog)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def update_pyproject_version(
|
||||
pyproject_path: str,
|
||||
new_version: str,
|
||||
@@ -96,13 +98,25 @@ def update_pyproject_version(
|
||||
version = "X.Y.Z"
|
||||
|
||||
and replaces the version part with the given new_version string.
|
||||
|
||||
If the file does not exist, it is skipped without failing the release.
|
||||
"""
|
||||
if not os.path.exists(pyproject_path):
|
||||
print(
|
||||
f"[INFO] pyproject.toml not found at: {pyproject_path}, "
|
||||
"skipping version update."
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
with open(pyproject_path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
except FileNotFoundError:
|
||||
print(f"[ERROR] pyproject.toml not found at: {pyproject_path}")
|
||||
sys.exit(1)
|
||||
except OSError as exc:
|
||||
print(
|
||||
f"[WARN] Could not read pyproject.toml at {pyproject_path}: {exc}. "
|
||||
"Skipping version update."
|
||||
)
|
||||
return
|
||||
|
||||
pattern = r'^(version\s*=\s*")([^"]+)(")'
|
||||
new_content, count = re.subn(
|
||||
@@ -442,3 +456,82 @@ def update_debian_changelog(
|
||||
f.write(new_content)
|
||||
|
||||
print(f"Updated debian/changelog with version {debian_version}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fedora / RPM spec %changelog helper
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def update_spec_changelog(
|
||||
spec_path: str,
|
||||
package_name: str,
|
||||
new_version: str,
|
||||
message: Optional[str] = None,
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Prepend a new entry to the %changelog section of an RPM spec file,
|
||||
if present.
|
||||
|
||||
Typical RPM-style entry:
|
||||
|
||||
* Tue Dec 09 2025 John Doe <john@example.com> - 0.5.1-1
|
||||
- Your changelog message
|
||||
"""
|
||||
if not os.path.exists(spec_path):
|
||||
print("[INFO] RPM spec file not found, skipping spec changelog update.")
|
||||
return
|
||||
|
||||
try:
|
||||
with open(spec_path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Could not read spec file for changelog update: {exc}")
|
||||
return
|
||||
|
||||
debian_version = f"{new_version}-1"
|
||||
now = datetime.now().astimezone()
|
||||
date_str = now.strftime("%a %b %d %Y")
|
||||
|
||||
# Reuse Debian maintainer discovery for author name/email.
|
||||
author_name, author_email = _get_debian_author()
|
||||
|
||||
body_line = message.strip() if message else f"Automated release {new_version}."
|
||||
|
||||
stanza = (
|
||||
f"* {date_str} {author_name} <{author_email}> - {debian_version}\n"
|
||||
f"- {body_line}\n\n"
|
||||
)
|
||||
|
||||
marker = "%changelog"
|
||||
idx = content.find(marker)
|
||||
|
||||
if idx == -1:
|
||||
# No %changelog section yet: append one at the end.
|
||||
new_content = content.rstrip() + "\n\n%changelog\n" + stanza
|
||||
else:
|
||||
# Insert stanza right after the %changelog line.
|
||||
before = content[: idx + len(marker)]
|
||||
after = content[idx + len(marker) :]
|
||||
new_content = before + "\n" + stanza + after.lstrip("\n")
|
||||
|
||||
if preview:
|
||||
print(
|
||||
"[PREVIEW] Would update RPM %changelog section with the following "
|
||||
"stanza:\n"
|
||||
f"{stanza}"
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
with open(spec_path, "w", encoding="utf-8") as f:
|
||||
f.write(new_content)
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Failed to write updated spec changelog section: {exc}")
|
||||
return
|
||||
|
||||
print(
|
||||
f"Updated RPM %changelog section in {os.path.basename(spec_path)} "
|
||||
f"for {package_name} {debian_version}"
|
||||
)
|
||||
|
||||
@@ -13,6 +13,13 @@ Behavior:
|
||||
* Then install the flake outputs (`pkgmgr`, `default`) via `nix profile install`.
|
||||
- Failure installing `pkgmgr` is treated as fatal.
|
||||
- Failure installing `default` is logged as an error/warning but does not abort.
|
||||
|
||||
Special handling for dev shells / CI:
|
||||
- If IN_NIX_SHELL is set (e.g. inside `nix develop`), the installer is
|
||||
disabled. In that environment the flake outputs are already provided
|
||||
by the dev shell and we must not touch the user profile.
|
||||
- If PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 is set, the installer is
|
||||
globally disabled (useful for CI or debugging).
|
||||
"""
|
||||
|
||||
import os
|
||||
@@ -36,14 +43,45 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
FLAKE_FILE = "flake.nix"
|
||||
PROFILE_NAME = "package-manager"
|
||||
|
||||
def _in_nix_shell(self) -> bool:
|
||||
"""
|
||||
Return True if we appear to be running inside a Nix dev shell.
|
||||
|
||||
Nix sets IN_NIX_SHELL in `nix develop` environments. In that case
|
||||
the flake outputs are already available, and touching the user
|
||||
profile (nix profile install/remove) is undesirable.
|
||||
"""
|
||||
return bool(os.environ.get("IN_NIX_SHELL"))
|
||||
|
||||
def supports(self, ctx: "RepoContext") -> bool:
|
||||
"""
|
||||
Only support repositories that:
|
||||
- Have a flake.nix
|
||||
- Are NOT inside a Nix dev shell (IN_NIX_SHELL unset),
|
||||
- Are NOT explicitly disabled via PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1,
|
||||
- Have a flake.nix,
|
||||
- And have the `nix` command available.
|
||||
"""
|
||||
# 1) Skip when running inside a dev shell – flake is already active.
|
||||
if self._in_nix_shell():
|
||||
print(
|
||||
"[INFO] IN_NIX_SHELL detected; skipping NixFlakeInstaller. "
|
||||
"Flake outputs are provided by the development shell."
|
||||
)
|
||||
return False
|
||||
|
||||
# 2) Optional global kill-switch for CI or debugging.
|
||||
if os.environ.get("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER") == "1":
|
||||
print(
|
||||
"[INFO] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 – "
|
||||
"NixFlakeInstaller is disabled."
|
||||
)
|
||||
return False
|
||||
|
||||
# 3) Nix must be available.
|
||||
if shutil.which("nix") is None:
|
||||
return False
|
||||
|
||||
# 4) flake.nix must exist in the repository.
|
||||
flake_path = os.path.join(ctx.repo_dir, self.FLAKE_FILE)
|
||||
return os.path.exists(flake_path)
|
||||
|
||||
@@ -76,6 +114,14 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
Any failure installing `pkgmgr` is treated as fatal (SystemExit).
|
||||
A failure installing `default` is logged but does not abort.
|
||||
"""
|
||||
# Extra guard in case run() is called directly without supports().
|
||||
if self._in_nix_shell():
|
||||
print(
|
||||
"[INFO] IN_NIX_SHELL detected in run(); "
|
||||
"skipping Nix flake profile installation."
|
||||
)
|
||||
return
|
||||
|
||||
# Reuse supports() to keep logic in one place
|
||||
if not self.supports(ctx): # type: ignore[arg-type]
|
||||
return
|
||||
@@ -91,7 +137,12 @@ class NixFlakeInstaller(BaseInstaller):
|
||||
try:
|
||||
# For 'default' we don't want the process to exit on error
|
||||
allow_failure = output == "default"
|
||||
run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview, allow_failure=allow_failure)
|
||||
run_command(
|
||||
cmd,
|
||||
cwd=ctx.repo_dir,
|
||||
preview=ctx.preview,
|
||||
allow_failure=allow_failure,
|
||||
)
|
||||
print(f"Nix flake output '{output}' successfully installed.")
|
||||
except SystemExit as e:
|
||||
print(f"[Error] Failed to install Nix flake output '{output}': {e}")
|
||||
|
||||
@@ -17,7 +17,6 @@ apt/dpkg tooling are available.
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from typing import List
|
||||
|
||||
from pkgmgr.actions.repository.install.context import RepoContext
|
||||
@@ -68,6 +67,32 @@ class DebianControlInstaller(BaseInstaller):
|
||||
pattern = os.path.join(parent, "*.deb")
|
||||
return sorted(glob.glob(pattern))
|
||||
|
||||
def _privileged_prefix(self) -> str | None:
|
||||
"""
|
||||
Determine how to run privileged commands:
|
||||
|
||||
- If 'sudo' is available, return 'sudo '.
|
||||
- If we are running as root (e.g. inside CI/container), return ''.
|
||||
- Otherwise, return None, meaning we cannot safely elevate.
|
||||
|
||||
Callers are responsible for handling the None case (usually by
|
||||
warning and skipping automatic installation).
|
||||
"""
|
||||
sudo_path = shutil.which("sudo")
|
||||
|
||||
is_root = False
|
||||
try:
|
||||
is_root = os.geteuid() == 0
|
||||
except AttributeError: # pragma: no cover - non-POSIX platforms
|
||||
# On non-POSIX systems, fall back to assuming "not root".
|
||||
is_root = False
|
||||
|
||||
if sudo_path is not None:
|
||||
return "sudo "
|
||||
if is_root:
|
||||
return ""
|
||||
return None
|
||||
|
||||
def _install_build_dependencies(self, ctx: RepoContext) -> None:
|
||||
"""
|
||||
Install build dependencies using `apt-get build-dep ./`.
|
||||
@@ -86,12 +111,25 @@ class DebianControlInstaller(BaseInstaller):
|
||||
)
|
||||
return
|
||||
|
||||
prefix = self._privileged_prefix()
|
||||
if prefix is None:
|
||||
print(
|
||||
"[Warning] Neither 'sudo' is available nor running as root. "
|
||||
"Skipping automatic build-dep installation for Debian. "
|
||||
"Please install build dependencies from debian/control manually."
|
||||
)
|
||||
return
|
||||
|
||||
# Update package lists first for reliable build-dep resolution.
|
||||
run_command("sudo apt-get update", cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
run_command(
|
||||
f"{prefix}apt-get update",
|
||||
cwd=ctx.repo_dir,
|
||||
preview=ctx.preview,
|
||||
)
|
||||
|
||||
# Install build dependencies based on debian/control in the current tree.
|
||||
# `apt-get build-dep ./` uses the source in the current directory.
|
||||
builddep_cmd = "sudo apt-get build-dep -y ./"
|
||||
builddep_cmd = f"{prefix}apt-get build-dep -y ./"
|
||||
run_command(builddep_cmd, cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
|
||||
def run(self, ctx: RepoContext) -> None:
|
||||
@@ -101,7 +139,7 @@ class DebianControlInstaller(BaseInstaller):
|
||||
Steps:
|
||||
1. apt-get build-dep ./ (automatic build dependency installation)
|
||||
2. dpkg-buildpackage -b -us -uc
|
||||
3. sudo dpkg -i ../*.deb
|
||||
3. sudo dpkg -i ../*.deb (or plain dpkg -i when running as root)
|
||||
"""
|
||||
control_path = self._control_path(ctx)
|
||||
if not os.path.exists(control_path):
|
||||
@@ -123,7 +161,17 @@ class DebianControlInstaller(BaseInstaller):
|
||||
)
|
||||
return
|
||||
|
||||
prefix = self._privileged_prefix()
|
||||
if prefix is None:
|
||||
print(
|
||||
"[Warning] Neither 'sudo' is available nor running as root. "
|
||||
"Skipping automatic .deb installation. "
|
||||
"You can manually install the following files with dpkg -i:\n "
|
||||
+ "\n ".join(debs)
|
||||
)
|
||||
return
|
||||
|
||||
# 4) Install .deb files
|
||||
install_cmd = "sudo dpkg -i " + " ".join(os.path.basename(d) for d in debs)
|
||||
install_cmd = prefix + "dpkg -i " + " ".join(os.path.basename(d) for d in debs)
|
||||
parent = os.path.dirname(ctx.repo_dir)
|
||||
run_command(install_cmd, cwd=parent, preview=ctx.preview)
|
||||
|
||||
@@ -7,8 +7,10 @@ Installer for RPM-based packages defined in *.spec files.
|
||||
This installer:
|
||||
|
||||
1. Installs build dependencies via dnf/yum builddep (where available)
|
||||
2. Uses rpmbuild to build RPMs from the provided .spec file
|
||||
3. Installs the resulting RPMs via `rpm -i`
|
||||
2. Prepares a source tarball in ~/rpmbuild/SOURCES based on the .spec
|
||||
3. Uses rpmbuild to build RPMs from the provided .spec file
|
||||
4. Installs the resulting RPMs via the system package manager (dnf/yum)
|
||||
or rpm as a fallback.
|
||||
|
||||
It targets RPM-based systems (Fedora / RHEL / CentOS / Rocky / Alma, etc.).
|
||||
"""
|
||||
@@ -16,8 +18,8 @@ It targets RPM-based systems (Fedora / RHEL / CentOS / Rocky / Alma, etc.).
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from typing import List, Optional
|
||||
import tarfile
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from pkgmgr.actions.repository.install.context import RepoContext
|
||||
from pkgmgr.actions.repository.install.installers.base import BaseInstaller
|
||||
@@ -59,6 +61,117 @@ class RpmSpecInstaller(BaseInstaller):
|
||||
return None
|
||||
return matches[0]
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Helpers for preparing rpmbuild topdir and source tarball
|
||||
# ------------------------------------------------------------------
|
||||
def _rpmbuild_topdir(self) -> str:
|
||||
"""
|
||||
Return the rpmbuild topdir that rpmbuild will use by default.
|
||||
|
||||
By default this is: ~/rpmbuild
|
||||
|
||||
In the self-install tests, $HOME is set to /tmp/pkgmgr-self-install,
|
||||
so this becomes /tmp/pkgmgr-self-install/rpmbuild which matches the
|
||||
paths in the RPM build logs.
|
||||
"""
|
||||
home = os.path.expanduser("~")
|
||||
return os.path.join(home, "rpmbuild")
|
||||
|
||||
def _ensure_rpmbuild_tree(self, topdir: str) -> None:
|
||||
"""
|
||||
Ensure the standard rpmbuild directory tree exists:
|
||||
|
||||
<topdir>/
|
||||
BUILD/
|
||||
BUILDROOT/
|
||||
RPMS/
|
||||
SOURCES/
|
||||
SPECS/
|
||||
SRPMS/
|
||||
"""
|
||||
for sub in ("BUILD", "BUILDROOT", "RPMS", "SOURCES", "SPECS", "SRPMS"):
|
||||
os.makedirs(os.path.join(topdir, sub), exist_ok=True)
|
||||
|
||||
def _parse_name_version(self, spec_path: str) -> Optional[Tuple[str, str]]:
|
||||
"""
|
||||
Parse Name and Version from the given .spec file.
|
||||
|
||||
Returns (name, version) or None if either cannot be determined.
|
||||
"""
|
||||
name = None
|
||||
version = None
|
||||
|
||||
with open(spec_path, "r", encoding="utf-8") as f:
|
||||
for raw_line in f:
|
||||
line = raw_line.strip()
|
||||
# Ignore comments
|
||||
if not line or line.startswith("#"):
|
||||
continue
|
||||
|
||||
lower = line.lower()
|
||||
if lower.startswith("name:"):
|
||||
# e.g. "Name: package-manager"
|
||||
parts = line.split(":", 1)
|
||||
if len(parts) == 2:
|
||||
name = parts[1].strip()
|
||||
elif lower.startswith("version:"):
|
||||
# e.g. "Version: 0.7.7"
|
||||
parts = line.split(":", 1)
|
||||
if len(parts) == 2:
|
||||
version = parts[1].strip()
|
||||
|
||||
if name and version:
|
||||
break
|
||||
|
||||
if not name or not version:
|
||||
print(
|
||||
"[Warning] Could not determine Name/Version from spec file "
|
||||
f"'{spec_path}'. Skipping RPM source tarball preparation."
|
||||
)
|
||||
return None
|
||||
|
||||
return name, version
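A small, self-contained illustration (not part of the diff) of the parsing rules described above, applied to a made-up spec snippet.

    spec_text = """\
    # comments and blank lines are ignored
    Name:    example-pkg
    Version: 1.2.3
    """

    name = version = None
    for raw_line in spec_text.splitlines():
        line = raw_line.strip()
        if not line or line.startswith("#"):
            continue
        lower = line.lower()
        if lower.startswith("name:"):
            name = line.split(":", 1)[1].strip()
        elif lower.startswith("version:"):
            version = line.split(":", 1)[1].strip()

    print(name, version)  # -> example-pkg 1.2.3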
|
||||
|
||||
def _prepare_source_tarball(self, ctx: RepoContext, spec_path: str) -> None:
|
||||
"""
|
||||
Prepare a source tarball in <HOME>/rpmbuild/SOURCES that matches
|
||||
the Name/Version in the .spec file.
|
||||
"""
|
||||
parsed = self._parse_name_version(spec_path)
|
||||
if parsed is None:
|
||||
return
|
||||
|
||||
name, version = parsed
|
||||
topdir = self._rpmbuild_topdir()
|
||||
self._ensure_rpmbuild_tree(topdir)
|
||||
|
||||
build_dir = os.path.join(topdir, "BUILD")
|
||||
sources_dir = os.path.join(topdir, "SOURCES")
|
||||
|
||||
source_root = os.path.join(build_dir, f"{name}-{version}")
|
||||
tarball_path = os.path.join(sources_dir, f"{name}-{version}.tar.gz")
|
||||
|
||||
# Clean any previous build directory for this name/version.
|
||||
if os.path.exists(source_root):
|
||||
shutil.rmtree(source_root)
|
||||
|
||||
# Copy the repository tree into BUILD/<name>-<version>.
|
||||
shutil.copytree(ctx.repo_dir, source_root)
|
||||
|
||||
# Create the tarball with the top-level directory <name>-<version>.
|
||||
if os.path.exists(tarball_path):
|
||||
os.remove(tarball_path)
|
||||
|
||||
with tarfile.open(tarball_path, "w:gz") as tar:
|
||||
tar.add(source_root, arcname=f"{name}-{version}")
|
||||
|
||||
print(
|
||||
f"[INFO] Prepared RPM source tarball at '{tarball_path}' "
|
||||
f"from '{ctx.repo_dir}'."
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def supports(self, ctx: RepoContext) -> bool:
|
||||
"""
|
||||
This installer is supported if:
|
||||
@@ -77,26 +190,13 @@ class RpmSpecInstaller(BaseInstaller):
|
||||
By default, rpmbuild outputs RPMs into:
|
||||
~/rpmbuild/RPMS/*/*.rpm
|
||||
"""
|
||||
home = os.path.expanduser("~")
|
||||
pattern = os.path.join(home, "rpmbuild", "RPMS", "**", "*.rpm")
|
||||
topdir = self._rpmbuild_topdir()
|
||||
pattern = os.path.join(topdir, "RPMS", "**", "*.rpm")
|
||||
return sorted(glob.glob(pattern, recursive=True))
|
||||
|
||||
def _install_build_dependencies(self, ctx: RepoContext, spec_path: str) -> None:
|
||||
"""
|
||||
Install build dependencies for the given .spec file.
|
||||
|
||||
Strategy (best-effort):
|
||||
|
||||
1. If dnf is available:
|
||||
sudo dnf builddep -y <spec>
|
||||
2. Else if yum-builddep is available:
|
||||
sudo yum-builddep -y <spec>
|
||||
3. Else if yum is available:
|
||||
sudo yum-builddep -y <spec> # Some systems provide it via yum plugin
|
||||
4. Otherwise: print a warning and skip automatic builddep install.
|
||||
|
||||
Any failure in builddep installation is treated as fatal (SystemExit),
|
||||
consistent with other installer steps.
|
||||
"""
|
||||
spec_basename = os.path.basename(spec_path)
|
||||
|
||||
@@ -105,7 +205,6 @@ class RpmSpecInstaller(BaseInstaller):
|
||||
elif shutil.which("yum-builddep") is not None:
|
||||
cmd = f"sudo yum-builddep -y {spec_basename}"
|
||||
elif shutil.which("yum") is not None:
|
||||
# Some distributions ship yum-builddep as a plugin.
|
||||
cmd = f"sudo yum-builddep -y {spec_basename}"
|
||||
else:
|
||||
print(
|
||||
@@ -114,33 +213,17 @@ class RpmSpecInstaller(BaseInstaller):
|
||||
)
|
||||
return
|
||||
|
||||
# Run builddep in the repository directory so relative spec paths work.
|
||||
run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
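Illustrative sketch (not part of the diff) of the builddep command selection described in the docstring above; `pick_builddep_cmd` is a hypothetical helper, with tool availability passed in explicitly instead of probing PATH.

    from typing import Optional

    def pick_builddep_cmd(spec: str, has_dnf: bool, has_yum_builddep: bool, has_yum: bool) -> Optional[str]:
        if has_dnf:
            return f"sudo dnf builddep -y {spec}"
        if has_yum_builddep or has_yum:   # yum may ship yum-builddep as a plugin
            return f"sudo yum-builddep -y {spec}"
        return None  # caller prints a warning and skips

    print(pick_builddep_cmd("package-manager.spec", True, False, False))
    # -> sudo dnf builddep -y package-manager.spec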
|
||||
|
||||
def run(self, ctx: RepoContext) -> None:
|
||||
def _install_built_rpms(self, ctx: RepoContext, rpms: List[str]) -> None:
|
||||
"""
|
||||
Build and install RPM-based packages.
|
||||
Install or upgrade the built RPMs.
|
||||
|
||||
Steps:
|
||||
1. dnf/yum builddep <spec> (automatic build dependency installation)
|
||||
2. rpmbuild -ba path/to/spec
|
||||
3. sudo rpm -i ~/rpmbuild/RPMS/*/*.rpm
|
||||
Strategy:
|
||||
- Prefer dnf install -y <rpms> (handles upgrades cleanly)
|
||||
- Else yum install -y <rpms>
|
||||
- Else fallback to rpm -Uvh <rpms> (upgrade/replace existing)
|
||||
"""
|
||||
spec_path = self._spec_path(ctx)
|
||||
if not spec_path:
|
||||
return
|
||||
|
||||
# 1) Install build dependencies
|
||||
self._install_build_dependencies(ctx, spec_path)
|
||||
|
||||
# 2) Build RPMs
|
||||
# Use the full spec path, but run in the repo directory.
|
||||
spec_basename = os.path.basename(spec_path)
|
||||
build_cmd = f"rpmbuild -ba {spec_basename}"
|
||||
run_command(build_cmd, cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
|
||||
# 3) Find built RPMs
|
||||
rpms = self._find_built_rpms()
|
||||
if not rpms:
|
||||
print(
|
||||
"[Warning] No RPM files found after rpmbuild. "
|
||||
@@ -148,13 +231,52 @@ class RpmSpecInstaller(BaseInstaller):
|
||||
)
|
||||
return
|
||||
|
||||
# 4) Install RPMs
|
||||
if shutil.which("rpm") is None:
|
||||
dnf = shutil.which("dnf")
|
||||
yum = shutil.which("yum")
|
||||
rpm = shutil.which("rpm")
|
||||
|
||||
if dnf is not None:
|
||||
install_cmd = "sudo dnf install -y " + " ".join(rpms)
|
||||
elif yum is not None:
|
||||
install_cmd = "sudo yum install -y " + " ".join(rpms)
|
||||
elif rpm is not None:
|
||||
# Fallback: use rpm in upgrade mode so an existing older
|
||||
# version is replaced instead of causing file conflicts.
|
||||
install_cmd = "sudo rpm -Uvh " + " ".join(rpms)
|
||||
else:
|
||||
print(
|
||||
"[Warning] rpm binary not found on PATH. "
|
||||
"[Warning] No suitable RPM installer (dnf/yum/rpm) found. "
|
||||
"Cannot install built RPMs."
|
||||
)
|
||||
return
|
||||
|
||||
install_cmd = "sudo rpm -i " + " ".join(rpms)
|
||||
run_command(install_cmd, cwd=ctx.repo_dir, preview=ctx.preview)
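Illustrative sketch (not part of the diff) of the install-command fallback chain above; `pick_install_cmd` is a hypothetical helper with tool availability passed in explicitly.

    from typing import List, Optional

    def pick_install_cmd(rpms: List[str], has_dnf: bool, has_yum: bool, has_rpm: bool) -> Optional[str]:
        joined = " ".join(rpms)
        if has_dnf:
            return f"sudo dnf install -y {joined}"
        if has_yum:
            return f"sudo yum install -y {joined}"
        if has_rpm:
            return f"sudo rpm -Uvh {joined}"   # upgrade mode replaces an older version
        return None

    print(pick_install_cmd(["/root/rpmbuild/RPMS/noarch/demo-1.0-1.noarch.rpm"], False, False, True))
    # -> sudo rpm -Uvh /root/rpmbuild/RPMS/noarch/demo-1.0-1.noarch.rpm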
|
||||
|
||||
def run(self, ctx: RepoContext) -> None:
|
||||
"""
|
||||
Build and install RPM-based packages.
|
||||
|
||||
Steps:
|
||||
1. Prepare source tarball in ~/rpmbuild/SOURCES matching Name/Version
|
||||
2. dnf/yum builddep <spec> (automatic build dependency installation)
|
||||
3. rpmbuild -ba path/to/spec
|
||||
4. Install built RPMs via dnf/yum (or rpm as fallback)
|
||||
"""
|
||||
spec_path = self._spec_path(ctx)
|
||||
if not spec_path:
|
||||
return
|
||||
|
||||
# 1) Prepare source tarball so rpmbuild finds Source0 in SOURCES.
|
||||
self._prepare_source_tarball(ctx, spec_path)
|
||||
|
||||
# 2) Install build dependencies
|
||||
self._install_build_dependencies(ctx, spec_path)
|
||||
|
||||
# 3) Build RPMs
|
||||
spec_basename = os.path.basename(spec_path)
|
||||
build_cmd = f"rpmbuild -ba {spec_basename}"
|
||||
run_command(build_cmd, cwd=ctx.repo_dir, preview=ctx.preview)
|
||||
|
||||
# 4) Find and install built RPMs
|
||||
rpms = self._find_built_rpms()
|
||||
self._install_built_rpms(ctx, rpms)
|
||||
|
||||
@@ -11,15 +11,28 @@ Strategy:
|
||||
3. "pip" from PATH as last resort
|
||||
- If pyproject.toml exists: pip install .
|
||||
|
||||
All installation failures are treated as fatal errors (SystemExit).
|
||||
All installation failures are treated as fatal errors (SystemExit),
|
||||
except when we explicitly skip the installer:
|
||||
|
||||
- If IN_NIX_SHELL is set, we assume Python is managed by Nix and
|
||||
skip this installer entirely.
|
||||
- If PKGMGR_DISABLE_PYTHON_INSTALLER=1 is set, the installer is
|
||||
globally disabled (useful for CI or debugging).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pkgmgr.actions.repository.install.installers.base import BaseInstaller
|
||||
from pkgmgr.core.command.run import run_command
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pkgmgr.actions.repository.install.context import RepoContext
|
||||
from pkgmgr.actions.repository.install import InstallContext
|
||||
|
||||
|
||||
class PythonInstaller(BaseInstaller):
|
||||
"""Install Python projects and dependencies via pip."""
|
||||
@@ -27,19 +40,54 @@ class PythonInstaller(BaseInstaller):
|
||||
# Logical layer name, used by capability matchers.
|
||||
layer = "python"
|
||||
|
||||
def supports(self, ctx) -> bool:
|
||||
def _in_nix_shell(self) -> bool:
|
||||
"""
|
||||
Return True if we appear to be running inside a Nix dev shell.
|
||||
|
||||
Nix sets IN_NIX_SHELL in `nix develop` environments. In that case
|
||||
the Python environment is already provided by Nix, so we must not
|
||||
attempt an additional pip-based installation.
|
||||
"""
|
||||
return bool(os.environ.get("IN_NIX_SHELL"))
|
||||
|
||||
def supports(self, ctx: "RepoContext") -> bool:
|
||||
"""
|
||||
Return True if this installer should handle the given repository.
|
||||
|
||||
Only pyproject.toml is supported as the single source of truth
|
||||
for Python dependencies and packaging metadata.
|
||||
|
||||
The installer is *disabled* when:
|
||||
- IN_NIX_SHELL is set (Python managed by Nix dev shell), or
|
||||
- PKGMGR_DISABLE_PYTHON_INSTALLER=1 is set.
|
||||
"""
|
||||
# 1) Skip in Nix dev shells – Python is managed by the flake/devShell.
|
||||
if self._in_nix_shell():
|
||||
print(
|
||||
"[INFO] IN_NIX_SHELL detected; skipping PythonInstaller. "
|
||||
"Python runtime is provided by the Nix dev shell."
|
||||
)
|
||||
return False
|
||||
|
||||
# 2) Optional global kill-switch.
|
||||
if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
|
||||
print(
|
||||
"[INFO] PKGMGR_DISABLE_PYTHON_INSTALLER=1 – "
|
||||
"PythonInstaller is disabled."
|
||||
)
|
||||
return False
|
||||
|
||||
repo_dir = ctx.repo_dir
|
||||
return os.path.exists(os.path.join(repo_dir, "pyproject.toml"))
|
||||
|
||||
def _pip_cmd(self) -> str:
|
||||
"""
|
||||
Resolve the pip command to use.
|
||||
|
||||
Order:
|
||||
1) PKGMGR_PIP (explicit override)
|
||||
2) sys.executable -m pip
|
||||
3) plain "pip"
|
||||
"""
|
||||
explicit = os.environ.get("PKGMGR_PIP", "").strip()
|
||||
if explicit:
|
||||
@@ -50,12 +98,23 @@ class PythonInstaller(BaseInstaller):
|
||||
|
||||
return "pip"
|
||||
|
||||
def run(self, ctx) -> None:
|
||||
def run(self, ctx: "InstallContext") -> None:
|
||||
"""
|
||||
Install Python project defined via pyproject.toml.
|
||||
|
||||
Any pip failure is propagated as SystemExit.
|
||||
"""
|
||||
# Extra guard in case run() is called directly without supports().
|
||||
if self._in_nix_shell():
|
||||
print(
|
||||
"[INFO] IN_NIX_SHELL detected in PythonInstaller.run(); "
|
||||
"skipping pip-based installation."
|
||||
)
|
||||
return
|
||||
|
||||
if not self.supports(ctx): # type: ignore[arg-type]
|
||||
return
|
||||
|
||||
pip_cmd = self._pip_cmd()
|
||||
|
||||
pyproject = os.path.join(ctx.repo_dir, "pyproject.toml")
|
||||
|
||||
@@ -1,35 +1,57 @@
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.repository.verify import verify_repository
|
||||
|
||||
|
||||
def pull_with_verification(
|
||||
selected_repos,
|
||||
repositories_base_dir,
|
||||
all_repos,
|
||||
extra_args,
|
||||
no_verification,
|
||||
preview:bool):
|
||||
preview: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Executes "git pull" for each repository with verification.
|
||||
|
||||
Uses the verify_repository function in "pull" mode.
|
||||
If verification fails (and verification info is set) and --no-verification is not enabled,
|
||||
the user is prompted to confirm the pull.
|
||||
Execute `git pull` for each repository with verification.
|
||||
|
||||
- Uses verify_repository() in "pull" mode.
|
||||
- If verification fails (and verification info is set) and
|
||||
--no-verification is not enabled, the user is prompted to confirm
|
||||
the pull.
|
||||
- In preview mode, no interactive prompts are performed and no
|
||||
Git commands are executed; only the would-be command is printed.
|
||||
"""
|
||||
for repo in selected_repos:
|
||||
repo_identifier = get_repo_identifier(repo, all_repos)
|
||||
repo_dir = get_repo_dir(repositories_base_dir, repo)
|
||||
|
||||
if not os.path.exists(repo_dir):
|
||||
print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")
|
||||
continue
|
||||
|
||||
verified_info = repo.get("verified")
|
||||
verified_ok, errors, commit_hash, signing_key = verify_repository(repo, repo_dir, mode="pull", no_verification=no_verification)
|
||||
verified_ok, errors, commit_hash, signing_key = verify_repository(
|
||||
repo,
|
||||
repo_dir,
|
||||
mode="pull",
|
||||
no_verification=no_verification,
|
||||
)
|
||||
|
||||
if not no_verification and verified_info and not verified_ok:
|
||||
# Only prompt the user if:
|
||||
# - we are NOT in preview mode
|
||||
# - verification is enabled
|
||||
# - the repo has verification info configured
|
||||
# - verification failed
|
||||
if (
|
||||
not preview
|
||||
and not no_verification
|
||||
and verified_info
|
||||
and not verified_ok
|
||||
):
|
||||
print(f"Warning: Verification failed for {repo_identifier}:")
|
||||
for err in errors:
|
||||
print(f" - {err}")
|
||||
@@ -37,12 +59,19 @@ def pull_with_verification(
|
||||
if choice != "y":
|
||||
continue
|
||||
|
||||
full_cmd = f"git pull {' '.join(extra_args)}"
|
||||
# Build the git pull command (include extra args if present)
|
||||
args_part = " ".join(extra_args) if extra_args else ""
|
||||
full_cmd = f"git pull{(' ' + args_part) if args_part else ''}"
|
||||
|
||||
if preview:
|
||||
# Preview mode: only show the command, do not execute or prompt.
|
||||
print(f"[Preview] In '{repo_dir}': {full_cmd}")
|
||||
else:
|
||||
print(f"Running in '{repo_dir}': {full_cmd}")
|
||||
result = subprocess.run(full_cmd, cwd=repo_dir, shell=True)
|
||||
if result.returncode != 0:
|
||||
print(f"'git pull' for {repo_identifier} failed with exit code {result.returncode}.")
|
||||
print(
|
||||
f"'git pull' for {repo_identifier} failed "
|
||||
f"with exit code {result.returncode}."
|
||||
)
|
||||
sys.exit(result.returncode)
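Illustrative only (not part of the diff): the command-building step above, showing that no trailing space is produced when there are no extra arguments.

    def build_pull_command(extra_args):
        args_part = " ".join(extra_args) if extra_args else ""
        return f"git pull{(' ' + args_part) if args_part else ''}"

    print(repr(build_pull_command([])))             # -> 'git pull'
    print(repr(build_pull_command(["--ff-only"])))  # -> 'git pull --ff-only'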
|
||||
|
||||
@@ -8,6 +8,7 @@ import re
|
||||
from typing import Any, Dict, List, Sequence
|
||||
|
||||
from pkgmgr.core.repository.resolve import resolve_repos
|
||||
from pkgmgr.core.repository.ignored import filter_ignored
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
@@ -88,7 +89,7 @@ def _apply_filters(
|
||||
if not _match_pattern(ident_str, string_pattern):
|
||||
continue
|
||||
|
||||
# Category filter: nur echte Kategorien, KEINE Tags
|
||||
# Category filter: only real categories, NOT tags
|
||||
if category_patterns:
|
||||
cats: List[str] = []
|
||||
cats.extend(map(str, repo.get("category_files", [])))
|
||||
@@ -106,7 +107,7 @@ def _apply_filters(
|
||||
if not ok:
|
||||
continue
|
||||
|
||||
# Tag filter: ausschließlich YAML-Tags
|
||||
# Tag filter: YAML tags only
|
||||
if tag_patterns:
|
||||
tags: List[str] = list(map(str, repo.get("tags", [])))
|
||||
if not tags:
|
||||
@@ -124,16 +125,38 @@ def _apply_filters(
|
||||
|
||||
return filtered
|
||||
|
||||
|
||||
def _maybe_filter_ignored(args, repos: List[Repository]) -> List[Repository]:
|
||||
"""
|
||||
Apply ignore filtering unless the caller explicitly opted to include ignored
|
||||
repositories (via args.include_ignored).
|
||||
|
||||
Note: this helper is used only for *implicit* selections (all / filters /
|
||||
by-directory). For *explicit* identifiers we do NOT filter ignored repos,
|
||||
so the user can still target them directly if desired.
|
||||
"""
|
||||
include_ignored: bool = bool(getattr(args, "include_ignored", False))
|
||||
if include_ignored:
|
||||
return repos
|
||||
return filter_ignored(repos)
|
||||
|
||||
|
||||
def get_selected_repos(args, all_repositories: List[Repository]) -> List[Repository]:
|
||||
"""
|
||||
Compute the list of repositories selected by CLI arguments.
|
||||
|
||||
Modes:
|
||||
- If identifiers are given: select via resolve_repos() from all_repositories.
|
||||
- Else if any of --category/--string/--tag is used: start from all_repositories
|
||||
and apply filters.
|
||||
- Else if --all is set: select all_repositories.
|
||||
- Else: try to select the repository of the current working directory.
|
||||
Ignored repositories are *not* filtered here, so explicit identifiers
|
||||
always win.
|
||||
- Else if any of --category/--string/--tag is used: start from
|
||||
all_repositories, apply filters and then drop ignored repos.
|
||||
- Else if --all is set: select all_repositories and then drop ignored repos.
|
||||
- Else: try to select the repository of the current working directory
|
||||
and then drop it if it is ignored.
|
||||
|
||||
The ignore filter can be bypassed by setting args.include_ignored = True
|
||||
(e.g. via a CLI flag --include-ignored).
|
||||
"""
|
||||
identifiers: List[str] = getattr(args, "identifiers", []) or []
|
||||
use_all: bool = bool(getattr(args, "all", False))
|
||||
@@ -143,18 +166,25 @@ def get_selected_repos(args, all_repositories: List[Repository]) -> List[Reposit
|
||||
|
||||
has_filters = bool(category_patterns or string_pattern or tag_patterns)
|
||||
|
||||
# 1) Explicit identifiers win
|
||||
# 1) Explicit identifiers win and bypass ignore filtering
|
||||
if identifiers:
|
||||
base = resolve_repos(identifiers, all_repositories)
|
||||
return _apply_filters(base, string_pattern, category_patterns, tag_patterns)
|
||||
|
||||
# 2) Filter-only mode: start from all repositories
|
||||
if has_filters:
|
||||
return _apply_filters(list(all_repositories), string_pattern, category_patterns, tag_patterns)
|
||||
base = _apply_filters(
|
||||
list(all_repositories),
|
||||
string_pattern,
|
||||
category_patterns,
|
||||
tag_patterns,
|
||||
)
|
||||
return _maybe_filter_ignored(args, base)
|
||||
|
||||
# 3) --all (no filters): all repos
|
||||
if use_all:
|
||||
return list(all_repositories)
|
||||
base = list(all_repositories)
|
||||
return _maybe_filter_ignored(args, base)
|
||||
|
||||
# 4) Fallback: try to select repository of current working directory
|
||||
cwd = os.path.abspath(os.getcwd())
|
||||
@@ -164,7 +194,7 @@ def get_selected_repos(args, all_repositories: List[Repository]) -> List[Reposit
|
||||
if os.path.abspath(str(repo.get("directory", ""))) == cwd
|
||||
]
|
||||
if by_dir:
|
||||
return by_dir
|
||||
return _maybe_filter_ignored(args, by_dir)
|
||||
|
||||
# No specific match -> empty list
|
||||
return []
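A hypothetical, heavily simplified illustration (not part of the diff) of the selection precedence described in the docstring above; the real implementation resolves identifiers and applies the ignore filter as shown in the code.

    from types import SimpleNamespace

    def selection_mode(args) -> str:
        if getattr(args, "identifiers", None):
            return "explicit identifiers (ignored repos are NOT filtered)"
        if args.category or args.string or args.tag:
            return "filter mode (ignored repos dropped)"
        if args.all:
            return "--all (ignored repos dropped)"
        return "current-working-directory fallback (ignored repos dropped)"

    args = SimpleNamespace(identifiers=["pkgmgr"], category=None, string=None, tag=None, all=False)
    print(selection_mode(args))  # -> explicit identifiers (ignored repos are NOT filtered)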
|
||||
|
||||
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "package-manager"
|
||||
version = "0.7.1"
|
||||
version = "0.7.11"
|
||||
description = "Kevin's package-manager tool (pkgmgr)"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
|
||||
@@ -2,28 +2,59 @@
|
||||
set -euo pipefail
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Ensure Nix has access to a valid CA bundle (TLS trust store)
|
||||
# Detect and export a valid CA bundle so Nix, Git, curl and Python tooling
|
||||
# can successfully perform HTTPS requests on all distros (Debian, Ubuntu,
|
||||
# Fedora, RHEL, CentOS, etc.)
|
||||
# ---------------------------------------------------------------------------
|
||||
if [[ -z "${NIX_SSL_CERT_FILE:-}" ]]; then
|
||||
if [[ -f /etc/ssl/certs/ca-certificates.crt ]]; then
|
||||
# Debian/Ubuntu-style path
|
||||
export NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
|
||||
echo "[docker] Using CA bundle: ${NIX_SSL_CERT_FILE}"
|
||||
elif [[ -f /etc/pki/tls/certs/ca-bundle.crt ]]; then
|
||||
# Fedora/RHEL/CentOS-style path
|
||||
export NIX_SSL_CERT_FILE=/etc/pki/tls/certs/ca-bundle.crt
|
||||
echo "[docker] Using CA bundle: ${NIX_SSL_CERT_FILE}"
|
||||
else
|
||||
echo "[docker] WARNING: No CA bundle found for Nix (NIX_SSL_CERT_FILE not set)."
|
||||
echo "[docker] HTTPS access for Nix flakes may fail."
|
||||
fi
|
||||
detect_ca_bundle() {
|
||||
# Common CA bundle locations across major Linux distributions
|
||||
local candidates=(
|
||||
/etc/ssl/certs/ca-certificates.crt # Debian/Ubuntu
|
||||
/etc/ssl/cert.pem # Some distros
|
||||
/etc/pki/tls/certs/ca-bundle.crt # Fedora/RHEL/CentOS
|
||||
/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem # CentOS/RHEL extracted bundle
|
||||
/etc/ssl/ca-bundle.pem # Generic fallback
|
||||
)
|
||||
|
||||
for path in "${candidates[@]}"; do
|
||||
if [[ -f "$path" ]]; then
|
||||
echo "$path"
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
# Use existing NIX_SSL_CERT_FILE if provided, otherwise auto-detect
|
||||
CA_BUNDLE="${NIX_SSL_CERT_FILE:-}"
|
||||
|
||||
if [[ -z "${CA_BUNDLE}" ]]; then
|
||||
CA_BUNDLE="$(detect_ca_bundle || true)"
|
||||
fi
|
||||
|
||||
if [[ -n "${CA_BUNDLE}" ]]; then
|
||||
# Export for Nix (critical)
|
||||
export NIX_SSL_CERT_FILE="${CA_BUNDLE}"
|
||||
|
||||
# Export for Git, Python requests, curl, etc.
|
||||
export SSL_CERT_FILE="${CA_BUNDLE}"
|
||||
export REQUESTS_CA_BUNDLE="${CA_BUNDLE}"
|
||||
export GIT_SSL_CAINFO="${CA_BUNDLE}"
|
||||
|
||||
echo "[docker] Using CA bundle: ${CA_BUNDLE}"
|
||||
else
|
||||
echo "[docker] WARNING: No CA certificate bundle found."
|
||||
echo "[docker] HTTPS access for Nix flakes and other tools may fail."
|
||||
fi
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
|
||||
echo "[docker] Starting package-manager container"
|
||||
|
||||
# Distro info for logging
|
||||
# ---------------------------------------------------------------------------
|
||||
# Log distribution info
|
||||
# ---------------------------------------------------------------------------
|
||||
if [[ -f /etc/os-release ]]; then
|
||||
# shellcheck disable=SC1091
|
||||
. /etc/os-release
|
||||
@@ -34,9 +65,9 @@ fi
|
||||
echo "[docker] Using /src as working directory"
|
||||
cd /src
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# DEV mode: build/install package-manager from current /src
|
||||
# ------------------------------------------------------------
|
||||
# ---------------------------------------------------------------------------
|
||||
# DEV mode: rebuild package-manager from the mounted /src tree
|
||||
# ---------------------------------------------------------------------------
|
||||
if [[ "${PKGMGR_DEV:-0}" == "1" ]]; then
|
||||
echo "[docker] DEV mode enabled (PKGMGR_DEV=1)"
|
||||
echo "[docker] Rebuilding package-manager from /src via scripts/installation/run-package.sh..."
|
||||
@@ -49,9 +80,9 @@ if [[ "${PKGMGR_DEV:-0}" == "1" ]]; then
|
||||
fi
|
||||
fi
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Hand-off to pkgmgr / arbitrary command
|
||||
# ------------------------------------------------------------
|
||||
# ---------------------------------------------------------------------------
|
||||
# Hand off to pkgmgr or arbitrary command
|
||||
# ---------------------------------------------------------------------------
|
||||
if [[ $# -eq 0 ]]; then
|
||||
echo "[docker] No arguments provided. Showing pkgmgr help..."
|
||||
exec pkgmgr --help
|
||||
|
||||
@@ -97,11 +97,32 @@ if [[ "${IN_CONTAINER}" -eq 1 && "${EUID:-0}" -eq 0 ]]; then
|
||||
useradd -m -r -g nixbld -s /usr/bin/bash nix
|
||||
fi
|
||||
|
||||
# Create /nix directory and hand it to nix user (prevents installer sudo prompt)
|
||||
# Ensure /nix exists and is writable by the "nix" user.
|
||||
#
|
||||
# In some base images (or previous runs), /nix may already exist and be
|
||||
# owned by root. In that case the Nix single-user installer will abort with:
|
||||
#
|
||||
# "directory /nix exists, but is not writable by you"
|
||||
#
|
||||
# To keep container runs idempotent and robust, we always enforce
|
||||
# ownership nix:nixbld here.
|
||||
if [[ ! -d /nix ]]; then
|
||||
echo "[init-nix] Creating /nix with owner nix:nixbld..."
|
||||
mkdir -m 0755 /nix
|
||||
chown nix:nixbld /nix
|
||||
else
|
||||
current_owner="$(stat -c '%U' /nix 2>/dev/null || echo '?')"
|
||||
current_group="$(stat -c '%G' /nix 2>/dev/null || echo '?')"
|
||||
if [[ "${current_owner}" != "nix" || "${current_group}" != "nixbld" ]]; then
|
||||
echo "[init-nix] /nix already exists with owner ${current_owner}:${current_group} – fixing to nix:nixbld..."
|
||||
chown -R nix:nixbld /nix
|
||||
else
|
||||
echo "[init-nix] /nix already exists with correct owner nix:nixbld."
|
||||
fi
|
||||
|
||||
if [[ ! -w /nix ]]; then
|
||||
echo "[init-nix] WARNING: /nix is still not writable after chown; Nix installer may fail."
|
||||
fi
|
||||
fi
|
||||
|
||||
# Run Nix single-user installer as "nix"
|
||||
|
||||
@@ -13,6 +13,7 @@ dnf -y install \
|
||||
bash \
|
||||
curl-minimal \
|
||||
ca-certificates \
|
||||
sudo \
|
||||
xz
|
||||
|
||||
dnf clean all
|
||||
|
||||
@@ -19,6 +19,7 @@ for distro in $DISTROS; do
|
||||
# Run the command and capture the output
|
||||
if OUTPUT=$(docker run --rm \
|
||||
-e PKGMGR_DEV=1 \
|
||||
-v pkgmgr_nix_store:/nix \
|
||||
-v "$(pwd):/src" \
|
||||
-v "pkgmgr_nix_cache:/root/.cache/nix" \
|
||||
"$IMAGE" 2>&1); then
|
||||
|
||||
@@ -8,16 +8,12 @@ for distro in $DISTROS; do
|
||||
echo ">>> Running E2E tests: $distro"
|
||||
echo "============================================================"
|
||||
|
||||
MOUNT_NIX=""
|
||||
if [[ "$distro" == "arch" ]]; then
|
||||
MOUNT_NIX="-v pkgmgr_nix_store:/nix"
|
||||
fi
|
||||
|
||||
docker run --rm \
|
||||
-v "$(pwd):/src" \
|
||||
$MOUNT_NIX \
|
||||
-v pkgmgr_nix_store:/nix \
|
||||
-v "pkgmgr_nix_cache:/root/.cache/nix" \
|
||||
-e PKGMGR_DEV=1 \
|
||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||
--workdir /src \
|
||||
--entrypoint bash \
|
||||
"package-manager-test-$distro" \
|
||||
@@ -51,6 +47,6 @@ for distro in $DISTROS; do
|
||||
nix develop .#default --no-write-lock-file -c \
|
||||
python3 -m unittest discover \
|
||||
-s /src/tests/e2e \
|
||||
-p "test_*.py";
|
||||
-p "$TEST_PATTERN";
|
||||
'
|
||||
done
|
||||
|
||||
@@ -7,9 +7,11 @@ echo "============================================================"
|
||||
|
||||
docker run --rm \
|
||||
-v "$(pwd):/src" \
|
||||
-v pkgmgr_nix_store:/nix \
|
||||
-v "pkgmgr_nix_cache:/root/.cache/nix" \
|
||||
--workdir /src \
|
||||
-e PKGMGR_DEV=1 \
|
||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||
--entrypoint bash \
|
||||
"package-manager-test-arch" \
|
||||
-c '
|
||||
@@ -19,5 +21,5 @@ docker run --rm \
|
||||
python -m unittest discover \
|
||||
-s tests/integration \
|
||||
-t /src \
|
||||
-p "test_*.py";
|
||||
-p "$TEST_PATTERN";
|
||||
'
|
||||
|
||||
@@ -8,8 +8,10 @@ echo "============================================================"
|
||||
docker run --rm \
|
||||
-v "$(pwd):/src" \
|
||||
-v "pkgmgr_nix_cache:/root/.cache/nix" \
|
||||
-v pkgmgr_nix_store:/nix \
|
||||
--workdir /src \
|
||||
-e PKGMGR_DEV=1 \
|
||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||
--entrypoint bash \
|
||||
"package-manager-test-arch" \
|
||||
-c '
|
||||
@@ -19,5 +21,5 @@ docker run --rm \
|
||||
python -m unittest discover \
|
||||
-s tests/unit \
|
||||
-t /src \
|
||||
-p "test_*.py";
|
||||
-p "$TEST_PATTERN";
|
||||
'
|
||||
|
||||
@@ -35,8 +35,8 @@ def remove_pkgmgr_from_nix_profile() -> None:
|
||||
prints a descriptive format without an index column inside the container.
|
||||
|
||||
Instead, we directly try to remove possible names:
|
||||
- 'pkgmgr' (the actual name shown in `nix profile list`)
|
||||
- 'package-manager' (the name mentioned in Nix's own error hints)
|
||||
- 'pkgmgr'
|
||||
- 'package-manager'
|
||||
"""
|
||||
for spec in ("pkgmgr", "package-manager"):
|
||||
subprocess.run(
|
||||
@@ -45,18 +45,34 @@ def remove_pkgmgr_from_nix_profile() -> None:
|
||||
)
|
||||
|
||||
|
||||
def configure_git_safe_directory() -> None:
|
||||
"""
|
||||
Configure Git to treat /src as a safe directory.
|
||||
|
||||
Needed because /src is a bind-mounted repository in CI, often owned by a
|
||||
different UID. Modern Git aborts with:
|
||||
'fatal: detected dubious ownership in repository at /src/.git'
|
||||
|
||||
This fix applies ONLY inside this test container.
|
||||
"""
|
||||
try:
|
||||
subprocess.run(
|
||||
["git", "config", "--global", "--add", "safe.directory", "/src"],
|
||||
check=False,
|
||||
)
|
||||
except FileNotFoundError:
|
||||
print("[WARN] git not found – skipping safe.directory configuration")
|
||||
|
||||
|
||||
def pkgmgr_help_debug() -> None:
|
||||
"""
|
||||
Run `pkgmgr --help` after installation *inside an interactive bash shell*,
|
||||
print its output and return code, but never fail the test.
|
||||
|
||||
Reason:
|
||||
- The installer adds venv/alias setup into shell rc files (~/.bashrc, ~/.zshrc)
|
||||
- Those changes are only applied in a new interactive shell session.
|
||||
This ensures the installer’s shell RC changes are actually loaded.
|
||||
"""
|
||||
print("\n--- PKGMGR HELP (after installation, via bash -i) ---")
|
||||
|
||||
# Simulate a fresh interactive bash, so ~/.bashrc gets sourced
|
||||
proc = subprocess.run(
|
||||
["bash", "-i", "-c", "pkgmgr --help"],
|
||||
capture_output=True,
|
||||
@@ -76,29 +92,43 @@ def pkgmgr_help_debug() -> None:
|
||||
print(f"returncode: {proc.returncode}")
|
||||
print("--- END ---\n")
|
||||
|
||||
if proc.returncode != 0:
|
||||
raise AssertionError(f"'pkgmgr --help' failed with exit code {proc.returncode}")
|
||||
|
||||
# Important: no AssertionError here any more; this is purely debug output.
|
||||
# If you later want to make this a hard check, you can optionally do:
|
||||
# if proc.returncode != 0:
|
||||
# self.fail("...")
|
||||
#     but for now this is only a visual check.
|
||||
|
||||
|
||||
class TestIntegrationInstalPKGMGRShallow(unittest.TestCase):
|
||||
def test_install_pkgmgr_self_install(self) -> None:
|
||||
# Debug before cleanup
|
||||
nix_profile_list_debug("BEFORE CLEANUP")
|
||||
"""
|
||||
End-to-end test that runs "python main.py install pkgmgr ..." inside
|
||||
the test container.
|
||||
|
||||
# Cleanup: aggressively try to drop any pkgmgr/profile entries
|
||||
remove_pkgmgr_from_nix_profile()
|
||||
|
||||
# Debug after cleanup
|
||||
nix_profile_list_debug("AFTER CLEANUP")
|
||||
HOME is isolated to avoid permission problems with Nix & repositories.
|
||||
"""
|
||||
temp_home = "/tmp/pkgmgr-self-install"
|
||||
os.makedirs(temp_home, exist_ok=True)
|
||||
|
||||
original_argv = sys.argv
|
||||
original_environ = os.environ.copy()
|
||||
|
||||
try:
|
||||
# Isolate HOME so that ~ expands to /tmp/pkgmgr-self-install
|
||||
os.environ["HOME"] = temp_home
|
||||
|
||||
# Optional XDG override for a fully isolated environment
|
||||
os.environ.setdefault("XDG_CONFIG_HOME", os.path.join(temp_home, ".config"))
|
||||
os.environ.setdefault("XDG_CACHE_HOME", os.path.join(temp_home, ".cache"))
|
||||
os.environ.setdefault("XDG_DATA_HOME", os.path.join(temp_home, ".local", "share"))
|
||||
|
||||
# 🔧 IMPORTANT FIX: allow Git to access /src safely
|
||||
configure_git_safe_directory()
|
||||
|
||||
# Debug before cleanup
|
||||
nix_profile_list_debug("BEFORE CLEANUP")
|
||||
|
||||
# Cleanup: drop any pkgmgr entries from nix profile
|
||||
remove_pkgmgr_from_nix_profile()
|
||||
|
||||
# Debug after cleanup
|
||||
nix_profile_list_debug("AFTER CLEANUP")
|
||||
|
||||
# Prepare argv for module execution
|
||||
sys.argv = [
|
||||
"python",
|
||||
"install",
|
||||
@@ -107,13 +137,18 @@ class TestIntegrationInstalPKGMGRShallow(unittest.TestCase):
|
||||
"shallow",
|
||||
"--no-verification",
|
||||
]
|
||||
# Führt die Installation via main.py aus
|
||||
|
||||
# Execute installation via main.py
|
||||
runpy.run_module("main", run_name="__main__")
|
||||
|
||||
# Nach erfolgreicher Installation: pkgmgr --help anzeigen (Debug)
|
||||
# Debug: interactive shell test
|
||||
pkgmgr_help_debug()
|
||||
|
||||
finally:
|
||||
# Restore system state
|
||||
sys.argv = original_argv
|
||||
os.environ.clear()
|
||||
os.environ.update(original_environ)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
# tests/unit/pkgmgr/installers/os_packages/test_debian_control.py
|
||||
|
||||
import os
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from pkgmgr.actions.repository.install.context import RepoContext
|
||||
from pkgmgr.actions.repository.install.installers.os_packages.debian_control import DebianControlInstaller
|
||||
from pkgmgr.actions.repository.install.installers.os_packages.debian_control import (
|
||||
DebianControlInstaller,
|
||||
)
|
||||
|
||||
|
||||
class TestDebianControlInstaller(unittest.TestCase):
|
||||
@@ -29,14 +28,24 @@ class TestDebianControlInstaller(unittest.TestCase):
|
||||
@patch("os.path.exists", return_value=True)
|
||||
@patch("shutil.which", return_value="/usr/bin/dpkg-buildpackage")
|
||||
def test_supports_true(self, mock_which, mock_exists):
|
||||
"""
|
||||
supports() should return True when dpkg-buildpackage is available
|
||||
and a debian/control file exists in the repository.
|
||||
"""
|
||||
self.assertTrue(self.installer.supports(self.ctx))
|
||||
|
||||
@patch("os.path.exists", return_value=True)
|
||||
@patch("shutil.which", return_value=None)
|
||||
def test_supports_false_without_dpkg_buildpackage(self, mock_which, mock_exists):
|
||||
"""
|
||||
supports() should return False when dpkg-buildpackage is not available,
|
||||
even if a debian/control file exists.
|
||||
"""
|
||||
self.assertFalse(self.installer.supports(self.ctx))
|
||||
|
||||
@patch("pkgmgr.actions.repository.install.installers.os_packages.debian_control.run_command")
|
||||
@patch(
|
||||
"pkgmgr.actions.repository.install.installers.os_packages.debian_control.run_command"
|
||||
)
|
||||
@patch("glob.glob", return_value=["/tmp/package-manager_0.1.1_all.deb"])
|
||||
@patch("os.path.exists", return_value=True)
|
||||
@patch("shutil.which")
|
||||
@@ -47,7 +56,19 @@ class TestDebianControlInstaller(unittest.TestCase):
|
||||
mock_glob,
|
||||
mock_run_command,
|
||||
):
|
||||
# dpkg-buildpackage + apt-get vorhanden
|
||||
"""
|
||||
run() should:
|
||||
|
||||
1. Install build dependencies (apt-get build-dep).
|
||||
2. Build the package using dpkg-buildpackage -b -us -uc.
|
||||
3. Discover built .deb files via glob.
|
||||
4. Install the resulting .deb packages using a suitable tool:
|
||||
- dpkg -i
|
||||
- sudo dpkg -i
|
||||
- or sudo apt-get install -y
|
||||
"""
|
||||
|
||||
# Simulate dpkg-buildpackage and apt-get being available.
|
||||
def which_side_effect(name):
|
||||
if name == "dpkg-buildpackage":
|
||||
return "/usr/bin/dpkg-buildpackage"
|
||||
@@ -64,16 +85,35 @@ class TestDebianControlInstaller(unittest.TestCase):
|
||||
# 1) apt-get update
|
||||
self.assertTrue(any("apt-get update" in cmd for cmd in cmds))
|
||||
|
||||
# 2) apt-get build-dep ./
|
||||
self.assertTrue(any("apt-get build-dep -y ./ " in cmd or
|
||||
"apt-get build-dep -y ./"
|
||||
in cmd for cmd in cmds))
|
||||
# 2) apt-get build-dep -y ./ (with or without trailing space)
|
||||
self.assertTrue(
|
||||
any(
|
||||
"apt-get build-dep -y ./ " in cmd
|
||||
or "apt-get build-dep -y ./"
|
||||
in cmd
|
||||
for cmd in cmds
|
||||
)
|
||||
)
|
||||
|
||||
# 3) dpkg-buildpackage -b -us -uc
|
||||
self.assertTrue(any("dpkg-buildpackage -b -us -uc" in cmd for cmd in cmds))
|
||||
|
||||
# 4) dpkg -i ../*.deb
|
||||
self.assertTrue(any(cmd.startswith("sudo dpkg -i ") for cmd in cmds))
|
||||
# 4) final installation of .deb packages:
|
||||
# accept dpkg -i, sudo dpkg -i, or sudo apt-get install -y
|
||||
has_plain_dpkg_install = any(cmd.startswith("dpkg -i ") for cmd in cmds)
|
||||
has_sudo_dpkg_install = any(cmd.startswith("sudo dpkg -i ") for cmd in cmds)
|
||||
has_apt_install = any(
|
||||
cmd.startswith("sudo apt-get install -y ") for cmd in cmds
|
||||
)
|
||||
|
||||
self.assertTrue(
|
||||
has_plain_dpkg_install or has_sudo_dpkg_install or has_apt_install,
|
||||
msg=(
|
||||
"Expected one of 'dpkg -i', 'sudo dpkg -i' or "
|
||||
"'sudo apt-get install -y', but got commands: "
|
||||
f"{cmds}"
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
# tests/unit/pkgmgr/installers/os_packages/test_rpm_spec.py
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from pkgmgr.actions.repository.install.context import RepoContext
|
||||
from pkgmgr.actions.repository.install.installers.os_packages.rpm_spec import RpmSpecInstaller
|
||||
from pkgmgr.actions.repository.install.installers.os_packages.rpm_spec import (
|
||||
RpmSpecInstaller,
|
||||
)
|
||||
|
||||
|
||||
class TestRpmSpecInstaller(unittest.TestCase):
|
||||
@@ -28,6 +28,13 @@ class TestRpmSpecInstaller(unittest.TestCase):
|
||||
@patch("glob.glob", return_value=["/tmp/repo/test.spec"])
|
||||
@patch("shutil.which")
|
||||
def test_supports_true(self, mock_which, mock_glob):
|
||||
"""
|
||||
supports() should return True when:
|
||||
- rpmbuild is available, and
|
||||
- at least one of dnf/yum/yum-builddep is available, and
|
||||
- a *.spec file is present in the repo.
|
||||
"""
|
||||
|
||||
def which_side_effect(name):
|
||||
if name == "rpmbuild":
|
||||
return "/usr/bin/rpmbuild"
|
||||
@@ -42,9 +49,14 @@ class TestRpmSpecInstaller(unittest.TestCase):
|
||||
@patch("glob.glob", return_value=[])
|
||||
@patch("shutil.which")
|
||||
def test_supports_false_missing_spec(self, mock_which, mock_glob):
|
||||
"""
|
||||
supports() should return False if no *.spec file is found,
|
||||
even if rpmbuild is present.
|
||||
"""
|
||||
mock_which.return_value = "/usr/bin/rpmbuild"
|
||||
self.assertFalse(self.installer.supports(self.ctx))
|
||||
|
||||
@patch.object(RpmSpecInstaller, "_prepare_source_tarball")
|
||||
@patch("pkgmgr.actions.repository.install.installers.os_packages.rpm_spec.run_command")
|
||||
@patch("glob.glob")
|
||||
@patch("shutil.which")
|
||||
@@ -53,8 +65,20 @@ class TestRpmSpecInstaller(unittest.TestCase):
|
||||
mock_which,
|
||||
mock_glob,
|
||||
mock_run_command,
|
||||
mock_prepare_source_tarball,
|
||||
):
|
||||
# glob.glob wird zweimal benutzt: einmal für *.spec, einmal für gebaute RPMs
|
||||
"""
|
||||
run() should:
|
||||
|
||||
1. Determine the .spec file in the repo.
|
||||
2. Call _prepare_source_tarball() once with ctx and spec path.
|
||||
3. Install build dependencies via dnf/yum-builddep/yum.
|
||||
4. Call rpmbuild -ba <spec>.
|
||||
5. Find built RPMs via glob.
|
||||
6. Install built RPMs via dnf/yum/rpm (here: dnf).
|
||||
"""
|
||||
|
||||
# glob.glob is used twice: once for *.spec, once for built RPMs.
|
||||
def glob_side_effect(pattern, recursive=False):
|
||||
if pattern.endswith("*.spec"):
|
||||
return ["/tmp/repo/package-manager.spec"]
|
||||
@@ -77,16 +101,23 @@ class TestRpmSpecInstaller(unittest.TestCase):
|
||||
|
||||
self.installer.run(self.ctx)
|
||||
|
||||
# _prepare_source_tarball must have been called with the resolved spec path.
|
||||
mock_prepare_source_tarball.assert_called_once_with(
|
||||
self.ctx,
|
||||
"/tmp/repo/package-manager.spec",
|
||||
)
|
||||
|
||||
# Collect all command strings passed to run_command.
|
||||
cmds = [c[0][0] for c in mock_run_command.call_args_list]
|
||||
|
||||
# 1) builddep
|
||||
# 1) build dependencies (dnf builddep)
|
||||
self.assertTrue(any("builddep -y" in cmd for cmd in cmds))
|
||||
|
||||
# 2) rpmbuild -ba
|
||||
# 2) rpmbuild -ba <spec>
|
||||
self.assertTrue(any(cmd.startswith("rpmbuild -ba ") for cmd in cmds))
|
||||
|
||||
# 3) rpm -i …
|
||||
self.assertTrue(any(cmd.startswith("sudo rpm -i ") for cmd in cmds))
|
||||
# 3) installation via dnf: "sudo dnf install -y <rpms>"
|
||||
self.assertTrue(any(cmd.startswith("sudo dnf install -y ") for cmd in cmds))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -28,14 +28,27 @@ class TestNixFlakeInstaller(unittest.TestCase):
|
||||
@patch("shutil.which", return_value="/usr/bin/nix")
|
||||
@patch("os.path.exists", return_value=True)
|
||||
def test_supports_true_when_nix_and_flake_exist(self, mock_exists, mock_which):
|
||||
self.assertTrue(self.installer.supports(self.ctx))
|
||||
"""
|
||||
supports() should return True when:
|
||||
- nix is available,
|
||||
- flake.nix exists in the repo,
|
||||
- and we are not inside a Nix dev shell.
|
||||
"""
|
||||
with patch.dict(os.environ, {"IN_NIX_SHELL": ""}, clear=False):
|
||||
self.assertTrue(self.installer.supports(self.ctx))
|
||||
|
||||
mock_which.assert_called_with("nix")
|
||||
mock_exists.assert_called_with(os.path.join(self.ctx.repo_dir, "flake.nix"))
|
||||
|
||||
@patch("shutil.which", return_value=None)
|
||||
@patch("os.path.exists", return_value=True)
|
||||
def test_supports_false_when_nix_missing(self, mock_exists, mock_which):
|
||||
self.assertFalse(self.installer.supports(self.ctx))
|
||||
"""
|
||||
supports() should return False if nix is not available,
|
||||
even if a flake.nix file exists.
|
||||
"""
|
||||
with patch.dict(os.environ, {"IN_NIX_SHELL": ""}, clear=False):
|
||||
self.assertFalse(self.installer.supports(self.ctx))
|
||||
|
||||
@patch("os.path.exists", return_value=True)
|
||||
@patch("shutil.which", return_value="/usr/bin/nix")
|
||||
@@ -47,10 +60,12 @@ class TestNixFlakeInstaller(unittest.TestCase):
|
||||
mock_exists,
|
||||
):
|
||||
"""
|
||||
Ensure that run():
|
||||
- first tries to remove the old 'package-manager' profile entry
|
||||
- then installs both 'pkgmgr' and 'default' outputs.
|
||||
run() should:
|
||||
|
||||
1. attempt to remove the old 'package-manager' profile entry, and
|
||||
2. install both 'pkgmgr' and 'default' flake outputs.
|
||||
"""
|
||||
|
||||
cmds = []
|
||||
|
||||
def side_effect(cmd, cwd=None, preview=False, *args, **kwargs):
|
||||
@@ -59,18 +74,24 @@ class TestNixFlakeInstaller(unittest.TestCase):
|
||||
|
||||
mock_run_command.side_effect = side_effect
|
||||
|
||||
self.installer.run(self.ctx)
|
||||
# Simulate a normal environment (not inside nix develop, installer enabled).
|
||||
with patch.dict(
|
||||
os.environ,
|
||||
{"IN_NIX_SHELL": "", "PKGMGR_DISABLE_NIX_FLAKE_INSTALLER": ""},
|
||||
clear=False,
|
||||
):
|
||||
self.installer.run(self.ctx)
|
||||
|
||||
remove_cmd = f"nix profile remove {self.installer.PROFILE_NAME} || true"
|
||||
install_pkgmgr_cmd = f"nix profile install {self.ctx.repo_dir}#pkgmgr"
|
||||
install_default_cmd = f"nix profile install {self.ctx.repo_dir}#default"
|
||||
|
||||
# Mindestens diese drei Kommandos müssen aufgerufen worden sein
|
||||
# At least these three commands must have been issued.
|
||||
self.assertIn(remove_cmd, cmds)
|
||||
self.assertIn(install_pkgmgr_cmd, cmds)
|
||||
self.assertIn(install_default_cmd, cmds)
|
||||
|
||||
# Optional: sicherstellen, dass der remove-Aufruf zuerst kam
|
||||
# Optional: ensure the remove call came first.
|
||||
self.assertEqual(cmds[0], remove_cmd)
|
||||
|
||||
@patch("shutil.which", return_value="/usr/bin/nix")
|
||||
@@ -90,8 +111,13 @@ class TestNixFlakeInstaller(unittest.TestCase):
|
||||
|
||||
mock_run_command.side_effect = side_effect
|
||||
|
||||
# Should not raise, SystemExit is swallowed internally.
|
||||
self.installer._ensure_old_profile_removed(self.ctx)
|
||||
with patch.dict(
|
||||
os.environ,
|
||||
{"IN_NIX_SHELL": "", "PKGMGR_DISABLE_NIX_FLAKE_INSTALLER": ""},
|
||||
clear=False,
|
||||
):
|
||||
# Should not raise, SystemExit is swallowed internally.
|
||||
self.installer._ensure_old_profile_removed(self.ctx)
|
||||
|
||||
remove_cmd = f"nix profile remove {self.installer.PROFILE_NAME} || true"
|
||||
mock_run_command.assert_called_with(
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
# tests/unit/pkgmgr/installers/test_python_installer.py
|
||||
|
||||
import os
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
@@ -28,18 +26,40 @@ class TestPythonInstaller(unittest.TestCase):
|
||||
|
||||
@patch("os.path.exists", side_effect=lambda path: path.endswith("pyproject.toml"))
|
||||
def test_supports_true_when_pyproject_exists(self, mock_exists):
|
||||
self.assertTrue(self.installer.supports(self.ctx))
|
||||
"""
|
||||
supports() should return True when a pyproject.toml exists in the repo
|
||||
and we are not inside a Nix dev shell.
|
||||
"""
|
||||
with patch.dict(os.environ, {"IN_NIX_SHELL": ""}, clear=False):
|
||||
self.assertTrue(self.installer.supports(self.ctx))
|
||||
|
||||
@patch("os.path.exists", return_value=False)
|
||||
def test_supports_false_when_no_pyproject(self, mock_exists):
|
||||
self.assertFalse(self.installer.supports(self.ctx))
|
||||
"""
|
||||
supports() should return False when no pyproject.toml exists.
|
||||
"""
|
||||
with patch.dict(os.environ, {"IN_NIX_SHELL": ""}, clear=False):
|
||||
self.assertFalse(self.installer.supports(self.ctx))
|
||||
|
||||
@patch("pkgmgr.actions.repository.install.installers.python.run_command")
|
||||
@patch("os.path.exists", side_effect=lambda path: path.endswith("pyproject.toml"))
|
||||
def test_run_installs_project_from_pyproject(self, mock_exists, mock_run_command):
|
||||
self.installer.run(self.ctx)
|
||||
"""
|
||||
run() should invoke pip to install the project from pyproject.toml
|
||||
when we are not inside a Nix dev shell.
|
||||
"""
|
||||
# Simulate a normal environment (not inside nix develop).
|
||||
with patch.dict(os.environ, {"IN_NIX_SHELL": ""}, clear=False):
|
||||
self.installer.run(self.ctx)
|
||||
|
||||
# Ensure run_command was actually called.
|
||||
mock_run_command.assert_called()
|
||||
|
||||
# Extract the command string.
|
||||
cmd = mock_run_command.call_args[0][0]
|
||||
self.assertIn("pip install .", cmd)
|
||||
|
||||
# Ensure the working directory is the repo dir.
|
||||
self.assertEqual(
|
||||
mock_run_command.call_args[1].get("cwd"),
|
||||
self.ctx.repo_dir,
|
||||
|
||||
@@ -13,6 +13,7 @@ from pkgmgr.actions.release.files import (
|
||||
update_spec_version,
|
||||
update_changelog,
|
||||
update_debian_changelog,
|
||||
update_spec_changelog,
|
||||
)
|
||||
|
||||
|
||||
@@ -79,6 +80,21 @@ class TestUpdatePyprojectVersion(unittest.TestCase):
|
||||
|
||||
self.assertNotEqual(cm.exception.code, 0)
|
||||
|
||||
def test_update_pyproject_version_missing_file_is_skipped(self) -> None:
|
||||
"""
|
||||
If pyproject.toml does not exist, the function must not raise
|
||||
and must not create the file. It should simply be skipped.
|
||||
"""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
path = os.path.join(tmpdir, "pyproject.toml")
|
||||
self.assertFalse(os.path.exists(path))
|
||||
|
||||
# Must not raise an exception
|
||||
update_pyproject_version(path, "1.2.3", preview=False)
|
||||
|
||||
# Still no file created
|
||||
self.assertFalse(os.path.exists(path))
|
||||
|
||||
|
||||
class TestUpdateFlakeVersion(unittest.TestCase):
|
||||
def test_update_flake_version_normal(self) -> None:
|
||||
@@ -310,5 +326,94 @@ class TestUpdateDebianChangelog(unittest.TestCase):
|
||||
self.assertEqual(content, original)
|
||||
|
||||
|
||||
class TestUpdateSpecChangelog(unittest.TestCase):
|
||||
def test_update_spec_changelog_inserts_stanza_after_changelog_marker(self) -> None:
|
||||
original = textwrap.dedent(
|
||||
"""
|
||||
Name: package-manager
|
||||
Version: 0.1.0
|
||||
Release: 5%{?dist}
|
||||
|
||||
%description
|
||||
Some description.
|
||||
|
||||
%changelog
|
||||
* Mon Jan 01 2024 Old Maintainer <old@example.com> - 0.1.0-1
|
||||
- Old entry
|
||||
"""
|
||||
).strip() + "\n"
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
path = os.path.join(tmpdir, "package-manager.spec")
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
f.write(original)
|
||||
|
||||
with patch.dict(
|
||||
os.environ,
|
||||
{
|
||||
"DEBFULLNAME": "Test Maintainer",
|
||||
"DEBEMAIL": "test@example.com",
|
||||
},
|
||||
clear=False,
|
||||
):
|
||||
update_spec_changelog(
|
||||
spec_path=path,
|
||||
package_name="package-manager",
|
||||
new_version="1.2.3",
|
||||
message="Fedora changelog entry",
|
||||
preview=False,
|
||||
)
|
||||
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
|
||||
# New stanza must appear after the %changelog marker
|
||||
self.assertIn("%changelog", content)
|
||||
self.assertIn("Fedora changelog entry", content)
|
||||
self.assertIn("Test Maintainer <test@example.com>", content)
|
||||
# Old entries must still be present
|
||||
self.assertIn("Old Maintainer <old@example.com>", content)
|
||||
|
||||
def test_update_spec_changelog_preview_does_not_write(self) -> None:
|
||||
original = textwrap.dedent(
|
||||
"""
|
||||
Name: package-manager
|
||||
Version: 0.1.0
|
||||
Release: 5%{?dist}
|
||||
|
||||
%changelog
|
||||
* Mon Jan 01 2024 Old Maintainer <old@example.com> - 0.1.0-1
|
||||
- Old entry
|
||||
"""
|
||||
).strip() + "\n"
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
path = os.path.join(tmpdir, "package-manager.spec")
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
f.write(original)
|
||||
|
||||
with patch.dict(
|
||||
os.environ,
|
||||
{
|
||||
"DEBFULLNAME": "Test Maintainer",
|
||||
"DEBEMAIL": "test@example.com",
|
||||
},
|
||||
clear=False,
|
||||
):
|
||||
update_spec_changelog(
|
||||
spec_path=path,
|
||||
package_name="package-manager",
|
||||
new_version="1.2.3",
|
||||
message="Fedora changelog entry",
|
||||
preview=True,
|
||||
)
|
||||
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
|
||||
# In preview mode, the spec file must not change
|
||||
self.assertEqual(content, original)
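The tests above exercise update_spec_changelog, whose implementation is not shown in this diff; the following is a hypothetical sketch of the behaviour they verify (a new stanza inserted directly after the %changelog marker while old entries are preserved).

    def insert_changelog_stanza(spec_text: str, stanza: str) -> str:
        out = []
        for line in spec_text.splitlines(keepends=True):
            out.append(line)
            if line.strip() == "%changelog":
                out.append(stanza.rstrip("\n") + "\n")
        return "".join(out)

    spec = "Name: demo\n\n%changelog\n* Mon Jan 01 2024 Old Maintainer <old@example.com> - 0.1.0-1\n"
    new_stanza = "* Tue Dec 09 2025 Test Maintainer <test@example.com> - 1.2.3-1\n- Fedora changelog entry"
    print(insert_changelog_stanza(spec, new_stanza))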
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
||||
@@ -19,6 +19,7 @@ class TestReleaseOrchestration(unittest.TestCase):
|
||||
patch("pkgmgr.actions.release.update_pkgbuild_version") as mock_update_pkgbuild, \
|
||||
patch("pkgmgr.actions.release.update_spec_version") as mock_update_spec, \
|
||||
patch("pkgmgr.actions.release.update_debian_changelog") as mock_update_debian_changelog, \
|
||||
patch("pkgmgr.actions.release.update_spec_changelog") as mock_update_spec_changelog, \
|
||||
patch("pkgmgr.actions.release.run_git_command") as mock_run_git_command, \
|
||||
patch("pkgmgr.actions.release.sync_branch_with_remote") as mock_sync_branch, \
|
||||
patch("pkgmgr.actions.release.update_latest_tag") as mock_update_latest_tag:
|
||||
@@ -48,7 +49,7 @@ class TestReleaseOrchestration(unittest.TestCase):
|
||||
self.assertEqual(args[1], "1.2.4")
|
||||
self.assertEqual(kwargs.get("preview"), False)
|
||||
|
||||
# changelog update
|
||||
# changelog update (project)
|
||||
mock_update_changelog.assert_called_once()
|
||||
args, kwargs = mock_update_changelog.call_args
|
||||
self.assertEqual(args[0], "CHANGELOG.md")
|
||||
@@ -72,6 +73,13 @@ class TestReleaseOrchestration(unittest.TestCase):
|
||||
False,
|
||||
)
|
||||
|
||||
# Fedora / RPM %changelog helper
|
||||
mock_update_spec_changelog.assert_called_once()
|
||||
self.assertEqual(
|
||||
mock_update_spec_changelog.call_args[1].get("preview"),
|
||||
False,
|
||||
)
|
||||
|
||||
# Git operations
|
||||
mock_get_current_branch.assert_called_once()
|
||||
self.assertEqual(mock_get_current_branch.return_value, "develop")
|
||||
@@ -96,6 +104,7 @@ class TestReleaseOrchestration(unittest.TestCase):
|
||||
patch("pkgmgr.actions.release.update_pkgbuild_version") as mock_update_pkgbuild, \
|
||||
patch("pkgmgr.actions.release.update_spec_version") as mock_update_spec, \
|
||||
patch("pkgmgr.actions.release.update_debian_changelog") as mock_update_debian_changelog, \
|
||||
patch("pkgmgr.actions.release.update_spec_changelog") as mock_update_spec_changelog, \
|
||||
patch("pkgmgr.actions.release.run_git_command") as mock_run_git_command, \
|
||||
patch("pkgmgr.actions.release.sync_branch_with_remote") as mock_sync_branch, \
|
||||
patch("pkgmgr.actions.release.update_latest_tag") as mock_update_latest_tag:
|
||||
@@ -129,6 +138,10 @@ class TestReleaseOrchestration(unittest.TestCase):
|
||||
mock_update_debian_changelog.assert_called_once()
|
||||
self.assertTrue(mock_update_debian_changelog.call_args[1].get("preview"))
|
||||
|
||||
# Fedora / RPM spec changelog helper in preview mode
|
||||
mock_update_spec_changelog.assert_called_once()
|
||||
self.assertTrue(mock_update_spec_changelog.call_args[1].get("preview"))
|
||||
|
||||
# In preview mode no real git commands must be executed
|
||||
mock_run_git_command.assert_not_called()
|
||||
|
||||
|
||||
309
tests/unit/pkgmgr/actions/repos/test_pull_with_verification.py
Normal file
@@ -0,0 +1,309 @@
|
||||
import io
|
||||
import unittest
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from pkgmgr.actions.repository.pull import pull_with_verification
|
||||
|
||||
|
||||
class TestPullWithVerification(unittest.TestCase):
|
||||
"""
|
||||
Comprehensive unit tests for pull_with_verification().
|
||||
|
||||
These tests verify:
|
||||
- Preview mode behaviour
|
||||
- Verification logic (prompting, bypassing, skipping)
|
||||
- subprocess.run invocation
|
||||
- Repository directory existence checks
|
||||
- Handling of extra git pull arguments
|
||||
"""
|
||||
|
||||
def _setup_mocks(self, mock_exists, mock_get_repo_id, mock_get_repo_dir,
|
||||
mock_verify, exists=True, verified_ok=True,
|
||||
errors=None, verified_info=True):
|
||||
"""Helper to configure repetitive mock behavior."""
|
||||
repo = {
|
||||
"name": "pkgmgr",
|
||||
"verified": {"gpg_keys": ["ABCDEF"]} if verified_info else None,
|
||||
}
|
||||
mock_exists.return_value = exists
|
||||
mock_get_repo_id.return_value = "pkgmgr"
|
||||
mock_get_repo_dir.return_value = "/fake/base/pkgmgr"
|
||||
mock_verify.return_value = (
|
||||
verified_ok,
|
||||
errors or [],
|
||||
"deadbeef", # commit hash
|
||||
"ABCDEF", # signing key
|
||||
)
|
||||
return repo
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
@patch("pkgmgr.actions.repository.pull.subprocess.run")
|
||||
@patch("pkgmgr.actions.repository.pull.verify_repository")
|
||||
@patch("pkgmgr.actions.repository.pull.get_repo_dir")
|
||||
@patch("pkgmgr.actions.repository.pull.get_repo_identifier")
|
||||
@patch("pkgmgr.actions.repository.pull.os.path.exists")
|
||||
@patch("builtins.input")
|
||||
def test_preview_mode_non_interactive(
|
||||
self,
|
||||
mock_input,
|
||||
mock_exists,
|
||||
mock_get_repo_id,
|
||||
mock_get_repo_dir,
|
||||
mock_verify,
|
||||
mock_subprocess,
|
||||
):
|
||||
"""
|
||||
Preview mode must NEVER request user input and must NEVER execute git.
|
||||
It must only print the preview command.
|
||||
"""
|
||||
repo = self._setup_mocks(
|
||||
mock_exists,
|
||||
mock_get_repo_id,
|
||||
mock_get_repo_dir,
|
||||
mock_verify,
|
||||
exists=True,
|
||||
verified_ok=False,
|
||||
errors=["bad signature"],
|
||||
verified_info=True,
|
||||
)
|
||||
|
||||
buf = io.StringIO()
|
||||
with patch("sys.stdout", new=buf):
|
||||
pull_with_verification(
|
||||
selected_repos=[repo],
|
||||
repositories_base_dir="/fake/base",
|
||||
all_repos=[repo],
|
||||
extra_args=["--ff-only"],
|
||||
no_verification=False,
|
||||
preview=True,
|
||||
)
|
||||
|
||||
output = buf.getvalue()
|
||||
self.assertIn(
|
||||
"[Preview] In '/fake/base/pkgmgr': git pull --ff-only",
|
||||
output,
|
||||
)
|
||||
|
||||
mock_input.assert_not_called()
|
||||
mock_subprocess.assert_not_called()
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
@patch("pkgmgr.actions.repository.pull.subprocess.run")
|
||||
@patch("pkgmgr.actions.repository.pull.verify_repository")
|
||||
@patch("pkgmgr.actions.repository.pull.get_repo_dir")
|
||||
@patch("pkgmgr.actions.repository.pull.get_repo_identifier")
|
||||
@patch("pkgmgr.actions.repository.pull.os.path.exists")
|
||||
@patch("builtins.input")
|
||||
def test_verification_failure_user_declines(
|
||||
self,
|
||||
mock_input,
|
||||
mock_exists,
|
||||
mock_get_repo_id,
|
||||
mock_get_repo_dir,
|
||||
mock_verify,
|
||||
mock_subprocess,
|
||||
):
|
||||
"""
|
||||
If verification fails and preview=False, the user is prompted.
|
||||
If the user declines ('n'), no git command is executed.
|
||||
"""
|
||||
repo = self._setup_mocks(
|
||||
mock_exists,
|
||||
mock_get_repo_id,
|
||||
mock_get_repo_dir,
|
||||
mock_verify,
|
||||
verified_ok=False,
|
||||
errors=["signature invalid"],
|
||||
)
|
||||
|
||||
mock_input.return_value = "n"
|
||||
|
||||
buf = io.StringIO()
|
||||
with patch("sys.stdout", new=buf):
|
||||
pull_with_verification(
|
||||
selected_repos=[repo],
|
||||
repositories_base_dir="/fake/base",
|
||||
all_repos=[repo],
|
||||
extra_args=[],
|
||||
no_verification=False,
|
||||
preview=False,
|
||||
)
|
||||
|
||||
mock_input.assert_called_once()
|
||||
mock_subprocess.assert_not_called()
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
@patch("pkgmgr.actions.repository.pull.subprocess.run")
|
||||
@patch("pkgmgr.actions.repository.pull.verify_repository")
|
||||
@patch("pkgmgr.actions.repository.pull.get_repo_dir")
|
||||
@patch("pkgmgr.actions.repository.pull.get_repo_identifier")
|
||||
@patch("pkgmgr.actions.repository.pull.os.path.exists")
|
||||
@patch("builtins.input")
|
||||
def test_verification_failure_user_accepts_runs_git(
|
||||
self,
|
||||
mock_input,
|
||||
mock_exists,
|
||||
mock_get_repo_id,
|
||||
mock_get_repo_dir,
|
||||
mock_verify,
|
||||
mock_subprocess,
|
||||
):
|
||||
"""
|
||||
If verification fails and the user accepts ('y'),
|
||||
then the git pull should be executed.
|
||||
"""
|
||||
repo = self._setup_mocks(
|
||||
mock_exists,
|
||||
mock_get_repo_id,
|
||||
mock_get_repo_dir,
|
||||
mock_verify,
|
||||
verified_ok=False,
|
||||
errors=["invalid"],
|
||||
)
|
||||
|
||||
mock_input.return_value = "y"
|
||||
mock_subprocess.return_value = MagicMock(returncode=0)
|
||||
|
||||
pull_with_verification(
|
||||
selected_repos=[repo],
|
||||
repositories_base_dir="/fake/base",
|
||||
all_repos=[repo],
|
||||
extra_args=[],
|
||||
no_verification=False,
|
||||
preview=False,
|
||||
)
|
||||
|
||||
mock_subprocess.assert_called_once()
|
||||
mock_input.assert_called_once()
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
@patch("pkgmgr.actions.repository.pull.subprocess.run")
|
||||
@patch("pkgmgr.actions.repository.pull.verify_repository")
|
||||
@patch("pkgmgr.actions.repository.pull.get_repo_dir")
|
||||
@patch("pkgmgr.actions.repository.pull.get_repo_identifier")
|
||||
@patch("pkgmgr.actions.repository.pull.os.path.exists")
|
||||
@patch("builtins.input")
|
||||
def test_verification_success_no_prompt(
|
||||
self,
|
||||
mock_input,
|
||||
mock_exists,
|
||||
mock_get_repo_id,
|
||||
mock_get_repo_dir,
|
||||
mock_verify,
|
||||
mock_subprocess,
|
||||
):
|
||||
"""
|
||||
If verification is successful, the user should NOT be prompted,
|
||||
and git pull should run immediately.
|
||||
"""
|
||||
repo = self._setup_mocks(
|
||||
mock_exists,
|
||||
mock_get_repo_id,
|
||||
mock_get_repo_dir,
|
||||
mock_verify,
|
||||
verified_ok=True,
|
||||
)
|
||||
|
||||
mock_subprocess.return_value = MagicMock(returncode=0)
|
||||
|
||||
pull_with_verification(
|
||||
selected_repos=[repo],
|
||||
repositories_base_dir="/fake/base",
|
||||
all_repos=[repo],
|
||||
extra_args=["--rebase"],
|
||||
no_verification=False,
|
||||
preview=False,
|
||||
)
|
||||
|
||||
mock_input.assert_not_called()
|
||||
mock_subprocess.assert_called_once()
|
||||
cmd = mock_subprocess.call_args[0][0]
|
||||
self.assertIn("git pull --rebase", cmd)
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
@patch("pkgmgr.actions.repository.pull.subprocess.run")
|
||||
@patch("pkgmgr.actions.repository.pull.verify_repository")
|
||||
@patch("pkgmgr.actions.repository.pull.get_repo_dir")
|
||||
@patch("pkgmgr.actions.repository.pull.get_repo_identifier")
|
||||
@patch("pkgmgr.actions.repository.pull.os.path.exists")
|
||||
@patch("builtins.input")
|
||||
def test_directory_missing_skips_repo(
|
||||
self,
|
||||
mock_input,
|
||||
mock_exists,
|
||||
mock_get_repo_id,
|
||||
mock_get_repo_dir,
|
||||
mock_verify,
|
||||
mock_subprocess,
|
||||
):
|
||||
"""
|
||||
If the repository directory does not exist, the repo must be skipped
|
||||
silently and no git command executed.
|
||||
"""
|
||||
repo = self._setup_mocks(
|
||||
mock_exists,
|
||||
mock_get_repo_id,
|
||||
mock_get_repo_dir,
|
||||
mock_verify,
|
||||
exists=False,
|
||||
)
|
||||
|
||||
buf = io.StringIO()
|
||||
with patch("sys.stdout", new=buf):
|
||||
pull_with_verification(
|
||||
selected_repos=[repo],
|
||||
repositories_base_dir="/fake/base",
|
||||
all_repos=[repo],
|
||||
extra_args=[],
|
||||
no_verification=False,
|
||||
preview=False,
|
||||
)
|
||||
|
||||
output = buf.getvalue()
|
||||
self.assertIn("not found", output)
|
||||
|
||||
mock_input.assert_not_called()
|
||||
mock_subprocess.assert_not_called()
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
@patch("pkgmgr.actions.repository.pull.subprocess.run")
|
||||
@patch("pkgmgr.actions.repository.pull.verify_repository")
|
||||
@patch("pkgmgr.actions.repository.pull.get_repo_dir")
|
||||
@patch("pkgmgr.actions.repository.pull.get_repo_identifier")
|
||||
@patch("pkgmgr.actions.repository.pull.os.path.exists")
|
||||
@patch("builtins.input")
|
||||
def test_no_verification_flag_skips_prompt(
|
||||
self,
|
||||
mock_input,
|
||||
mock_exists,
|
||||
mock_get_repo_id,
|
||||
mock_get_repo_dir,
|
||||
mock_verify,
|
||||
mock_subprocess,
|
||||
):
|
||||
"""
|
||||
If no_verification=True, verification failures must NOT prompt.
|
||||
Git pull should run directly.
|
||||
"""
|
||||
repo = self._setup_mocks(
|
||||
mock_exists,
|
||||
mock_get_repo_id,
|
||||
mock_get_repo_dir,
|
||||
mock_verify,
|
||||
verified_ok=False,
|
||||
errors=["invalid"],
|
||||
)
|
||||
|
||||
mock_subprocess.return_value = MagicMock(returncode=0)
|
||||
|
||||
pull_with_verification(
|
||||
selected_repos=[repo],
|
||||
repositories_base_dir="/fake/base",
|
||||
all_repos=[repo],
|
||||
extra_args=[],
|
||||
no_verification=True,
|
||||
preview=False,
|
||||
)
|
||||
|
||||
mock_input.assert_not_called()
|
||||
mock_subprocess.assert_called_once()
|
||||
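For reference, the call signature exercised by these tests looks like the following. This is a hedged usage sketch based solely on the parameters and fixtures used above; the repository dict shape and base directory come from the test mocks, and the caller context is assumed.

# Usage sketch, not taken from the pkgmgr documentation.
from pkgmgr.actions.repository.pull import pull_with_verification

repo = {"name": "pkgmgr", "verified": {"gpg_keys": ["ABCDEF"]}}

pull_with_verification(
    selected_repos=[repo],
    repositories_base_dir="/fake/base",  # parent directory of all clones
    all_repos=[repo],
    extra_args=["--ff-only"],            # forwarded verbatim to `git pull`
    no_verification=False,               # prompt if signature checks fail
    preview=True,                        # only print the git command
)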
@@ -1,7 +1,6 @@
from __future__ import annotations

import unittest
from types import SimpleNamespace
from unittest.mock import patch

from pkgmgr.actions.branch import open_branch
@@ -13,9 +12,10 @@ class TestOpenBranch(unittest.TestCase):
    def test_open_branch_with_explicit_name_and_default_base(self, mock_run_git) -> None:
        """
        open_branch(name, base='main') should:
        - resolve base branch (prefers 'main', falls back to 'master')
        - fetch origin
        - checkout base
        - pull base
        - checkout resolved base
        - pull resolved base
        - create new branch
        - push with upstream
        """
@@ -25,6 +25,7 @@ class TestOpenBranch(unittest.TestCase):

        # We expect a specific sequence of Git calls.
        expected_calls = [
            (["rev-parse", "--verify", "main"], "/repo"),
            (["fetch", "origin"], "/repo"),
            (["checkout", "main"], "/repo"),
            (["pull", "origin", "main"], "/repo"),
@@ -50,7 +51,7 @@ class TestOpenBranch(unittest.TestCase):
    ) -> None:
        """
        If name is None/empty, open_branch should prompt via input()
        and still perform the full Git sequence.
        and still perform the full Git sequence on the resolved base.
        """
        mock_run_git.return_value = ""

@@ -60,6 +61,7 @@ class TestOpenBranch(unittest.TestCase):
        mock_input.assert_called_once()

        expected_calls = [
            (["rev-parse", "--verify", "develop"], "/repo"),
            (["fetch", "origin"], "/repo"),
            (["checkout", "develop"], "/repo"),
            (["pull", "origin", "develop"], "/repo"),
@@ -76,15 +78,20 @@ class TestOpenBranch(unittest.TestCase):
            self.assertEqual(kwargs.get("cwd"), cwd_expected)

    @patch("pkgmgr.actions.branch.run_git")
    def test_open_branch_raises_runtimeerror_on_git_failure(self, mock_run_git) -> None:
    def test_open_branch_raises_runtimeerror_on_fetch_failure(self, mock_run_git) -> None:
        """
        If a GitError occurs (e.g. fetch fails), open_branch should
        raise a RuntimeError with a helpful message.
        If a GitError occurs on fetch, open_branch should raise a RuntimeError
        with a helpful message.
        """

        def side_effect(args, cwd="."):
            # Simulate a failure on the first call (fetch)
            raise GitError("simulated fetch failure")
            # First call: base resolution (rev-parse) should succeed
            if args == ["rev-parse", "--verify", "main"]:
                return ""
            # Second call: fetch should fail
            if args == ["fetch", "origin"]:
                raise GitError("simulated fetch failure")
            return ""

        mock_run_git.side_effect = side_effect

@@ -95,6 +102,45 @@ class TestOpenBranch(unittest.TestCase):
        self.assertIn("Failed to fetch from origin", msg)
        self.assertIn("simulated fetch failure", msg)

@patch("pkgmgr.actions.branch.run_git")
|
||||
def test_open_branch_uses_fallback_master_if_main_missing(self, mock_run_git) -> None:
|
||||
"""
|
||||
If the preferred base (e.g. 'main') does not exist, open_branch should
|
||||
fall back to the fallback base (default: 'master').
|
||||
"""
|
||||
|
||||
def side_effect(args, cwd="."):
|
||||
# First: rev-parse main -> fails
|
||||
if args == ["rev-parse", "--verify", "main"]:
|
||||
raise GitError("main does not exist")
|
||||
# Second: rev-parse master -> succeeds
|
||||
if args == ["rev-parse", "--verify", "master"]:
|
||||
return ""
|
||||
# Then normal flow on master
|
||||
return ""
|
||||
|
||||
mock_run_git.side_effect = side_effect
|
||||
|
||||
open_branch(name="feature/fallback", base_branch="main", cwd="/repo")
|
||||
|
||||
expected_calls = [
|
||||
(["rev-parse", "--verify", "main"], "/repo"),
|
||||
(["rev-parse", "--verify", "master"], "/repo"),
|
||||
(["fetch", "origin"], "/repo"),
|
||||
(["checkout", "master"], "/repo"),
|
||||
(["pull", "origin", "master"], "/repo"),
|
||||
(["checkout", "-b", "feature/fallback"], "/repo"),
|
||||
(["push", "-u", "origin", "feature/fallback"], "/repo"),
|
||||
]
|
||||
|
||||
self.assertEqual(mock_run_git.call_count, len(expected_calls))
|
||||
for call, (args_expected, cwd_expected) in zip(
|
||||
mock_run_git.call_args_list, expected_calls
|
||||
):
|
||||
args, kwargs = call
|
||||
self.assertEqual(args[0], args_expected)
|
||||
self.assertEqual(kwargs.get("cwd"), cwd_expected)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
||||
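The fallback behaviour covered by the last test boils down to resolving the base branch before any other Git call: try 'main', and if rev-parse rejects it, fall back to 'master'. The snippet below is a minimal sketch of such a resolution step; run_git and GitError are defined here only as stand-ins for the pkgmgr.actions.branch helpers that the tests mock.

# Illustration only, assuming run_git raises GitError on a failing git command.
class GitError(Exception):
    pass

def run_git(args: list[str], cwd: str = ".") -> str:
    # Stand-in: the real helper shells out to `git` and raises GitError on failure.
    return ""

def resolve_base_branch(preferred: str = "main", fallback: str = "master",
                        cwd: str = ".") -> str:
    try:
        run_git(["rev-parse", "--verify", preferred], cwd=cwd)
        return preferred
    except GitError:
        run_git(["rev-parse", "--verify", fallback], cwd=cwd)
        return fallback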
29 tests/unit/pkgmgr/core/repository/test_ignored.py Normal file
@@ -0,0 +1,29 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import unittest

from pkgmgr.core.repository.ignored import filter_ignored


class TestFilterIgnored(unittest.TestCase):
    def test_filter_ignored_removes_repos_with_ignore_true(self) -> None:
        repos = [
            {"provider": "github.com", "account": "user", "repository": "a", "ignore": True},
            {"provider": "github.com", "account": "user", "repository": "b", "ignore": False},
            {"provider": "github.com", "account": "user", "repository": "c"},
        ]

        result = filter_ignored(repos)

        identifiers = {(r["provider"], r["account"], r["repository"]) for r in result}
        self.assertNotIn(("github.com", "user", "a"), identifiers)
        self.assertIn(("github.com", "user", "b"), identifiers)
        self.assertIn(("github.com", "user", "c"), identifiers)

    def test_filter_ignored_empty_list_returns_empty_list(self) -> None:
        self.assertEqual(filter_ignored([]), [])


if __name__ == "__main__":
    unittest.main()
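A behaviour-equivalent sketch of filter_ignored(), derived only from the expectations in this test (repos whose "ignore" flag is truthy are dropped, everything else is kept as-is); it is not the actual pkgmgr implementation.

def filter_ignored(repos: list[dict]) -> list[dict]:
    # Keep every repo whose "ignore" key is missing, None, or False.
    return [repo for repo in repos if not repo.get("ignore", False)]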
180 tests/unit/pkgmgr/core/repository/test_selected.py Normal file
@@ -0,0 +1,180 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import unittest
from types import SimpleNamespace
from unittest.mock import patch

from pkgmgr.core.repository.selected import get_selected_repos


def _repo(
    provider: str,
    account: str,
    repository: str,
    ignore: bool | None = None,
    directory: str | None = None,
):
    repo = {
        "provider": provider,
        "account": account,
        "repository": repository,
    }
    if ignore is not None:
        repo["ignore"] = ignore
    if directory is not None:
        repo["directory"] = directory
    return repo


class TestGetSelectedRepos(unittest.TestCase):
    def setUp(self) -> None:
        self.repo_ignored = _repo(
            "github.com",
            "user",
            "ignored-repo",
            ignore=True,
            directory="/repos/github.com/user/ignored-repo",
        )
        self.repo_visible = _repo(
            "github.com",
            "user",
            "visible-repo",
            ignore=False,
            directory="/repos/github.com/user/visible-repo",
        )
        self.all_repos = [self.repo_ignored, self.repo_visible]

    # ------------------------------------------------------------------
    # 1) Explicit identifiers – ignored repos may still be selected
    # ------------------------------------------------------------------
    def test_identifiers_bypass_ignore_filter(self) -> None:
        args = SimpleNamespace(
            identifiers=["ignored-repo"],  # matches by repository name
            all=False,
            category=[],
            string="",
            tag=[],
            include_ignored=False,  # should be ignored for explicit identifiers
        )

        selected = get_selected_repos(args, self.all_repos)

        self.assertEqual(len(selected), 1)
        self.assertIs(selected[0], self.repo_ignored)

    # ------------------------------------------------------------------
    # 2) Filter-only mode – ignored repos are filtered out
    # ------------------------------------------------------------------
    def test_filter_mode_excludes_ignored_by_default(self) -> None:
        # A string filter that would match both repos
        args = SimpleNamespace(
            identifiers=[],
            all=False,
            category=[],
            string="repo",  # substring contained in both names
            tag=[],
            include_ignored=False,
        )

        selected = get_selected_repos(args, self.all_repos)

        self.assertEqual(len(selected), 1)
        self.assertIs(selected[0], self.repo_visible)

    def test_filter_mode_can_include_ignored_when_flag_set(self) -> None:
        args = SimpleNamespace(
            identifiers=[],
            all=False,
            category=[],
            string="repo",
            tag=[],
            include_ignored=True,
        )

        selected = get_selected_repos(args, self.all_repos)

        # Both repos should appear because include_ignored=True
        self.assertEqual({r["repository"] for r in selected}, {"ignored-repo", "visible-repo"})

    # ------------------------------------------------------------------
    # 3) --all mode – ignored repos are removed by default
    # ------------------------------------------------------------------
    def test_all_mode_excludes_ignored_by_default(self) -> None:
        args = SimpleNamespace(
            identifiers=[],
            all=True,
            category=[],
            string="",
            tag=[],
            include_ignored=False,
        )

        selected = get_selected_repos(args, self.all_repos)

        self.assertEqual(len(selected), 1)
        self.assertIs(selected[0], self.repo_visible)

    def test_all_mode_can_include_ignored_when_flag_set(self) -> None:
        args = SimpleNamespace(
            identifiers=[],
            all=True,
            category=[],
            string="",
            tag=[],
            include_ignored=True,
        )

        selected = get_selected_repos(args, self.all_repos)

        self.assertEqual(len(selected), 2)
        self.assertCountEqual(
            [r["repository"] for r in selected],
            ["ignored-repo", "visible-repo"],
        )

    # ------------------------------------------------------------------
    # 4) CWD mode – select the repo that matches the current directory
    # ------------------------------------------------------------------
    def test_cwd_selection_excludes_ignored_by_default(self) -> None:
        # Point the CWD at the directory of the ignored repo.
        cwd = os.path.abspath(self.repo_ignored["directory"])

        args = SimpleNamespace(
            identifiers=[],
            all=False,
            category=[],
            string="",
            tag=[],
            include_ignored=False,
        )

        with patch("os.getcwd", return_value=cwd):
            selected = get_selected_repos(args, self.all_repos)

        # Since the only repo for this directory is ignored,
        # the selection should be empty.
        self.assertEqual(selected, [])

    def test_cwd_selection_can_include_ignored_when_flag_set(self) -> None:
        cwd = os.path.abspath(self.repo_ignored["directory"])

        args = SimpleNamespace(
            identifiers=[],
            all=False,
            category=[],
            string="",
            tag=[],
            include_ignored=True,
        )

        with patch("os.getcwd", return_value=cwd):
            selected = get_selected_repos(args, self.all_repos)

        self.assertEqual(len(selected), 1)
        self.assertIs(selected[0], self.repo_ignored)


if __name__ == "__main__":
    unittest.main()
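Taken together, these tests describe the selection precedence: explicit identifiers bypass the ignore filter, while --all, string/tag filters, and CWD-based selection respect it unless include_ignored is set. The following minimal invocation mirrors the fixtures above; the argparse-like namespace fields are taken directly from the SimpleNamespace objects the tests construct, and everything else is an assumed caller context.

# Usage sketch only; not lifted from pkgmgr documentation.
from types import SimpleNamespace
from pkgmgr.core.repository.selected import get_selected_repos

repos = [
    {"provider": "github.com", "account": "user", "repository": "visible-repo",
     "ignore": False, "directory": "/repos/github.com/user/visible-repo"},
]

args = SimpleNamespace(
    identifiers=[], all=True, category=[], string="", tag=[],
    include_ignored=False,
)

print(get_selected_repos(args, repos))  # -> only the non-ignored repos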