Compare commits
24 Commits: 0119af330f ... latest

| Author | SHA1 | Date |
|---|---|---|
| | c5c84704db | |
| | c46df92953 | |
| | 997c265cfb | |
| | 955028288f | |
| | 866572e252 | |
| | b0a733369e | |
| | c5843ccd30 | |
| | 3cb7852cb4 | |
| | f995e3d368 | |
| | ffa9d9660a | |
| | be70dd4239 | |
| | 74876e2e15 | |
| | 54058c7f4d | |
| | 8583fdf172 | |
| | 374f4ed745 | |
| | 63e1b3d145 | |
| | 2f89de1ff5 | |
| | 019aa4b0d9 | |
| | 9c22c7dbb4 | |
| | f83e192e37 | |
| | 486863eb58 | |
| | bb23bd94f2 | |
| | 2a66c082eb | |
| | ee9d7758ed | |
.github/workflows/test-virgin-root.yml (vendored, 8 changed lines)
@@ -31,15 +31,15 @@ jobs:
set -euo pipefail

docker run --rm \
-v "$PWD":/src \
-v "$PWD":/opt/src/pkgmgr \
-v pkgmgr_repos:/root/Repositories \
-v pkgmgr_pip_cache:/root/.cache/pip \
-w /src \
-w /opt/src/pkgmgr \
"pkgmgr-${{ matrix.distro }}-virgin" \
bash -lc '
set -euo pipefail

git config --global --add safe.directory /src
git config --global --add safe.directory /opt/src/pkgmgr

make install
make setup
@@ -50,5 +50,5 @@ jobs:
pkgmgr version pkgmgr

echo ">>> Running Nix-based: nix run .#pkgmgr -- version pkgmgr"
nix run /src#pkgmgr -- version pkgmgr
nix run /opt/src/pkgmgr#pkgmgr -- version pkgmgr
'
.github/workflows/test-virgin-user.yml (vendored, 10 changed lines)
@@ -31,8 +31,8 @@ jobs:
set -euo pipefail

docker run --rm \
-v "$PWD":/src \
-w /src \
-v "$PWD":/opt/src/pkgmgr \
-w /opt/src/pkgmgr \
"pkgmgr-${{ matrix.distro }}-virgin" \
bash -lc '
set -euo pipefail
@@ -42,7 +42,7 @@ jobs:
useradd -m dev
echo "dev ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/dev
chmod 0440 /etc/sudoers.d/dev
chown -R dev:dev /src
chown -R dev:dev /opt/src/pkgmgr

mkdir -p /nix/store /nix/var/nix /nix/var/log/nix /nix/var/nix/profiles
chown -R dev:dev /nix
@@ -51,7 +51,7 @@ jobs:

sudo -H -u dev env HOME=/home/dev PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 bash -lc "
set -euo pipefail
cd /src
cd /opt/src/pkgmgr

make setup-venv
. \"\$HOME/.venvs/pkgmgr/bin/activate\"
@@ -59,6 +59,6 @@ jobs:
pkgmgr version pkgmgr

export NIX_REMOTE=local
nix run /src#pkgmgr -- version pkgmgr
nix run /opt/src/pkgmgr#pkgmgr -- version pkgmgr
"
'
CHANGELOG.md (35 changed lines)
@@ -1,3 +1,38 @@
## [1.8.5] - 2025-12-17

* Clearer Git error handling, especially when a directory is not a Git repository.
* More reliable repository verification with improved commit and GPG signature checks.
* Better error messages and overall robustness when working with Git-based workflows.


## [1.9.0] - 2025-12-17

* Automated release.


## [1.8.4] - 2025-12-17

* Made pkgmgr’s base-layer role explicit by standardizing the Docker/CI mount path to `/opt/src/pkgmgr`.


## [1.8.3] - 2025-12-16

* MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
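For illustration (not part of the changelog diff), a MIRRORS file can now mix the two accepted entry forms; the names and URLs below are examples only:

    origin git@github.com:owner/repo.git
    https://pypi.org/project/kpmx/

The bare PyPI URL is kept as metadata (its mirror name is derived from the hostname) and, per the filtering introduced in this change set, is never turned into a Git remote or push URL.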
## [1.8.2] - 2025-12-16

* ***pkgmgr tools code*** is more robust and predictable: it now fails early with clear errors if VS Code is not installed or a repository is not yet identified.


## [1.8.1] - 2025-12-16

* Improved stability and consistency of all Git operations (clone, pull, push, release, branch handling) with clearer error messages and predictable preview behavior.
* Mirrors are now handled cleanly: only valid Git remotes are used for Git operations, while non-Git URLs (e.g. PyPI) are excluded, preventing broken or confusing repository configs.
* GitHub authentication is more robust: tokens are automatically resolved via the GitHub CLI (`gh`), invalid stored tokens are replaced, and interactive prompts occur only when necessary.
* Repository creation and release workflows are more reliable, producing cleaner Git configurations and more predictable version handling.


## [1.8.0] - 2025-12-15

* **New Features:**
@@ -50,6 +50,6 @@ RUN set -euo pipefail; \
# Entry point
COPY scripts/docker/entry.sh /usr/local/bin/docker-entry.sh

WORKDIR /src
WORKDIR /opt/src/pkgmgr
ENTRYPOINT ["/usr/local/bin/docker-entry.sh"]
CMD ["pkgmgr", "--help"]
@@ -32,7 +32,7 @@
rec {
pkgmgr = pyPkgs.buildPythonApplication {
pname = "package-manager";
version = "1.8.0";
version = "1.8.5";

# Use the git repo as source
src = ./.;
@@ -1,7 +1,7 @@
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>

pkgname=package-manager
pkgver=1.8.0
pkgver=1.8.5
pkgrel=1
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
arch=('any')
@@ -1,3 +1,44 @@
package-manager (1.8.5-1) unstable; urgency=medium

* Clearer Git error handling, especially when a directory is not a Git repository.
* More reliable repository verification with improved commit and GPG signature checks.
* Better error messages and overall robustness when working with Git-based workflows.

-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 22:15:48 +0100

package-manager (1.9.0-1) unstable; urgency=medium

* Automated release.

-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 22:10:31 +0100

package-manager (1.8.4-1) unstable; urgency=medium

* Made pkgmgr’s base-layer role explicit by standardizing the Docker/CI mount path to `/opt/src/pkgmgr`.

-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 11:20:16 +0100

package-manager (1.8.3-1) unstable; urgency=medium

* MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.

-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 16 Dec 2025 19:49:51 +0100

package-manager (1.8.2-1) unstable; urgency=medium

* ***pkgmgr tools code*** is more robust and predictable: it now fails early with clear errors if VS Code is not installed or a repository is not yet identified.

-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 16 Dec 2025 19:22:41 +0100

package-manager (1.8.1-1) unstable; urgency=medium

* Improved stability and consistency of all Git operations (clone, pull, push, release, branch handling) with clearer error messages and predictable preview behavior.
* Mirrors are now handled cleanly: only valid Git remotes are used for Git operations, while non-Git URLs (e.g. PyPI) are excluded, preventing broken or confusing repository configs.
* GitHub authentication is more robust: tokens are automatically resolved via the GitHub CLI (`gh`), invalid stored tokens are replaced, and interactive prompts occur only when necessary.
* Repository creation and release workflows are more reliable, producing cleaner Git configurations and more predictable version handling.

-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 16 Dec 2025 18:06:35 +0100

package-manager (1.8.0-1) unstable; urgency=medium

* **New Features:**
@@ -1,5 +1,5 @@
Name: package-manager
Version: 1.8.0
Version: 1.8.5
Release: 1%{?dist}
Summary: Wrapper that runs Kevin's package-manager via Nix flake
@@ -74,6 +74,29 @@ echo ">>> package-manager removed. Nix itself was not removed."
/usr/lib/package-manager/

%changelog
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.5-1
- Clearer Git error handling, especially when a directory is not a Git repository.
- More reliable repository verification with improved commit and GPG signature checks.
- Better error messages and overall robustness when working with Git-based workflows.

* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.0-1
- Automated release.

* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.4-1
- Made pkgmgr’s base-layer role explicit by standardizing the Docker/CI mount path to `/opt/src/pkgmgr`.

* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.3-1
- MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.

* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.2-1
- ***pkgmgr tools code*** is more robust and predictable: it now fails early with clear errors if VS Code is not installed or a repository is not yet identified.

* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.1-1
- Improved stability and consistency of all Git operations (clone, pull, push, release, branch handling) with clearer error messages and predictable preview behavior.
- Mirrors are now handled cleanly: only valid Git remotes are used for Git operations, while non-Git URLs (e.g. PyPI) are excluded, preventing broken or confusing repository configs.
- GitHub authentication is more robust: tokens are automatically resolved via the GitHub CLI (`gh`), invalid stored tokens are replaced, and interactive prompts occur only when necessary.
- Repository creation and release workflows are more reliable, producing cleaner Git configurations and more predictable version handling.

* Mon Dec 15 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.0-1
- **New Features:**
- **Silent Updates**: You can now use the `--silent` flag during installs and updates to suppress error messages for individual repositories and get a single summary at the end. This ensures the process continues even if some repositories fail, while still preserving interactive checks when not in silent mode.
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "kpmx"
version = "1.8.0"
version = "1.8.5"
description = "Kevin's package-manager tool (pkgmgr)"
readme = "README.md"
requires-python = ">=3.9"
@@ -1,7 +1,7 @@
#!/usr/bin/env bash
set -euo pipefail

echo "[docker] Starting package-manager container"
echo "[docker-pkgmgr] Starting package-manager container"

# ---------------------------------------------------------------------------
# Log distribution info
@@ -9,19 +9,19 @@ echo "[docker] Starting package-manager container"
if [[ -f /etc/os-release ]]; then
# shellcheck disable=SC1091
. /etc/os-release
echo "[docker] Detected distro: ${ID:-unknown} (like: ${ID_LIKE:-})"
echo "[docker-pkgmgr] Detected distro: ${ID:-unknown} (like: ${ID_LIKE:-})"
fi

# Always use /src (mounted from host) as working directory
echo "[docker] Using /src as working directory"
cd /src
# Always use /opt/src/pkgmgr (mounted from host) as working directory
echo "[docker-pkgmgr] Using /opt/src/pkgmgr as working directory"
cd /opt/src/pkgmgr

# ---------------------------------------------------------------------------
# DEV mode: rebuild package-manager from the mounted /src tree
# DEV mode: rebuild package-manager from the mounted /opt/src/pkgmgr tree
# ---------------------------------------------------------------------------
if [[ "${REINSTALL_PKGMGR:-0}" == "1" ]]; then
echo "[docker] DEV mode enabled (REINSTALL_PKGMGR=1)"
echo "[docker] Rebuilding package-manager from /src via scripts/installation/package.sh..."
echo "[docker-pkgmgr] DEV mode enabled (REINSTALL_PKGMGR=1)"
echo "[docker-pkgmgr] Rebuilding package-manager from /opt/src/pkgmgr via scripts/installation/package.sh..."
bash scripts/installation/package.sh || exit 1
fi

@@ -29,9 +29,9 @@ fi
# Hand off to pkgmgr or arbitrary command
# ---------------------------------------------------------------------------
if [[ $# -eq 0 ]]; then
echo "[docker] No arguments provided. Showing pkgmgr help..."
echo "[docker-pkgmgr] No arguments provided. Showing pkgmgr help..."
exec pkgmgr --help
else
echo "[docker] Executing command: $*"
echo "[docker-pkgmgr] Executing command: $*"
exec "$@"
fi
@@ -6,7 +6,7 @@ echo "[arch/package] Building Arch package (makepkg --nodeps) in an isolated bui
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"

# We must not build inside /src (mounted repo). Build in /tmp to avoid permission issues.
# We must not build inside /opt/src/pkgmgr (mounted repo). Build in /tmp to avoid permission issues.
BUILD_ROOT="/tmp/package-manager-arch-build"
PKG_SRC_DIR="${PROJECT_ROOT}/packaging/arch"
PKG_BUILD_DIR="${BUILD_ROOT}/packaging/arch"
@@ -6,12 +6,12 @@ echo ">>> Running E2E tests: $PKGMGR_DISTRO"
echo "============================================================"

docker run --rm \
-v "$(pwd):/src" \
-v "$(pwd):/opt/src/pkgmgr" \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
-e REINSTALL_PKGMGR=1 \
-e TEST_PATTERN="${TEST_PATTERN}" \
--workdir /src \
--workdir /opt/src/pkgmgr \
"pkgmgr-${PKGMGR_DISTRO}" \
bash -lc '
set -euo pipefail
@@ -40,14 +40,14 @@ docker run --rm \
}

# Mark the mounted repository as safe to avoid Git ownership errors.
# Newer Git (e.g. on Ubuntu) complains about the gitdir (/src/.git),
# older versions about the worktree (/src). Nix turns "." into the
# flake input "git+file:///src", which then uses Git under the hood.
# Newer Git (e.g. on Ubuntu) complains about the gitdir (/opt/src/pkgmgr/.git),
# older versions about the worktree (/opt/src/pkgmgr). Nix turns "." into the
# flake input "git+file:///opt/src/pkgmgr", which then uses Git under the hood.
if command -v git >/dev/null 2>&1; then
# Worktree path
git config --global --add safe.directory /src || true
git config --global --add safe.directory /opt/src/pkgmgr || true
# Gitdir path shown in the "dubious ownership" error
git config --global --add safe.directory /src/.git || true
git config --global --add safe.directory /opt/src/pkgmgr/.git || true
# Ephemeral CI containers: allow all paths as a last resort
git config --global --add safe.directory "*" || true
fi
@@ -55,6 +55,6 @@ docker run --rm \
# Run the E2E tests inside the Nix development shell
nix develop .#default --no-write-lock-file -c \
python3 -m unittest discover \
-s /src/tests/e2e \
-s /opt/src/pkgmgr/tests/e2e \
-p "$TEST_PATTERN"
'
@@ -9,18 +9,18 @@ echo ">>> Image: ${IMAGE}"
echo "============================================================"

docker run --rm \
-v "$(pwd):/src" \
-v "$(pwd):/opt/src/pkgmgr" \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
--workdir /src \
--workdir /opt/src/pkgmgr \
-e REINSTALL_PKGMGR=1 \
"${IMAGE}" \
bash -lc '
set -euo pipefail

if command -v git >/dev/null 2>&1; then
git config --global --add safe.directory /src || true
git config --global --add safe.directory /src/.git || true
git config --global --add safe.directory /opt/src/pkgmgr || true
git config --global --add safe.directory /opt/src/pkgmgr/.git || true
git config --global --add safe.directory "*" || true
fi

@@ -38,9 +38,9 @@ docker run --rm \
# ------------------------------------------------------------
# Retry helper for GitHub API rate-limit (HTTP 403)
# ------------------------------------------------------------
if [[ -f /src/scripts/nix/lib/retry_403.sh ]]; then
if [[ -f /opt/src/pkgmgr/scripts/nix/lib/retry_403.sh ]]; then
# shellcheck source=./scripts/nix/lib/retry_403.sh
source /src/scripts/nix/lib/retry_403.sh
source /opt/src/pkgmgr/scripts/nix/lib/retry_403.sh
elif [[ -f ./scripts/nix/lib/retry_403.sh ]]; then
# shellcheck source=./scripts/nix/lib/retry_403.sh
source ./scripts/nix/lib/retry_403.sh
@@ -17,8 +17,8 @@ echo
# ------------------------------------------------------------
if OUTPUT=$(docker run --rm \
-e REINSTALL_PKGMGR=1 \
-v "$(pwd):/src" \
-w /src \
-v "$(pwd):/opt/src/pkgmgr" \
-w /opt/src/pkgmgr \
"${IMAGE}" \
bash -lc '
set -euo pipefail
@@ -6,19 +6,19 @@ echo ">>> Running INTEGRATION tests in ${PKGMGR_DISTRO} container"
echo "============================================================"

docker run --rm \
-v "$(pwd):/src" \
-v "$(pwd):/opt/src/pkgmgr" \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
--workdir /src \
--workdir /opt/src/pkgmgr \
-e REINSTALL_PKGMGR=1 \
-e TEST_PATTERN="${TEST_PATTERN}" \
"pkgmgr-${PKGMGR_DISTRO}" \
bash -lc '
set -e;
git config --global --add safe.directory /src || true;
git config --global --add safe.directory /opt/src/pkgmgr || true;
nix develop .#default --no-write-lock-file -c \
python3 -m unittest discover \
-s tests/integration \
-t /src \
-t /opt/src/pkgmgr \
-p "$TEST_PATTERN";
'
@@ -6,19 +6,19 @@ echo ">>> Running UNIT tests in ${PKGMGR_DISTRO} container"
echo "============================================================"

docker run --rm \
-v "$(pwd):/src" \
-v "$(pwd):/opt/src/pkgmgr" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
--workdir /src \
--workdir /opt/src/pkgmgr \
-e REINSTALL_PKGMGR=1 \
-e TEST_PATTERN="${TEST_PATTERN}" \
"pkgmgr-${PKGMGR_DISTRO}" \
bash -lc '
set -e;
git config --global --add safe.directory /src || true;
git config --global --add safe.directory /opt/src/pkgmgr || true;
nix develop .#default --no-write-lock-file -c \
python3 -m unittest discover \
-s tests/unit \
-t /src \
-t /opt/src/pkgmgr \
-p "$TEST_PATTERN";
'
@@ -2,7 +2,7 @@ from __future__ import annotations

from typing import Optional

from pkgmgr.core.git.errors import GitError
from pkgmgr.core.git.errors import GitRunError
from pkgmgr.core.git.queries import get_current_branch
from pkgmgr.core.git.commands import (
GitDeleteRemoteBranchError,
@@ -32,7 +32,7 @@ def close_branch(
if not name:
try:
name = get_current_branch(cwd=cwd)
except GitError as exc:
except GitRunError as exc:
raise RuntimeError(f"Failed to detect current branch: {exc}") from exc

if not name:
@@ -55,7 +55,7 @@ def close_branch(
print("Aborted closing branch.")
return

# Execute workflow (commands raise specific GitError subclasses)
# Execute workflow (commands raise specific GitRunError subclasses)
fetch("origin", cwd=cwd)
checkout(target_base, cwd=cwd)
pull("origin", target_base, cwd=cwd)
@@ -2,7 +2,7 @@ from __future__ import annotations

from typing import Optional

from pkgmgr.core.git.errors import GitError
from pkgmgr.core.git.errors import GitRunError
from pkgmgr.core.git.queries import get_current_branch
from pkgmgr.core.git.commands import (
GitDeleteRemoteBranchError,
@@ -26,7 +26,7 @@ def drop_branch(
if not name:
try:
name = get_current_branch(cwd=cwd)
except GitError as exc:
except GitRunError as exc:
raise RuntimeError(f"Failed to detect current branch: {exc}") from exc

if not name:
@@ -30,7 +30,7 @@ def open_branch(

resolved_base = resolve_base_branch(base_branch, fallback_base, cwd=cwd)

# Workflow (commands raise specific GitError subclasses)
# Workflow (commands raise specific GitBaseError subclasses)
fetch("origin", cwd=cwd)
checkout(resolved_base, cwd=cwd)
pull("origin", resolved_base, cwd=cwd)
@@ -14,7 +14,7 @@ with the expected structure:

For each discovered repository, the function:
• derives provider, account, repository from the folder structure
• (optionally) determines the latest commit hash via git log
• (optionally) determines the latest commit hash via git
• generates a unique CLI alias
• marks ignore=True for newly discovered repos
• skips repos already known in defaults or user config
@@ -23,11 +23,11 @@ For each discovered repository, the function:
from __future__ import annotations

import os
import subprocess
from typing import Any, Dict

from pkgmgr.core.command.alias import generate_alias
from pkgmgr.core.config.save import save_user_config
from pkgmgr.core.git.queries import get_latest_commit


def config_init(
@@ -116,27 +116,18 @@ def config_init(

print(f"[ADD] {provider}/{account}/{repo_name}")

# Determine commit hash
try:
result = subprocess.run(
["git", "log", "-1", "--format=%H"],
cwd=repo_path,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
check=True,
)
verified = result.stdout.strip()
print(f"[INFO] Latest commit: {verified}")
except Exception as exc:
verified = ""
print(f"[WARN] Could not read commit: {exc}")
# Determine commit hash via git query
verified_commit = get_latest_commit(repo_path) or ""
if verified_commit:
print(f"[INFO] Latest commit: {verified_commit}")
else:
print("[WARN] Could not read commit (not a git repo or no commits).")

entry = {
entry: Dict[str, Any] = {
"provider": provider,
"account": account,
"repository": repo_name,
"verified": {"commit": verified},
"verified": {"commit": verified_commit},
"ignore": True,
}
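To make the bullet list in the docstring above concrete, one discovered repository ends up as a config entry roughly like the following sketch (values are invented for illustration and match only the keys shown in the diff):

    # Illustrative shape of one auto-discovered entry (not taken from the diff):
    {
        "provider": "github.com",
        "account": "owner",
        "repository": "repo",
        "verified": {"commit": "0119af330f..."},  # empty string if no commit could be read
        "ignore": True,                            # newly discovered repos start ignored
    }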
@@ -3,7 +3,7 @@ from __future__ import annotations
import os
from typing import Optional, Set

from pkgmgr.core.git.errors import GitError
from pkgmgr.core.git.errors import GitRunError
from pkgmgr.core.git.commands import (
GitAddRemoteError,
GitAddRemotePushUrlError,
@@ -12,14 +12,38 @@ from pkgmgr.core.git.commands import (
add_remote_push_url,
set_remote_url,
)
from pkgmgr.core.git.queries import (
get_remote_push_urls,
list_remotes,
)
from pkgmgr.core.git.queries import get_remote_push_urls, list_remotes

from .types import MirrorMap, RepoMirrorContext, Repository


def _is_git_remote_url(url: str) -> bool:
"""
True only for URLs that should become git remotes / push URLs.

Accepted:
- git@host:owner/repo(.git) (SCP-like SSH)
- ssh://git@host(:port)/owner/repo(.git) (SSH URL)
- https://host/owner/repo.git (HTTPS git remote)
- http://host/owner/repo.git (rare, but possible)
Everything else (e.g. PyPI project page) stays metadata only.
"""
u = (url or "").strip()
if not u:
return False

if u.startswith("git@"):
return True

if u.startswith("ssh://"):
return True

if (u.startswith("https://") or u.startswith("http://")) and u.endswith(".git"):
return True

return False
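A minimal sketch of the filtering behaviour described in the docstring above; the hosts, owners, and repo names are made up, only the kpmx PyPI project name comes from this change set:

    # Which mirror entries become git remotes vs. stay metadata only:
    assert _is_git_remote_url("git@github.com:owner/repo.git")               # SCP-like SSH
    assert _is_git_remote_url("ssh://git@git.example.org:2222/owner/repo.git")
    assert _is_git_remote_url("https://github.com/owner/repo.git")           # HTTPS git remote
    assert not _is_git_remote_url("https://pypi.org/project/kpmx/")          # no .git suffix -> metadata only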
def build_default_ssh_url(repo: Repository) -> Optional[str]:
|
||||
provider = repo.get("provider")
|
||||
account = repo.get("account")
|
||||
@@ -35,25 +59,29 @@ def build_default_ssh_url(repo: Repository) -> Optional[str]:
|
||||
return f"git@{provider}:{account}/{name}.git"
|
||||
|
||||
|
||||
def _git_mirrors_only(m: MirrorMap) -> MirrorMap:
|
||||
return {k: v for k, v in m.items() if v and _is_git_remote_url(v)}
|
||||
|
||||
|
||||
def determine_primary_remote_url(
|
||||
repo: Repository,
|
||||
ctx: RepoMirrorContext,
|
||||
) -> Optional[str]:
|
||||
"""
|
||||
Priority order:
|
||||
1. origin from resolved mirrors
|
||||
2. MIRRORS file order
|
||||
3. config mirrors order
|
||||
Priority order (GIT URLS ONLY):
|
||||
1. origin from resolved mirrors (if it is a git URL)
|
||||
2. first git URL from MIRRORS file (in file order)
|
||||
3. first git URL from config mirrors (in config order)
|
||||
4. default SSH URL
|
||||
"""
|
||||
resolved = ctx.resolved_mirrors
|
||||
|
||||
if resolved.get("origin"):
|
||||
return resolved["origin"]
|
||||
origin = resolved.get("origin")
|
||||
if origin and _is_git_remote_url(origin):
|
||||
return origin
|
||||
|
||||
for mirrors in (ctx.file_mirrors, ctx.config_mirrors):
|
||||
for _, url in mirrors.items():
|
||||
if url:
|
||||
if url and _is_git_remote_url(url):
|
||||
return url
|
||||
|
||||
return build_default_ssh_url(repo)
|
||||
@@ -62,7 +90,7 @@ def determine_primary_remote_url(
|
||||
def has_origin_remote(repo_dir: str) -> bool:
|
||||
try:
|
||||
return "origin" in list_remotes(cwd=repo_dir)
|
||||
except GitError:
|
||||
except GitRunError:
|
||||
return False
|
||||
|
||||
|
||||
@@ -82,16 +110,19 @@ def _ensure_additional_push_urls(
|
||||
preview: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Ensure all mirror URLs (except primary) are configured as additional push URLs for origin.
|
||||
Preview is handled by the underlying git runner.
|
||||
Ensure all *git* mirror URLs (except primary) are configured as additional
|
||||
push URLs for origin.
|
||||
|
||||
Non-git URLs (like PyPI) are ignored and will never land in git config.
|
||||
"""
|
||||
desired: Set[str] = {u for u in mirrors.values() if u and u != primary}
|
||||
git_only = _git_mirrors_only(mirrors)
|
||||
desired: Set[str] = {u for u in git_only.values() if u and u != primary}
|
||||
if not desired:
|
||||
return
|
||||
|
||||
try:
|
||||
existing = get_remote_push_urls("origin", cwd=repo_dir)
|
||||
except GitError:
|
||||
except GitRunError:
|
||||
existing = set()
|
||||
|
||||
for url in sorted(desired - existing):
|
||||
@@ -110,8 +141,8 @@ def ensure_origin_remote(
|
||||
return
|
||||
|
||||
primary = determine_primary_remote_url(repo, ctx)
|
||||
if not primary:
|
||||
print("[WARN] No primary mirror URL could be determined.")
|
||||
if not primary or not _is_git_remote_url(primary):
|
||||
print("[WARN] No valid git primary mirror URL could be determined.")
|
||||
return
|
||||
|
||||
# 1) Ensure origin exists
|
||||
@@ -122,14 +153,13 @@ def ensure_origin_remote(
|
||||
print(f"[WARN] Failed to add origin remote: {exc}")
|
||||
return # without origin we cannot reliably proceed
|
||||
|
||||
# 2) Ensure origin fetch+push URLs are correct (ALWAYS, even if origin already existed)
|
||||
# 2) Ensure origin fetch+push URLs are correct
|
||||
try:
|
||||
_set_origin_fetch_and_push(repo_dir, primary, preview)
|
||||
except GitSetRemoteUrlError as exc:
|
||||
# Do not abort: still try to add additional push URLs
|
||||
print(f"[WARN] Failed to set origin URLs: {exc}")
|
||||
|
||||
# 3) Ensure additional push URLs for mirrors
|
||||
# 3) Ensure additional push URLs for mirrors (git urls only)
|
||||
try:
|
||||
_ensure_additional_push_urls(repo_dir, ctx.resolved_mirrors, primary, preview)
|
||||
except GitAddRemotePushUrlError as exc:
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from collections.abc import Iterable, Mapping
|
||||
from typing import Union
|
||||
from urllib.parse import urlparse
|
||||
from typing import Mapping
|
||||
|
||||
from .types import MirrorMap, Repository
|
||||
|
||||
@@ -32,7 +33,7 @@ def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
|
||||
"""
|
||||
Supports:
|
||||
NAME URL
|
||||
URL → auto name = hostname
|
||||
URL -> auto-generate name from hostname
|
||||
"""
|
||||
path = os.path.join(repo_dir, filename)
|
||||
mirrors: MirrorMap = {}
|
||||
@@ -52,7 +53,8 @@ def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
|
||||
# Case 1: "name url"
|
||||
if len(parts) == 2:
|
||||
name, url = parts
|
||||
# Case 2: "url" → auto-generate name
|
||||
|
||||
# Case 2: "url" -> auto name
|
||||
elif len(parts) == 1:
|
||||
url = parts[0]
|
||||
parsed = urlparse(url)
|
||||
@@ -67,21 +69,56 @@ def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
|
||||
continue
|
||||
|
||||
mirrors[name] = url
|
||||
|
||||
except OSError as exc:
|
||||
print(f"[WARN] Could not read MIRRORS file at {path}: {exc}")
|
||||
|
||||
return mirrors
|
||||
|
||||
|
||||
MirrorsInput = Union[Mapping[str, str], Iterable[str]]
|
||||
|
||||
|
||||
def write_mirrors_file(
|
||||
repo_dir: str,
|
||||
mirrors: Mapping[str, str],
|
||||
mirrors: MirrorsInput,
|
||||
filename: str = "MIRRORS",
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Write MIRRORS in one of two formats:
|
||||
|
||||
1) Mapping[str, str] -> "NAME URL" per line (legacy / compatible)
|
||||
2) Iterable[str] -> "URL" per line (new preferred)
|
||||
|
||||
Strings are treated as a single URL (not iterated character-by-character).
|
||||
"""
|
||||
path = os.path.join(repo_dir, filename)
|
||||
lines = [f"{name} {url}" for name, url in sorted(mirrors.items())]
|
||||
|
||||
lines: list[str]
|
||||
|
||||
if isinstance(mirrors, Mapping):
|
||||
items = [
|
||||
(str(name), str(url))
|
||||
for name, url in mirrors.items()
|
||||
if url is not None and str(url).strip()
|
||||
]
|
||||
items.sort(key=lambda x: (x[0], x[1]))
|
||||
lines = [f"{name} {url}" for name, url in items]
|
||||
|
||||
else:
|
||||
if isinstance(mirrors, (str, bytes)):
|
||||
urls = [str(mirrors).strip()]
|
||||
else:
|
||||
urls = [
|
||||
str(url).strip()
|
||||
for url in mirrors
|
||||
if url is not None and str(url).strip()
|
||||
]
|
||||
|
||||
urls = sorted(set(urls))
|
||||
lines = urls
|
||||
|
||||
content = "\n".join(lines) + ("\n" if lines else "")
|
||||
|
||||
if preview:
|
||||
@@ -94,5 +131,6 @@ def write_mirrors_file(
with open(path, "w", encoding="utf-8") as fh:
fh.write(content)
print(f"[INFO] Wrote MIRRORS file at {path}")

except OSError as exc:
print(f"[ERROR] Failed to write MIRRORS file at {path}: {exc}")
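A short usage sketch of the two input shapes accepted by the reworked write_mirrors_file (the repository path and URLs are placeholders, not values from the diff):

    # Mapping input -> "NAME URL" lines (legacy, compatible format)
    write_mirrors_file("/tmp/demo-repo", {"origin": "git@github.com:owner/repo.git"})

    # Iterable input -> one URL per line (new preferred format)
    write_mirrors_file("/tmp/demo-repo", ["git@github.com:owner/repo.git",
                                          "https://pypi.org/project/kpmx/"])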
@@ -2,13 +2,29 @@ from __future__ import annotations
|
||||
|
||||
from typing import List
|
||||
|
||||
from pkgmgr.core.git.queries import probe_remote_reachable
|
||||
|
||||
from .context import build_context
|
||||
from .git_remote import ensure_origin_remote, determine_primary_remote_url
|
||||
from pkgmgr.core.git.queries import probe_remote_reachable
|
||||
from .remote_provision import ensure_remote_repository
|
||||
from .types import Repository
|
||||
|
||||
|
||||
def _is_git_remote_url(url: str) -> bool:
|
||||
# Keep the same filtering semantics as in git_remote.py (duplicated on purpose
|
||||
# to keep setup_cmd independent of private helpers).
|
||||
u = (url or "").strip()
|
||||
if not u:
|
||||
return False
|
||||
if u.startswith("git@"):
|
||||
return True
|
||||
if u.startswith("ssh://"):
|
||||
return True
|
||||
if (u.startswith("https://") or u.startswith("http://")) and u.endswith(".git"):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _setup_local_mirrors_for_repo(
|
||||
repo: Repository,
|
||||
repositories_base_dir: str,
|
||||
@@ -48,16 +64,23 @@ def _setup_remote_mirrors_for_repo(
|
||||
preview,
|
||||
)
|
||||
|
||||
if not ctx.resolved_mirrors:
|
||||
# Probe only git URLs (do not try ls-remote against PyPI etc.)
|
||||
# If there are no mirrors at all, probe the primary git URL.
|
||||
git_mirrors = {k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)}
|
||||
|
||||
if not git_mirrors:
|
||||
primary = determine_primary_remote_url(repo, ctx)
|
||||
if not primary:
|
||||
if not primary or not _is_git_remote_url(primary):
|
||||
print("[INFO] No git mirrors to probe.")
|
||||
print()
|
||||
return
|
||||
|
||||
ok = probe_remote_reachable(primary, cwd=ctx.repo_dir)
|
||||
print("[OK]" if ok else "[WARN]", primary)
|
||||
print()
|
||||
return
|
||||
|
||||
for name, url in ctx.resolved_mirrors.items():
|
||||
for name, url in git_mirrors.items():
|
||||
ok = probe_remote_reachable(url, cwd=ctx.repo_dir)
|
||||
print(f"[OK] {name}: {url}" if ok else f"[WARN] {name}: {url}")
|
||||
|
||||
|
||||
@@ -24,6 +24,8 @@ import tempfile
|
||||
from datetime import date, datetime
|
||||
from typing import Optional, Tuple
|
||||
|
||||
from pkgmgr.core.git.queries import get_config_value
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Editor helper for interactive changelog messages
|
||||
@@ -74,10 +76,7 @@ def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
lines = [
|
||||
line for line in content.splitlines()
|
||||
if not line.strip().startswith("#")
|
||||
]
|
||||
lines = [line for line in content.splitlines() if not line.strip().startswith("#")]
|
||||
return "\n".join(lines).strip()
|
||||
|
||||
|
||||
@@ -85,6 +84,7 @@ def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
|
||||
# File update helpers (pyproject + extra packaging + changelog)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def update_pyproject_version(
|
||||
pyproject_path: str,
|
||||
new_version: str,
|
||||
@@ -365,24 +365,6 @@ def update_changelog(
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _get_git_config_value(key: str) -> Optional[str]:
|
||||
"""
|
||||
Try to read a value from `git config --get <key>`.
|
||||
"""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["git", "config", "--get", key],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=False,
|
||||
)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
value = result.stdout.strip()
|
||||
return value or None
|
||||
|
||||
|
||||
def _get_debian_author() -> Tuple[str, str]:
|
||||
"""
|
||||
Determine the maintainer name/email for debian/changelog entries.
|
||||
@@ -396,9 +378,9 @@ def _get_debian_author() -> Tuple[str, str]:
|
||||
email = os.environ.get("GIT_AUTHOR_EMAIL")
|
||||
|
||||
if not name:
|
||||
name = _get_git_config_value("user.name")
|
||||
name = get_config_value("user.name")
|
||||
if not email:
|
||||
email = _get_git_config_value("user.email")
|
||||
email = get_config_value("user.email")
|
||||
|
||||
if not name:
|
||||
name = "Unknown Maintainer"
|
||||
|
||||
@@ -1,73 +1,90 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import subprocess
|
||||
|
||||
from pkgmgr.core.git import GitError
|
||||
|
||||
|
||||
def run_git_command(cmd: str) -> None:
|
||||
print(f"[GIT] {cmd}")
|
||||
try:
|
||||
subprocess.run(
|
||||
cmd,
|
||||
shell=True,
|
||||
check=True,
|
||||
text=True,
|
||||
capture_output=True,
|
||||
from pkgmgr.core.git.commands import (
|
||||
fetch,
|
||||
pull_ff_only,
|
||||
push,
|
||||
tag_force_annotated,
|
||||
)
|
||||
except subprocess.CalledProcessError as exc:
|
||||
print(f"[ERROR] Git command failed: {cmd}")
|
||||
print(f" Exit code: {exc.returncode}")
|
||||
if exc.stdout:
|
||||
print("\n" + exc.stdout)
|
||||
if exc.stderr:
|
||||
print("\n" + exc.stderr)
|
||||
raise GitError(f"Git command failed: {cmd}") from exc
|
||||
from pkgmgr.core.git.queries import get_upstream_ref, list_tags
|
||||
|
||||
|
||||
def _capture(cmd: str) -> str:
|
||||
res = subprocess.run(cmd, shell=True, check=False, capture_output=True, text=True)
|
||||
return (res.stdout or "").strip()
|
||||
|
||||
|
||||
def ensure_clean_and_synced(preview: bool = False) -> None:
|
||||
def ensure_clean_and_synced(*, preview: bool = False) -> None:
|
||||
"""
|
||||
Always run a pull BEFORE modifying anything.
|
||||
Uses --ff-only to avoid creating merge commits automatically.
|
||||
If no upstream is configured, we skip.
|
||||
"""
|
||||
upstream = _capture("git rev-parse --abbrev-ref --symbolic-full-name @{u} 2>/dev/null")
|
||||
upstream = get_upstream_ref()
|
||||
if not upstream:
|
||||
print("[INFO] No upstream configured for current branch. Skipping pull.")
|
||||
return
|
||||
|
||||
if preview:
|
||||
print("[PREVIEW] Would run: git fetch origin --prune --tags --force")
|
||||
print("[PREVIEW] Would run: git pull --ff-only")
|
||||
return
|
||||
|
||||
print("[INFO] Syncing with remote before making any changes...")
|
||||
run_git_command("git fetch origin --prune --tags --force")
|
||||
run_git_command("git pull --ff-only")
|
||||
|
||||
# Mirrors old behavior:
|
||||
# git fetch origin --prune --tags --force
|
||||
# git pull --ff-only
|
||||
fetch(remote="origin", prune=True, tags=True, force=True, preview=preview)
|
||||
pull_ff_only(preview=preview)
|
||||
|
||||
|
||||
def _parse_v_tag(tag: str) -> tuple[int, ...] | None:
"""
Parse tags like 'v1.2.3' into (1, 2, 3).
Returns None if parsing is not possible.
"""
if not tag.startswith("v"):
return None

raw = tag[1:]
if not raw:
return None

parts = raw.split(".")
out: list[int] = []
for p in parts:
if not p.isdigit():
return None
out.append(int(p))
return tuple(out) if out else None
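A quick illustration of the tag parser above (the assertions are illustrative, not part of the diff):

    assert _parse_v_tag("v1.2.3") == (1, 2, 3)
    assert _parse_v_tag("v1.2-rc1") is None   # non-numeric component -> not comparable
    assert _parse_v_tag("latest") is None     # no leading "v"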
def is_highest_version_tag(tag: str) -> bool:
|
||||
"""
|
||||
Return True if `tag` is the highest version among all tags matching v*.
|
||||
Comparison uses `sort -V` for natural version ordering.
|
||||
|
||||
We avoid shelling out to `sort -V` and implement a small vX.Y.Z parser.
|
||||
Non-parseable v* tags are ignored for version comparison.
|
||||
"""
|
||||
all_v = _capture("git tag --list 'v*'")
|
||||
all_v = list_tags("v*")
|
||||
if not all_v:
|
||||
return True # No tags yet, so the current tag is the highest
|
||||
return True # No tags yet -> current is highest by definition
|
||||
|
||||
# Get the latest tag in natural version order
|
||||
latest = _capture("git tag --list 'v*' | sort -V | tail -n1")
|
||||
print(f"[INFO] Latest tag: {latest}, Current tag: {tag}")
|
||||
parsed_current = _parse_v_tag(tag)
|
||||
if parsed_current is None:
|
||||
# If the "current" tag isn't parseable, fall back to conservative behavior:
|
||||
# treat it as highest only if it matches the max lexicographically.
|
||||
latest_lex = max(all_v)
|
||||
print(f"[INFO] Latest tag (lex): {latest_lex}, Current tag: {tag}")
|
||||
return tag >= latest_lex
|
||||
|
||||
# Ensure that the current tag is always considered the highest if it's the latest one
|
||||
return tag >= latest # Use comparison operator to consider all future tags
|
||||
parsed_all: list[tuple[int, ...]] = []
|
||||
for t in all_v:
|
||||
parsed = _parse_v_tag(t)
|
||||
if parsed is not None:
|
||||
parsed_all.append(parsed)
|
||||
|
||||
if not parsed_all:
|
||||
# No parseable tags -> nothing to compare against
|
||||
return True
|
||||
|
||||
latest = max(parsed_all)
|
||||
print(f"[INFO] Latest tag (parsed): v{'.'.join(map(str, latest))}, Current tag: {tag}")
|
||||
return parsed_current >= latest
|
||||
|
||||
|
||||
def update_latest_tag(new_tag: str, preview: bool = False) -> None:
|
||||
def update_latest_tag(new_tag: str, *, preview: bool = False) -> None:
|
||||
"""
|
||||
Move the floating 'latest' tag to the newly created release tag.
|
||||
|
||||
@@ -78,15 +95,10 @@ def update_latest_tag(new_tag: str, preview: bool = False) -> None:
|
||||
target_ref = f"{new_tag}^{{}}"
|
||||
print(f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})...")
|
||||
|
||||
if preview:
|
||||
print(
|
||||
f'[PREVIEW] Would run: git tag -f -a latest {target_ref} '
|
||||
f'-m "Floating latest tag for {new_tag}"'
|
||||
tag_force_annotated(
|
||||
name="latest",
|
||||
target=target_ref,
|
||||
message=f"Floating latest tag for {new_tag}",
|
||||
preview=preview,
|
||||
)
|
||||
print("[PREVIEW] Would run: git push origin latest --force")
|
||||
return
|
||||
|
||||
run_git_command(
|
||||
f'git tag -f -a latest {target_ref} -m "Floating latest tag for {new_tag}"'
|
||||
)
|
||||
run_git_command("git push origin latest --force")
|
||||
push("origin", "latest", force=True, preview=preview)
|
||||
|
||||
@@ -5,7 +5,8 @@ import sys
|
||||
from typing import Optional
|
||||
|
||||
from pkgmgr.actions.branch import close_branch
|
||||
from pkgmgr.core.git import GitError
|
||||
from pkgmgr.core.git import GitRunError
|
||||
from pkgmgr.core.git.commands import add, commit, push, tag_annotated
|
||||
from pkgmgr.core.git.queries import get_current_branch
|
||||
from pkgmgr.core.repository.paths import resolve_repo_paths
|
||||
|
||||
@@ -21,7 +22,6 @@ from .files import (
|
||||
from .git_ops import (
|
||||
ensure_clean_and_synced,
|
||||
is_highest_version_tag,
|
||||
run_git_command,
|
||||
update_latest_tag,
|
||||
)
|
||||
from .prompts import confirm_proceed_release, should_delete_branch
|
||||
@@ -40,7 +40,7 @@ def _release_impl(
|
||||
# Determine current branch early
|
||||
try:
|
||||
branch = get_current_branch() or "main"
|
||||
except GitError:
|
||||
except GitRunError:
|
||||
branch = "main"
|
||||
print(f"Releasing on branch: {branch}")
|
||||
|
||||
@@ -126,12 +126,11 @@ def _release_impl(
|
||||
existing_files = [p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)]
|
||||
|
||||
if preview:
|
||||
for path in existing_files:
|
||||
print(f"[PREVIEW] Would run: git add {path}")
|
||||
print(f'[PREVIEW] Would run: git commit -am "{commit_msg}"')
|
||||
print(f'[PREVIEW] Would run: git tag -a {new_tag} -m "{tag_msg}"')
|
||||
print(f"[PREVIEW] Would run: git push origin {branch}")
|
||||
print(f"[PREVIEW] Would run: git push origin {new_tag}")
|
||||
add(existing_files, preview=True)
|
||||
commit(commit_msg, all=True, preview=True)
|
||||
tag_annotated(new_tag, tag_msg, preview=True)
|
||||
push("origin", branch, preview=True)
|
||||
push("origin", new_tag, preview=True)
|
||||
|
||||
if is_highest_version_tag(new_tag):
|
||||
update_latest_tag(new_tag, preview=True)
|
||||
@@ -145,15 +144,13 @@ def _release_impl(
|
||||
print(f"[PREVIEW] Would ask whether to delete branch {branch} after release.")
|
||||
return
|
||||
|
||||
for path in existing_files:
|
||||
run_git_command(f"git add {path}")
|
||||
|
||||
run_git_command(f'git commit -am "{commit_msg}"')
|
||||
run_git_command(f'git tag -a {new_tag} -m "{tag_msg}"')
|
||||
add(existing_files, preview=False)
|
||||
commit(commit_msg, all=True, preview=False)
|
||||
tag_annotated(new_tag, tag_msg, preview=False)
|
||||
|
||||
# Push branch and ONLY the newly created version tag (no --tags)
|
||||
run_git_command(f"git push origin {branch}")
|
||||
run_git_command(f"git push origin {new_tag}")
|
||||
push("origin", branch, preview=False)
|
||||
push("origin", new_tag, preview=False)
|
||||
|
||||
# Update 'latest' only if this is the highest version tag
|
||||
try:
|
||||
@@ -161,7 +158,7 @@ def _release_impl(
|
||||
update_latest_tag(new_tag, preview=False)
|
||||
else:
|
||||
print(f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest).")
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
print(f"[WARN] Failed to update floating 'latest' tag for {new_tag}: {exc}")
|
||||
print("'latest' tag was not updated.")
|
||||
|
||||
|
||||
@@ -1,103 +1,132 @@
|
||||
import subprocess
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from pkgmgr.core.git.commands import clone as git_clone, GitCloneError
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.verify import verify_repository
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
|
||||
def _build_clone_url(repo: Repository, clone_mode: str) -> Optional[str]:
|
||||
provider = repo.get("provider")
|
||||
account = repo.get("account")
|
||||
name = repo.get("repository")
|
||||
replacement = repo.get("replacement")
|
||||
|
||||
if clone_mode == "ssh":
|
||||
if not provider or not account or not name:
|
||||
return None
|
||||
return f"git@{provider}:{account}/{name}.git"
|
||||
|
||||
if clone_mode in ("https", "shallow"):
|
||||
if replacement:
|
||||
return f"https://{replacement}.git"
|
||||
if not provider or not account or not name:
|
||||
return None
|
||||
return f"https://{provider}/{account}/{name}.git"
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def clone_repos(
|
||||
selected_repos,
|
||||
selected_repos: List[Repository],
|
||||
repositories_base_dir: str,
|
||||
all_repos,
|
||||
all_repos: List[Repository],
|
||||
preview: bool,
|
||||
no_verification: bool,
|
||||
clone_mode: str
|
||||
):
|
||||
clone_mode: str,
|
||||
) -> None:
|
||||
for repo in selected_repos:
|
||||
repo_identifier = get_repo_identifier(repo, all_repos)
|
||||
repo_dir = get_repo_dir(repositories_base_dir, repo)
|
||||
|
||||
if os.path.exists(repo_dir):
|
||||
print(f"[INFO] Repository '{repo_identifier}' already exists at '{repo_dir}'. Skipping clone.")
|
||||
print(
|
||||
f"[INFO] Repository '{repo_identifier}' already exists at '{repo_dir}'. Skipping clone."
|
||||
)
|
||||
continue
|
||||
|
||||
parent_dir = os.path.dirname(repo_dir)
|
||||
os.makedirs(parent_dir, exist_ok=True)
|
||||
# Build clone URL based on the clone_mode
|
||||
# Build clone URL based on the clone_mode
|
||||
if clone_mode == "ssh":
|
||||
clone_url = (
|
||||
f"git@{repo.get('provider')}:"
|
||||
f"{repo.get('account')}/"
|
||||
f"{repo.get('repository')}.git"
|
||||
)
|
||||
elif clone_mode in ("https", "shallow"):
|
||||
# Use replacement if defined, otherwise construct from provider/account/repository
|
||||
if repo.get("replacement"):
|
||||
clone_url = f"https://{repo.get('replacement')}.git"
|
||||
else:
|
||||
clone_url = (
|
||||
f"https://{repo.get('provider')}/"
|
||||
f"{repo.get('account')}/"
|
||||
f"{repo.get('repository')}.git"
|
||||
)
|
||||
else:
|
||||
print(f"Unknown clone mode '{clone_mode}'. Aborting clone for {repo_identifier}.")
|
||||
|
||||
clone_url = _build_clone_url(repo, clone_mode)
|
||||
if not clone_url:
|
||||
print(f"[WARNING] Cannot build clone URL for '{repo_identifier}'. Skipping.")
|
||||
continue
|
||||
|
||||
# Build base clone command
|
||||
base_clone_cmd = "git clone"
|
||||
if clone_mode == "shallow":
|
||||
# Shallow clone: only latest state via HTTPS, no full history
|
||||
base_clone_cmd += " --depth 1 --single-branch"
|
||||
shallow = clone_mode == "shallow"
|
||||
mode_label = "HTTPS (shallow)" if shallow else clone_mode.upper()
|
||||
|
||||
mode_label = "HTTPS (shallow)" if clone_mode == "shallow" else clone_mode.upper()
|
||||
print(
|
||||
f"[INFO] Attempting to clone '{repo_identifier}' using {mode_label} "
|
||||
f"from {clone_url} into '{repo_dir}'."
|
||||
)
|
||||
|
||||
if preview:
|
||||
print(f"[Preview] Would run: {base_clone_cmd} {clone_url} {repo_dir} in {parent_dir}")
|
||||
result = subprocess.CompletedProcess(args=[], returncode=0)
|
||||
else:
|
||||
result = subprocess.run(
|
||||
f"{base_clone_cmd} {clone_url} {repo_dir}",
|
||||
try:
|
||||
args = []
|
||||
if shallow:
|
||||
args += ["--depth", "1", "--single-branch"]
|
||||
args += [clone_url, repo_dir]
|
||||
|
||||
git_clone(
|
||||
args,
|
||||
cwd=parent_dir,
|
||||
shell=True,
|
||||
preview=preview,
|
||||
)
|
||||
|
||||
if result.returncode != 0:
|
||||
# Only offer fallback if the original mode was SSH.
|
||||
if clone_mode == "ssh":
|
||||
print(f"[WARNING] SSH clone failed for '{repo_identifier}' with return code {result.returncode}.")
|
||||
choice = input("Do you want to attempt HTTPS clone instead? (y/N): ").strip().lower()
|
||||
if choice == 'y':
|
||||
# Attempt HTTPS clone
|
||||
if repo.get("replacement"):
|
||||
clone_url = f"https://{repo.get('replacement')}.git"
|
||||
else:
|
||||
clone_url = f"https://{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}.git"
|
||||
print(f"[INFO] Attempting to clone '{repo_identifier}' using HTTPS from {clone_url} into '{repo_dir}'.")
|
||||
if preview:
|
||||
print(f"[Preview] Would run: git clone {clone_url} {repo_dir} in {parent_dir}")
|
||||
result = subprocess.CompletedProcess(args=[], returncode=0)
|
||||
else:
|
||||
result = subprocess.run(f"git clone {clone_url} {repo_dir}", cwd=parent_dir, shell=True)
|
||||
else:
|
||||
print(f"[INFO] HTTPS clone not attempted for '{repo_identifier}'.")
|
||||
continue
|
||||
else:
|
||||
# For https mode, do not attempt fallback.
|
||||
print(f"[WARNING] HTTPS clone failed for '{repo_identifier}' with return code {result.returncode}.")
|
||||
except GitCloneError as exc:
|
||||
if clone_mode != "ssh":
|
||||
print(f"[WARNING] Clone failed for '{repo_identifier}': {exc}")
|
||||
continue
|
||||
|
||||
print(f"[WARNING] SSH clone failed for '{repo_identifier}': {exc}")
|
||||
choice = input("Do you want to attempt HTTPS clone instead? (y/N): ").strip().lower()
|
||||
if choice != "y":
|
||||
print(f"[INFO] HTTPS clone not attempted for '{repo_identifier}'.")
|
||||
continue
|
||||
|
||||
fallback_url = _build_clone_url(repo, "https")
|
||||
if not fallback_url:
|
||||
print(f"[WARNING] Cannot build HTTPS URL for '{repo_identifier}'.")
|
||||
continue
|
||||
|
||||
print(
|
||||
f"[INFO] Attempting to clone '{repo_identifier}' using HTTPS "
|
||||
f"from {fallback_url} into '{repo_dir}'."
|
||||
)
|
||||
|
||||
try:
|
||||
git_clone(
|
||||
[fallback_url, repo_dir],
|
||||
cwd=parent_dir,
|
||||
preview=preview,
|
||||
)
|
||||
except GitCloneError as exc2:
|
||||
print(f"[WARNING] HTTPS clone failed for '{repo_identifier}': {exc2}")
|
||||
continue
|
||||
|
||||
# After cloning, perform verification in local mode.
|
||||
verified_info = repo.get("verified")
|
||||
if verified_info:
|
||||
verified_ok, errors, commit_hash, signing_key = verify_repository(repo, repo_dir, mode="local", no_verification=no_verification)
|
||||
if not no_verification and not verified_ok:
|
||||
if not verified_info:
|
||||
continue
|
||||
|
||||
verified_ok, errors, _commit_hash, _signing_key = verify_repository(
|
||||
repo,
|
||||
repo_dir,
|
||||
mode="local",
|
||||
no_verification=no_verification,
|
||||
)
|
||||
|
||||
if no_verification or verified_ok:
|
||||
continue
|
||||
|
||||
print(f"Warning: Verification failed for {repo_identifier} after cloning:")
|
||||
for err in errors:
|
||||
print(f" - {err}")
|
||||
|
||||
choice = input("Proceed anyway? (y/N): ").strip().lower()
|
||||
if choice != "y":
|
||||
print(f"Skipping repository {repo_identifier} due to failed verification.")
|
||||
|
||||
@@ -1,257 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict, Optional, Tuple
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import yaml
|
||||
|
||||
from pkgmgr.actions.mirror.io import write_mirrors_file
|
||||
from pkgmgr.actions.mirror.setup_cmd import setup_mirrors
|
||||
from pkgmgr.actions.repository.scaffold import render_default_templates
|
||||
from pkgmgr.core.command.alias import generate_alias
|
||||
from pkgmgr.core.config.save import save_user_config
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
_NAME_RE = re.compile(r"^[a-z0-9_-]+$")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class RepoParts:
|
||||
host: str
|
||||
port: Optional[str]
|
||||
owner: str
|
||||
name: str
|
||||
|
||||
|
||||
def _run(cmd: str, cwd: str, preview: bool) -> None:
|
||||
if preview:
|
||||
print(f"[Preview] Would run in {cwd}: {cmd}")
|
||||
return
|
||||
subprocess.run(cmd, cwd=cwd, shell=True, check=True)
|
||||
|
||||
|
||||
def _git_get(key: str) -> str:
|
||||
try:
|
||||
out = subprocess.run(
|
||||
f"git config --get {key}",
|
||||
shell=True,
|
||||
check=False,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
return (out.stdout or "").strip()
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
|
||||
def _split_host_port(host_with_port: str) -> Tuple[str, Optional[str]]:
|
||||
if ":" in host_with_port:
|
||||
host, port = host_with_port.split(":", 1)
|
||||
return host, port or None
|
||||
return host_with_port, None
|
||||
|
||||
|
||||
def _strip_git_suffix(name: str) -> str:
|
||||
return name[:-4] if name.endswith(".git") else name
|
||||
|
||||
|
||||
def _parse_git_url(url: str) -> RepoParts:
|
||||
if url.startswith("git@") and "://" not in url:
|
||||
left, right = url.split(":", 1)
|
||||
host = left.split("@", 1)[1]
|
||||
path = right.lstrip("/")
|
||||
owner, name = path.split("/", 1)
|
||||
return RepoParts(host=host, port=None, owner=owner, name=_strip_git_suffix(name))
|
||||
|
||||
parsed = urlparse(url)
|
||||
host = (parsed.hostname or "").strip()
|
||||
port = str(parsed.port) if parsed.port else None
|
||||
path = (parsed.path or "").strip("/")
|
||||
|
||||
if not host or not path or "/" not in path:
|
||||
raise ValueError(f"Could not parse git URL: {url}")
|
||||
|
||||
owner, name = path.split("/", 1)
|
||||
return RepoParts(host=host, port=port, owner=owner, name=_strip_git_suffix(name))
|
||||
|
||||
|
||||
def _parse_identifier(identifier: str) -> RepoParts:
|
||||
ident = identifier.strip()
|
||||
|
||||
if "://" in ident or ident.startswith("git@"):
|
||||
return _parse_git_url(ident)
|
||||
|
||||
parts = ident.split("/")
|
||||
if len(parts) != 3:
|
||||
raise ValueError("Identifier must be URL or 'provider(:port)/owner/repo'.")
|
||||
|
||||
host_with_port, owner, name = parts
|
||||
host, port = _split_host_port(host_with_port)
|
||||
return RepoParts(host=host, port=port, owner=owner, name=name)
|
||||
|
||||
|
||||
def _ensure_valid_repo_name(name: str) -> None:
|
||||
if not name or not _NAME_RE.fullmatch(name):
|
||||
raise ValueError("Repository name must match: lowercase a-z, 0-9, '_' and '-'.")
|
||||
|
||||
|
||||
def _repo_homepage(host: str, owner: str, name: str) -> str:
|
||||
return f"https://{host}/{owner}/{name}"
|
||||
|
||||
|
||||
def _build_default_primary_url(parts: RepoParts) -> str:
|
||||
if parts.port:
|
||||
return f"ssh://git@{parts.host}:{parts.port}/{parts.owner}/{parts.name}.git"
|
||||
return f"git@{parts.host}:{parts.owner}/{parts.name}.git"
|
||||
|
||||
|
||||
def _write_default_mirrors(repo_dir: str, primary: str, name: str, preview: bool) -> None:
|
||||
mirrors = {"origin": primary, "pypi": f"https://pypi.org/project/{name}/"}
|
||||
write_mirrors_file(repo_dir, mirrors, preview=preview)
|
||||
|
||||
|
||||
def _git_init_and_initial_commit(repo_dir: str, preview: bool) -> None:
|
||||
_run("git init", cwd=repo_dir, preview=preview)
|
||||
_run("git add -A", cwd=repo_dir, preview=preview)
|
||||
|
||||
if preview:
|
||||
print(f'[Preview] Would run in {repo_dir}: git commit -m "Initial commit"')
|
||||
return
|
||||
|
||||
subprocess.run('git commit -m "Initial commit"', cwd=repo_dir, shell=True, check=False)
|
||||
|
||||
|
||||
def _git_push_main_or_master(repo_dir: str, preview: bool) -> None:
|
||||
_run("git branch -M main", cwd=repo_dir, preview=preview)
|
||||
try:
|
||||
_run("git push -u origin main", cwd=repo_dir, preview=preview)
|
||||
return
|
||||
except subprocess.CalledProcessError:
|
||||
pass
|
||||
|
||||
try:
|
||||
_run("git branch -M master", cwd=repo_dir, preview=preview)
|
||||
_run("git push -u origin master", cwd=repo_dir, preview=preview)
|
||||
except subprocess.CalledProcessError as exc:
|
||||
print(f"[WARN] Push failed: {exc}")
|
||||
|
||||
|
||||
def create_repo(
|
||||
identifier: str,
|
||||
config_merged: Dict[str, Any],
|
||||
user_config_path: str,
|
||||
bin_dir: str,
|
||||
*,
|
||||
remote: bool = False,
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
parts = _parse_identifier(identifier)
|
||||
_ensure_valid_repo_name(parts.name)
|
||||
|
||||
directories = config_merged.get("directories") or {}
|
||||
base_dir = os.path.expanduser(str(directories.get("repositories", "~/Repositories")))
|
||||
repo_dir = os.path.join(base_dir, parts.host, parts.owner, parts.name)
|
||||
|
||||
author_name = _git_get("user.name") or "Unknown Author"
|
||||
author_email = _git_get("user.email") or "unknown@example.invalid"
|
||||
|
||||
homepage = _repo_homepage(parts.host, parts.owner, parts.name)
|
||||
primary_url = _build_default_primary_url(parts)
|
||||
|
||||
repositories = config_merged.get("repositories") or []
|
||||
exists = any(
|
||||
(
|
||||
r.get("provider") == parts.host
|
||||
and r.get("account") == parts.owner
|
||||
and r.get("repository") == parts.name
|
||||
)
|
||||
for r in repositories
|
||||
)
|
||||
|
||||
if not exists:
|
||||
new_entry: Repository = {
|
||||
"provider": parts.host,
|
||||
"port": parts.port,
|
||||
"account": parts.owner,
|
||||
"repository": parts.name,
|
||||
"homepage": homepage,
|
||||
"alias": generate_alias(
|
||||
{"repository": parts.name, "provider": parts.host, "account": parts.owner},
|
||||
bin_dir,
|
||||
existing_aliases=set(),
|
||||
),
|
||||
"verified": {},
|
||||
}
|
||||
|
||||
if os.path.exists(user_config_path):
|
||||
with open(user_config_path, "r", encoding="utf-8") as f:
|
||||
user_config = yaml.safe_load(f) or {}
|
||||
else:
|
||||
user_config = {"repositories": []}
|
||||
|
||||
user_config.setdefault("repositories", [])
|
||||
user_config["repositories"].append(new_entry)
|
||||
|
||||
if preview:
|
||||
print(f"[Preview] Would save user config: {user_config_path}")
|
||||
else:
|
||||
save_user_config(user_config, user_config_path)
|
||||
|
||||
config_merged.setdefault("repositories", []).append(new_entry)
|
||||
repo = new_entry
|
||||
print(f"[INFO] Added repository to configuration: {parts.host}/{parts.owner}/{parts.name}")
|
||||
else:
|
||||
repo = next(
|
||||
r
|
||||
for r in repositories
|
||||
if (
|
||||
r.get("provider") == parts.host
|
||||
and r.get("account") == parts.owner
|
||||
and r.get("repository") == parts.name
|
||||
)
|
||||
)
|
||||
print(f"[INFO] Repository already in configuration: {parts.host}/{parts.owner}/{parts.name}")
|
||||
|
||||
if preview:
|
||||
print(f"[Preview] Would ensure directory exists: {repo_dir}")
|
||||
else:
|
||||
os.makedirs(repo_dir, exist_ok=True)
|
||||
|
||||
tpl_context = {
|
||||
"provider": parts.host,
|
||||
"port": parts.port,
|
||||
"account": parts.owner,
|
||||
"repository": parts.name,
|
||||
"homepage": homepage,
|
||||
"author_name": author_name,
|
||||
"author_email": author_email,
|
||||
"license_text": f"All rights reserved by {author_name}",
|
||||
"primary_remote": primary_url,
|
||||
}
|
||||
|
||||
render_default_templates(repo_dir, context=tpl_context, preview=preview)
|
||||
_git_init_and_initial_commit(repo_dir, preview=preview)
|
||||
|
||||
_write_default_mirrors(repo_dir, primary=primary_url, name=parts.name, preview=preview)
|
||||
|
||||
repo.setdefault("mirrors", {})
|
||||
repo["mirrors"].setdefault("origin", primary_url)
|
||||
repo["mirrors"].setdefault("pypi", f"https://pypi.org/project/{parts.name}/")
|
||||
|
||||
setup_mirrors(
|
||||
selected_repos=[repo],
|
||||
repositories_base_dir=base_dir,
|
||||
all_repos=config_merged.get("repositories", []),
|
||||
preview=preview,
|
||||
local=True,
|
||||
remote=True,
|
||||
ensure_remote=bool(remote),
|
||||
)
|
||||
|
||||
if remote:
|
||||
_git_push_main_or_master(repo_dir, preview=preview)
|
||||
28
src/pkgmgr/actions/repository/create/__init__.py
Normal file
28
src/pkgmgr/actions/repository/create/__init__.py
Normal file
@@ -0,0 +1,28 @@
from __future__ import annotations

from typing import Any, Dict

from .service import CreateRepoService

RepositoryConfig = Dict[str, Any]

__all__ = [
    "CreateRepoService",
    "create_repo",
]


def create_repo(
    identifier: str,
    config_merged: RepositoryConfig,
    user_config_path: str,
    bin_dir: str,
    *,
    remote: bool = False,
    preview: bool = False,
) -> None:
    CreateRepoService(
        config_merged=config_merged,
        user_config_path=user_config_path,
        bin_dir=bin_dir,
    ).run(identifier=identifier, preview=preview, remote=remote)
84
src/pkgmgr/actions/repository/create/config_writer.py
Normal file
84
src/pkgmgr/actions/repository/create/config_writer.py
Normal file
@@ -0,0 +1,84 @@
from __future__ import annotations

import os
from typing import Dict, Any, Set

import yaml

from pkgmgr.core.command.alias import generate_alias
from pkgmgr.core.config.save import save_user_config

Repository = Dict[str, Any]


class ConfigRepoWriter:
    def __init__(
        self,
        *,
        config_merged: Dict[str, Any],
        user_config_path: str,
        bin_dir: str,
    ):
        self.config_merged = config_merged
        self.user_config_path = user_config_path
        self.bin_dir = bin_dir

    def ensure_repo_entry(
        self,
        *,
        host: str,
        port: str | None,
        owner: str,
        name: str,
        homepage: str,
        preview: bool,
    ) -> Repository:
        repositories = self.config_merged.setdefault("repositories", [])

        for repo in repositories:
            if (
                repo.get("provider") == host
                and repo.get("account") == owner
                and repo.get("repository") == name
            ):
                return repo

        existing_aliases: Set[str] = {
            str(r.get("alias")) for r in repositories if r.get("alias")
        }

        repo: Repository = {
            "provider": host,
            "port": port,
            "account": owner,
            "repository": name,
            "homepage": homepage,
            "alias": generate_alias(
                {
                    "repository": name,
                    "provider": host,
                    "account": owner,
                },
                self.bin_dir,
                existing_aliases=existing_aliases,
            ),
            "verified": {},
        }

        if preview:
            print(f"[Preview] Would add repository to config: {repo}")
            return repo

        if os.path.exists(self.user_config_path):
            with open(self.user_config_path, "r", encoding="utf-8") as f:
                user_cfg = yaml.safe_load(f) or {}
        else:
            user_cfg = {}

        user_cfg.setdefault("repositories", []).append(repo)
        save_user_config(user_cfg, self.user_config_path)

        repositories.append(repo)
        print(f"[INFO] Added repository to configuration: {host}/{owner}/{name}")

        return repo
35
src/pkgmgr/actions/repository/create/git_bootstrap.py
Normal file
35
src/pkgmgr/actions/repository/create/git_bootstrap.py
Normal file
@@ -0,0 +1,35 @@
from __future__ import annotations

from pkgmgr.core.git.commands import (
    GitCommitError,
    GitPushUpstreamError,
    add_all,
    branch_move,
    commit,
    init,
    push_upstream,
)


class GitBootstrapper:
    def init_repo(self, repo_dir: str, preview: bool) -> None:
        init(cwd=repo_dir, preview=preview)
        add_all(cwd=repo_dir, preview=preview)
        try:
            commit("Initial commit", cwd=repo_dir, preview=preview)
        except GitCommitError as exc:
            print(f"[WARN] Initial commit failed (continuing): {exc}")

    def push_default_branch(self, repo_dir: str, preview: bool) -> None:
        try:
            branch_move("main", cwd=repo_dir, preview=preview)
            push_upstream("origin", "main", cwd=repo_dir, preview=preview)
            return
        except GitPushUpstreamError:
            pass

        try:
            branch_move("master", cwd=repo_dir, preview=preview)
            push_upstream("origin", "master", cwd=repo_dir, preview=preview)
        except GitPushUpstreamError as exc:
            print(f"[WARN] Push failed: {exc}")
53
src/pkgmgr/actions/repository/create/mirrors.py
Normal file
53
src/pkgmgr/actions/repository/create/mirrors.py
Normal file
@@ -0,0 +1,53 @@
from __future__ import annotations

from typing import Any, Dict

from pkgmgr.actions.mirror.io import write_mirrors_file
from pkgmgr.actions.mirror.setup_cmd import setup_mirrors

Repository = Dict[str, Any]


class MirrorBootstrapper:
    """
    MIRRORS is the single source of truth.

    Defaults are written to MIRRORS and mirror setup derives
    git remotes exclusively from that file (git URLs only).
    """

    def write_defaults(
        self,
        *,
        repo_dir: str,
        primary: str,
        name: str,
        preview: bool,
    ) -> None:
        mirrors = {
            primary,
            f"https://pypi.org/project/{name}/",
        }
        write_mirrors_file(repo_dir, mirrors, preview=preview)

    def setup(
        self,
        *,
        repo: Repository,
        repositories_base_dir: str,
        all_repos: list[Repository],
        preview: bool,
        remote: bool,
    ) -> None:
        # IMPORTANT:
        # Do NOT set repo["mirrors"] here.
        # MIRRORS file is the single source of truth.
        setup_mirrors(
            selected_repos=[repo],
            repositories_base_dir=repositories_base_dir,
            all_repos=all_repos,
            preview=preview,
            local=True,
            remote=True,
            ensure_remote=remote,
        )
12
src/pkgmgr/actions/repository/create/model.py
Normal file
12
src/pkgmgr/actions/repository/create/model.py
Normal file
@@ -0,0 +1,12 @@
from __future__ import annotations

from dataclasses import dataclass
from typing import Optional


@dataclass(frozen=True)
class RepoParts:
    host: str
    port: Optional[str]
    owner: str
    name: str
68
src/pkgmgr/actions/repository/create/parser.py
Normal file
68
src/pkgmgr/actions/repository/create/parser.py
Normal file
@@ -0,0 +1,68 @@
from __future__ import annotations

import re
from typing import Tuple
from urllib.parse import urlparse

from .model import RepoParts

_NAME_RE = re.compile(r"^[a-z0-9_-]+$")


def parse_identifier(identifier: str) -> RepoParts:
    ident = identifier.strip()

    if "://" in ident or ident.startswith("git@"):
        return _parse_git_url(ident)

    parts = ident.split("/")
    if len(parts) != 3:
        raise ValueError("Identifier must be URL or 'provider(:port)/owner/repo'.")

    host_with_port, owner, name = parts
    host, port = _split_host_port(host_with_port)
    _ensure_valid_repo_name(name)

    return RepoParts(host=host, port=port, owner=owner, name=name)


def _parse_git_url(url: str) -> RepoParts:
    if url.startswith("git@") and "://" not in url:
        left, right = url.split(":", 1)
        host = left.split("@", 1)[1]
        owner, name = right.lstrip("/").split("/", 1)
        name = _strip_git_suffix(name)
        _ensure_valid_repo_name(name)
        return RepoParts(host=host, port=None, owner=owner, name=name)

    parsed = urlparse(url)
    host = parsed.hostname or ""
    port = str(parsed.port) if parsed.port else None
    path = (parsed.path or "").strip("/")

    if not host or "/" not in path:
        raise ValueError(f"Could not parse git URL: {url}")

    owner, name = path.split("/", 1)
    name = _strip_git_suffix(name)
    _ensure_valid_repo_name(name)

    return RepoParts(host=host, port=port, owner=owner, name=name)


def _split_host_port(host: str) -> Tuple[str, str | None]:
    if ":" in host:
        h, p = host.split(":", 1)
        return h, p or None
    return host, None


def _strip_git_suffix(name: str) -> str:
    return name[:-4] if name.endswith(".git") else name


def _ensure_valid_repo_name(name: str) -> None:
    if not _NAME_RE.fullmatch(name):
        raise ValueError(
            "Repository name must match: lowercase a-z, 0-9, '_' and '-'."
        )
52
src/pkgmgr/actions/repository/create/planner.py
Normal file
52
src/pkgmgr/actions/repository/create/planner.py
Normal file
@@ -0,0 +1,52 @@
from __future__ import annotations

import os
from typing import Dict, Any

from .model import RepoParts


class CreateRepoPlanner:
    def __init__(self, parts: RepoParts, repositories_base_dir: str):
        self.parts = parts
        self.repositories_base_dir = os.path.expanduser(repositories_base_dir)

    @property
    def repo_dir(self) -> str:
        return os.path.join(
            self.repositories_base_dir,
            self.parts.host,
            self.parts.owner,
            self.parts.name,
        )

    @property
    def homepage(self) -> str:
        return f"https://{self.parts.host}/{self.parts.owner}/{self.parts.name}"

    @property
    def primary_remote(self) -> str:
        if self.parts.port:
            return (
                f"ssh://git@{self.parts.host}:{self.parts.port}/"
                f"{self.parts.owner}/{self.parts.name}.git"
            )
        return f"git@{self.parts.host}:{self.parts.owner}/{self.parts.name}.git"

    def template_context(
        self,
        *,
        author_name: str,
        author_email: str,
    ) -> Dict[str, Any]:
        return {
            "provider": self.parts.host,
            "port": self.parts.port,
            "account": self.parts.owner,
            "repository": self.parts.name,
            "homepage": self.homepage,
            "author_name": author_name,
            "author_email": author_email,
            "license_text": f"All rights reserved by {author_name}",
            "primary_remote": self.primary_remote,
        }
97
src/pkgmgr/actions/repository/create/service.py
Normal file
97
src/pkgmgr/actions/repository/create/service.py
Normal file
@@ -0,0 +1,97 @@
from __future__ import annotations

import os
from typing import Dict, Any

from pkgmgr.core.git.queries import get_config_value

from .parser import parse_identifier
from .planner import CreateRepoPlanner
from .config_writer import ConfigRepoWriter
from .templates import TemplateRenderer
from .git_bootstrap import GitBootstrapper
from .mirrors import MirrorBootstrapper


class CreateRepoService:
    def __init__(
        self,
        *,
        config_merged: Dict[str, Any],
        user_config_path: str,
        bin_dir: str,
    ):
        self.config_merged = config_merged
        self.user_config_path = user_config_path
        self.bin_dir = bin_dir

        self.templates = TemplateRenderer()
        self.git = GitBootstrapper()
        self.mirrors = MirrorBootstrapper()

    def run(
        self,
        *,
        identifier: str,
        preview: bool,
        remote: bool,
    ) -> None:
        parts = parse_identifier(identifier)

        base_dir = self.config_merged.get("directories", {}).get(
            "repositories", "~/Repositories"
        )

        planner = CreateRepoPlanner(parts, base_dir)

        writer = ConfigRepoWriter(
            config_merged=self.config_merged,
            user_config_path=self.user_config_path,
            bin_dir=self.bin_dir,
        )

        repo = writer.ensure_repo_entry(
            host=parts.host,
            port=parts.port,
            owner=parts.owner,
            name=parts.name,
            homepage=planner.homepage,
            preview=preview,
        )

        if preview:
            print(f"[Preview] Would ensure directory exists: {planner.repo_dir}")
        else:
            os.makedirs(planner.repo_dir, exist_ok=True)

        author_name = get_config_value("user.name") or "Unknown Author"
        author_email = get_config_value("user.email") or "unknown@example.invalid"

        self.templates.render(
            repo_dir=planner.repo_dir,
            context=planner.template_context(
                author_name=author_name,
                author_email=author_email,
            ),
            preview=preview,
        )

        self.git.init_repo(planner.repo_dir, preview=preview)

        self.mirrors.write_defaults(
            repo_dir=planner.repo_dir,
            primary=planner.primary_remote,
            name=parts.name,
            preview=preview,
        )

        self.mirrors.setup(
            repo=repo,
            repositories_base_dir=os.path.expanduser(base_dir),
            all_repos=self.config_merged.get("repositories", []),
            preview=preview,
            remote=remote,
        )

        if remote:
            self.git.push_default_branch(planner.repo_dir, preview=preview)
80
src/pkgmgr/actions/repository/create/templates.py
Normal file
80
src/pkgmgr/actions/repository/create/templates.py
Normal file
@@ -0,0 +1,80 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any
|
||||
|
||||
from pkgmgr.core.git.queries import get_repo_root
|
||||
|
||||
try:
|
||||
from jinja2 import Environment, FileSystemLoader, StrictUndefined
|
||||
except Exception as exc: # pragma: no cover
|
||||
Environment = None # type: ignore
|
||||
FileSystemLoader = None # type: ignore
|
||||
StrictUndefined = None # type: ignore
|
||||
_JINJA_IMPORT_ERROR = exc
|
||||
else:
|
||||
_JINJA_IMPORT_ERROR = None
|
||||
|
||||
|
||||
class TemplateRenderer:
|
||||
def __init__(self) -> None:
|
||||
self.templates_dir = self._resolve_templates_dir()
|
||||
|
||||
def render(
|
||||
self,
|
||||
*,
|
||||
repo_dir: str,
|
||||
context: Dict[str, Any],
|
||||
preview: bool,
|
||||
) -> None:
|
||||
if preview:
|
||||
self._preview()
|
||||
return
|
||||
|
||||
if Environment is None:
|
||||
raise RuntimeError(
|
||||
"Jinja2 is required but not available. "
|
||||
f"Import error: {_JINJA_IMPORT_ERROR}"
|
||||
)
|
||||
|
||||
env = Environment(
|
||||
loader=FileSystemLoader(self.templates_dir),
|
||||
undefined=StrictUndefined,
|
||||
autoescape=False,
|
||||
keep_trailing_newline=True,
|
||||
)
|
||||
|
||||
for root, _, files in os.walk(self.templates_dir):
|
||||
for fn in files:
|
||||
if not fn.endswith(".j2"):
|
||||
continue
|
||||
|
||||
abs_src = os.path.join(root, fn)
|
||||
rel_src = os.path.relpath(abs_src, self.templates_dir)
|
||||
rel_out = rel_src[:-3]
|
||||
abs_out = os.path.join(repo_dir, rel_out)
|
||||
|
||||
os.makedirs(os.path.dirname(abs_out), exist_ok=True)
|
||||
template = env.get_template(rel_src)
|
||||
rendered = template.render(**context)
|
||||
|
||||
with open(abs_out, "w", encoding="utf-8") as f:
|
||||
f.write(rendered)
|
||||
|
||||
def _preview(self) -> None:
|
||||
for root, _, files in os.walk(self.templates_dir):
|
||||
for fn in files:
|
||||
if fn.endswith(".j2"):
|
||||
rel = os.path.relpath(
|
||||
os.path.join(root, fn), self.templates_dir
|
||||
)
|
||||
print(f"[Preview] Would render template: {rel} -> {rel[:-3]}")
|
||||
|
||||
@staticmethod
|
||||
def _resolve_templates_dir() -> str:
|
||||
here = Path(__file__).resolve().parent
|
||||
root = get_repo_root(cwd=str(here))
|
||||
if not root:
|
||||
raise RuntimeError("Could not determine repository root for templates.")
|
||||
return os.path.join(root, "templates", "default")
|
||||
@@ -1,25 +1,30 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import List, Dict, Any
|
||||
|
||||
from pkgmgr.core.git.commands import pull_args, GitPullArgsError
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.verify import verify_repository
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
|
||||
def pull_with_verification(
|
||||
selected_repos,
|
||||
repositories_base_dir,
|
||||
all_repos,
|
||||
extra_args,
|
||||
no_verification,
|
||||
selected_repos: List[Repository],
|
||||
repositories_base_dir: str,
|
||||
all_repos: List[Repository],
|
||||
extra_args: List[str],
|
||||
no_verification: bool,
|
||||
preview: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Execute `git pull` for each repository with verification.
|
||||
|
||||
- If verification fails and verification is enabled, prompt user to continue.
|
||||
- Uses core.git.commands.pull_args() (no raw subprocess usage).
|
||||
"""
|
||||
for repo in selected_repos:
|
||||
repo_identifier = get_repo_identifier(repo, all_repos)
|
||||
@@ -37,12 +42,7 @@ def pull_with_verification(
|
||||
no_verification=no_verification,
|
||||
)
|
||||
|
||||
if (
|
||||
not preview
|
||||
and not no_verification
|
||||
and verified_info
|
||||
and not verified_ok
|
||||
):
|
||||
if not preview and not no_verification and verified_info and not verified_ok:
|
||||
print(f"Warning: Verification failed for {repo_identifier}:")
|
||||
for err in errors:
|
||||
print(f" - {err}")
|
||||
@@ -50,17 +50,10 @@ def pull_with_verification(
|
||||
if choice != "y":
|
||||
continue
|
||||
|
||||
args_part = " ".join(extra_args) if extra_args else ""
|
||||
full_cmd = f"git pull{(' ' + args_part) if args_part else ''}"
|
||||
|
||||
if preview:
|
||||
print(f"[Preview] In '{repo_dir}': {full_cmd}")
|
||||
else:
|
||||
print(f"Running in '{repo_dir}': {full_cmd}")
|
||||
result = subprocess.run(full_cmd, cwd=repo_dir, shell=True, check=False)
|
||||
if result.returncode != 0:
|
||||
print(
|
||||
f"'git pull' for {repo_identifier} failed "
|
||||
f"with exit code {result.returncode}."
|
||||
)
|
||||
sys.exit(result.returncode)
|
||||
try:
|
||||
pull_args(extra_args, cwd=repo_dir, preview=preview)
|
||||
except GitPullArgsError as exc:
|
||||
# Keep behavior consistent with previous implementation:
|
||||
# stop on first failure and propagate return code as generic failure.
|
||||
print(str(exc))
|
||||
sys.exit(1)
|
||||
|
||||
@@ -1,105 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
try:
|
||||
from jinja2 import Environment, FileSystemLoader, StrictUndefined
|
||||
except Exception as exc: # pragma: no cover
|
||||
Environment = None # type: ignore[assignment]
|
||||
FileSystemLoader = None # type: ignore[assignment]
|
||||
StrictUndefined = None # type: ignore[assignment]
|
||||
_JINJA_IMPORT_ERROR = exc
|
||||
else:
|
||||
_JINJA_IMPORT_ERROR = None
|
||||
|
||||
|
||||
def _repo_root_from_here(anchor: Optional[Path] = None) -> str:
|
||||
"""
|
||||
Prefer git root (robust in editable installs / different layouts).
|
||||
Fallback to a conservative relative parent lookup.
|
||||
"""
|
||||
here = (anchor or Path(__file__)).resolve().parent
|
||||
try:
|
||||
r = subprocess.run(
|
||||
["git", "rev-parse", "--show-toplevel"],
|
||||
cwd=str(here),
|
||||
check=False,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
if r.returncode == 0:
|
||||
top = (r.stdout or "").strip()
|
||||
if top:
|
||||
return top
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Fallback: src/pkgmgr/actions/repository/scaffold.py -> <repo root> = parents[5]
|
||||
p = (anchor or Path(__file__)).resolve()
|
||||
if len(p.parents) < 6:
|
||||
raise RuntimeError(f"Unexpected path depth for: {p}")
|
||||
return str(p.parents[5])
|
||||
|
||||
|
||||
def _templates_dir() -> str:
|
||||
return os.path.join(_repo_root_from_here(), "templates", "default")
|
||||
|
||||
|
||||
def render_default_templates(
|
||||
repo_dir: str,
|
||||
*,
|
||||
context: Dict[str, Any],
|
||||
preview: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Render templates/default/*.j2 into repo_dir.
|
||||
Keeps create.py clean: create.py calls this function only.
|
||||
"""
|
||||
tpl_dir = _templates_dir()
|
||||
if not os.path.isdir(tpl_dir):
|
||||
raise RuntimeError(f"Templates directory not found: {tpl_dir}")
|
||||
|
||||
# Preview mode: do not require Jinja2 at all. We only print planned outputs.
|
||||
if preview:
|
||||
for root, _, files in os.walk(tpl_dir):
|
||||
for fn in files:
|
||||
if not fn.endswith(".j2"):
|
||||
continue
|
||||
abs_src = os.path.join(root, fn)
|
||||
rel_src = os.path.relpath(abs_src, tpl_dir)
|
||||
rel_out = rel_src[:-3]
|
||||
print(f"[Preview] Would render template: {rel_src} -> {rel_out}")
|
||||
return
|
||||
|
||||
if Environment is None or FileSystemLoader is None or StrictUndefined is None:
|
||||
raise RuntimeError(
|
||||
"Jinja2 is required for repo templates but is not available. "
|
||||
f"Import error: {_JINJA_IMPORT_ERROR}"
|
||||
)
|
||||
|
||||
env = Environment(
|
||||
loader=FileSystemLoader(tpl_dir),
|
||||
undefined=StrictUndefined,
|
||||
autoescape=False,
|
||||
keep_trailing_newline=True,
|
||||
)
|
||||
|
||||
for root, _, files in os.walk(tpl_dir):
|
||||
for fn in files:
|
||||
if not fn.endswith(".j2"):
|
||||
continue
|
||||
|
||||
abs_src = os.path.join(root, fn)
|
||||
rel_src = os.path.relpath(abs_src, tpl_dir)
|
||||
rel_out = rel_src[:-3]
|
||||
abs_out = os.path.join(repo_dir, rel_out)
|
||||
|
||||
os.makedirs(os.path.dirname(abs_out), exist_ok=True)
|
||||
template = env.get_template(rel_src)
|
||||
rendered = template.render(**context)
|
||||
|
||||
with open(abs_out, "w", encoding="utf-8") as f:
|
||||
f.write(rendered)
|
||||
@@ -1,64 +1,27 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.cli.tools import open_vscode_workspace
|
||||
from pkgmgr.cli.tools.paths import resolve_repository_path
|
||||
from pkgmgr.core.command.run import run_command
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
from pkgmgr.core.repository.dir import get_repo_dir
|
||||
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
|
||||
def _resolve_repository_path(repository: Repository, ctx: CLIContext) -> str:
|
||||
"""
|
||||
Resolve the filesystem path for a repository.
|
||||
|
||||
Priority:
|
||||
1. Use explicit keys if present (directory / path / workspace / workspace_dir).
|
||||
2. Fallback to get_repo_dir(...) using the repositories base directory
|
||||
from the CLI context.
|
||||
"""
|
||||
|
||||
# 1) Explicit path-like keys on the repository object
|
||||
for key in ("directory", "path", "workspace", "workspace_dir"):
|
||||
value = repository.get(key)
|
||||
if value:
|
||||
return value
|
||||
|
||||
# 2) Fallback: compute from base dir + repository metadata
|
||||
base_dir = (
|
||||
getattr(ctx, "repositories_base_dir", None)
|
||||
or getattr(ctx, "repositories_dir", None)
|
||||
)
|
||||
if not base_dir:
|
||||
raise RuntimeError(
|
||||
"Cannot resolve repositories base directory from context; "
|
||||
"expected ctx.repositories_base_dir or ctx.repositories_dir."
|
||||
)
|
||||
|
||||
return get_repo_dir(base_dir, repository)
|
||||
|
||||
|
||||
def handle_tools_command(
|
||||
args,
|
||||
ctx: CLIContext,
|
||||
selected: List[Repository],
|
||||
) -> None:
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# nautilus "explore" command
|
||||
# ------------------------------------------------------------------
|
||||
if args.command == "explore":
|
||||
for repository in selected:
|
||||
repo_path = _resolve_repository_path(repository, ctx)
|
||||
run_command(
|
||||
f'nautilus "{repo_path}" & disown'
|
||||
)
|
||||
repo_path = resolve_repository_path(repository, ctx)
|
||||
run_command(f'nautilus "{repo_path}" & disown')
|
||||
return
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
@@ -66,50 +29,13 @@ def handle_tools_command(
|
||||
# ------------------------------------------------------------------
|
||||
if args.command == "terminal":
|
||||
for repository in selected:
|
||||
repo_path = _resolve_repository_path(repository, ctx)
|
||||
run_command(
|
||||
f'gnome-terminal --tab --working-directory="{repo_path}"'
|
||||
)
|
||||
repo_path = resolve_repository_path(repository, ctx)
|
||||
run_command(f'gnome-terminal --tab --working-directory="{repo_path}"')
|
||||
return
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# VS Code workspace command
|
||||
# ------------------------------------------------------------------
|
||||
if args.command == "code":
|
||||
if not selected:
|
||||
print("No repositories selected.")
|
||||
return
|
||||
|
||||
identifiers = [
|
||||
get_repo_identifier(repo, ctx.all_repositories)
|
||||
for repo in selected
|
||||
]
|
||||
sorted_identifiers = sorted(identifiers)
|
||||
workspace_name = "_".join(sorted_identifiers) + ".code-workspace"
|
||||
|
||||
directories_cfg = ctx.config_merged.get("directories") or {}
|
||||
workspaces_dir = os.path.expanduser(
|
||||
directories_cfg.get("workspaces", "~/Workspaces")
|
||||
)
|
||||
os.makedirs(workspaces_dir, exist_ok=True)
|
||||
workspace_file = os.path.join(workspaces_dir, workspace_name)
|
||||
|
||||
folders = [
|
||||
{"path": _resolve_repository_path(repository, ctx)}
|
||||
for repository in selected
|
||||
]
|
||||
|
||||
workspace_data = {
|
||||
"folders": folders,
|
||||
"settings": {},
|
||||
}
|
||||
|
||||
if not os.path.exists(workspace_file):
|
||||
with open(workspace_file, "w", encoding="utf-8") as f:
|
||||
json.dump(workspace_data, f, indent=4)
|
||||
print(f"Created workspace file: {workspace_file}")
|
||||
else:
|
||||
print(f"Using existing workspace file: {workspace_file}")
|
||||
|
||||
run_command(f'code "{workspace_file}"')
|
||||
open_vscode_workspace(ctx, selected)
|
||||
return
|
||||
|
||||
5
src/pkgmgr/cli/tools/__init__.py
Normal file
5
src/pkgmgr/cli/tools/__init__.py
Normal file
@@ -0,0 +1,5 @@
from __future__ import annotations

from .vscode import open_vscode_workspace

__all__ = ["open_vscode_workspace"]
35
src/pkgmgr/cli/tools/paths.py
Normal file
35
src/pkgmgr/cli/tools/paths.py
Normal file
@@ -0,0 +1,35 @@
from __future__ import annotations

from typing import Any, Dict

from pkgmgr.cli.context import CLIContext
from pkgmgr.core.repository.dir import get_repo_dir

Repository = Dict[str, Any]


def resolve_repository_path(repository: Repository, ctx: CLIContext) -> str:
    """
    Resolve the filesystem path for a repository.

    Priority:
    1. Use explicit keys if present (directory / path / workspace / workspace_dir).
    2. Fallback to get_repo_dir(...) using the repositories base directory
       from the CLI context.
    """
    for key in ("directory", "path", "workspace", "workspace_dir"):
        value = repository.get(key)
        if value:
            return value

    base_dir = (
        getattr(ctx, "repositories_base_dir", None)
        or getattr(ctx, "repositories_dir", None)
    )
    if not base_dir:
        raise RuntimeError(
            "Cannot resolve repositories base directory from context; "
            "expected ctx.repositories_base_dir or ctx.repositories_dir."
        )

    return get_repo_dir(base_dir, repository)
102
src/pkgmgr/cli/tools/vscode.py
Normal file
102
src/pkgmgr/cli/tools/vscode.py
Normal file
@@ -0,0 +1,102 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.cli.tools.paths import resolve_repository_path
|
||||
from pkgmgr.core.command.run import run_command
|
||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||
|
||||
Repository = Dict[str, Any]
|
||||
|
||||
|
||||
def _ensure_vscode_cli_available() -> None:
|
||||
"""
|
||||
Ensure that the VS Code CLI ('code') is available in PATH.
|
||||
"""
|
||||
if shutil.which("code") is None:
|
||||
raise RuntimeError(
|
||||
"VS Code CLI ('code') not found in PATH.\n\n"
|
||||
"Hint:\n"
|
||||
" Install Visual Studio Code and ensure the 'code' command is available.\n"
|
||||
" VS Code → Command Palette → 'Shell Command: Install code command in PATH'\n"
|
||||
)
|
||||
|
||||
|
||||
def _ensure_identifiers_are_filename_safe(identifiers: List[str]) -> None:
|
||||
"""
|
||||
Ensure identifiers can be used in a filename.
|
||||
|
||||
If an identifier contains '/', it likely means the repository has not yet
|
||||
been explicitly identified (no short identifier configured).
|
||||
"""
|
||||
invalid = [i for i in identifiers if "/" in i or os.sep in i]
|
||||
if invalid:
|
||||
raise RuntimeError(
|
||||
"Cannot create VS Code workspace.\n\n"
|
||||
"The following repositories are not yet identified "
|
||||
"(identifier contains '/'): \n"
|
||||
+ "\n".join(f" - {i}" for i in invalid)
|
||||
+ "\n\n"
|
||||
"Hint:\n"
|
||||
" The repository has no short identifier yet.\n"
|
||||
" Add an explicit identifier in your configuration before using `pkgmgr tools code`.\n"
|
||||
)
|
||||
|
||||
|
||||
def _resolve_workspaces_dir(ctx: CLIContext) -> str:
|
||||
directories_cfg = ctx.config_merged.get("directories") or {}
|
||||
return os.path.expanduser(directories_cfg.get("workspaces", "~/Workspaces"))
|
||||
|
||||
|
||||
def _build_workspace_filename(identifiers: List[str]) -> str:
|
||||
sorted_identifiers = sorted(identifiers)
|
||||
return "_".join(sorted_identifiers) + ".code-workspace"
|
||||
|
||||
|
||||
def _build_workspace_data(selected: List[Repository], ctx: CLIContext) -> Dict[str, Any]:
|
||||
folders = [{"path": resolve_repository_path(repo, ctx)} for repo in selected]
|
||||
return {
|
||||
"folders": folders,
|
||||
"settings": {},
|
||||
}
|
||||
|
||||
|
||||
def open_vscode_workspace(ctx: CLIContext, selected: List[Repository]) -> None:
|
||||
"""
|
||||
Create (if missing) and open a VS Code workspace for the selected repositories.
|
||||
|
||||
Policy:
|
||||
- Fail with a clear error if VS Code CLI is missing.
|
||||
- Fail with a clear error if any repository identifier contains '/', because that
|
||||
indicates the repo has not been explicitly identified (no short identifier).
|
||||
- Do NOT auto-sanitize identifiers and do NOT create subfolders under workspaces.
|
||||
"""
|
||||
if not selected:
|
||||
print("No repositories selected.")
|
||||
return
|
||||
|
||||
_ensure_vscode_cli_available()
|
||||
|
||||
identifiers = [get_repo_identifier(repo, ctx.all_repositories) for repo in selected]
|
||||
_ensure_identifiers_are_filename_safe(identifiers)
|
||||
|
||||
workspaces_dir = _resolve_workspaces_dir(ctx)
|
||||
os.makedirs(workspaces_dir, exist_ok=True)
|
||||
|
||||
workspace_name = _build_workspace_filename(identifiers)
|
||||
workspace_file = os.path.join(workspaces_dir, workspace_name)
|
||||
|
||||
workspace_data = _build_workspace_data(selected, ctx)
|
||||
|
||||
if not os.path.exists(workspace_file):
|
||||
with open(workspace_file, "w", encoding="utf-8") as f:
|
||||
json.dump(workspace_data, f, indent=4)
|
||||
print(f"Created workspace file: {workspace_file}")
|
||||
else:
|
||||
print(f"Using existing workspace file: {workspace_file}")
|
||||
|
||||
run_command(f'code "{workspace_file}"')
|
||||
@@ -1,4 +1,3 @@
|
||||
# src/pkgmgr/core/credentials/__init__.py
|
||||
"""Credential resolution for provider APIs."""
|
||||
|
||||
from .resolver import ResolutionOptions, TokenResolver
|
||||
|
||||
@@ -3,9 +3,11 @@
|
||||
from .env import EnvTokenProvider
|
||||
from .keyring import KeyringTokenProvider
|
||||
from .prompt import PromptTokenProvider
|
||||
from .gh import GhTokenProvider
|
||||
|
||||
__all__ = [
|
||||
"EnvTokenProvider",
|
||||
"KeyringTokenProvider",
|
||||
"PromptTokenProvider",
|
||||
"GhTokenProvider",
|
||||
]
|
||||
|
||||
43
src/pkgmgr/core/credentials/providers/gh.py
Normal file
43
src/pkgmgr/core/credentials/providers/gh.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import shutil
|
||||
import subprocess
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from ..types import TokenRequest, TokenResult
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class GhTokenProvider:
|
||||
"""
|
||||
Resolve a GitHub token via GitHub CLI (`gh auth token`).
|
||||
|
||||
This does NOT persist anything; it only reads what `gh` already knows.
|
||||
"""
|
||||
source_name: str = "gh"
|
||||
|
||||
def get(self, request: TokenRequest) -> Optional[TokenResult]:
|
||||
# Only meaningful for GitHub-like providers
|
||||
kind = (request.provider_kind or "").strip().lower()
|
||||
if kind not in ("github", "github.com"):
|
||||
return None
|
||||
|
||||
if not shutil.which("gh"):
|
||||
return None
|
||||
|
||||
host = (request.host or "").strip() or "github.com"
|
||||
|
||||
try:
|
||||
out = subprocess.check_output(
|
||||
["gh", "auth", "token", "--hostname", host],
|
||||
stderr=subprocess.STDOUT,
|
||||
text=True,
|
||||
).strip()
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
if not out:
|
||||
return None
|
||||
|
||||
return TokenResult(token=out, source=self.source_name)
|
||||
@@ -6,9 +6,11 @@ from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from .providers.env import EnvTokenProvider
|
||||
from .providers.gh import GhTokenProvider
|
||||
from .providers.keyring import KeyringTokenProvider
|
||||
from .providers.prompt import PromptTokenProvider
|
||||
from .types import KeyringUnavailableError, NoCredentialsError, TokenRequest, TokenResult
|
||||
from .validate import validate_token
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -21,10 +23,24 @@ class ResolutionOptions:
|
||||
|
||||
|
||||
class TokenResolver:
|
||||
"""Resolve tokens from multiple sources (ENV -> Keyring -> Prompt)."""
|
||||
"""
|
||||
Resolve tokens for provider APIs using the following policy:
|
||||
|
||||
0) ENV (explicit user intent) -> return as-is (do NOT persist)
|
||||
1) GitHub CLI (gh) -> if available and token validates, return
|
||||
2) Keyring -> if token validates, return; if invalid and
|
||||
interactive prompting is allowed, prompt and
|
||||
OVERWRITE the keyring entry
|
||||
3) Prompt -> prompt and (optionally) store in keyring
|
||||
|
||||
Notes:
|
||||
- Keyring requires python-keyring.
|
||||
- Token validation is provider-specific (currently GitHub cloud).
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._env = EnvTokenProvider()
|
||||
self._gh = GhTokenProvider()
|
||||
self._keyring = KeyringTokenProvider()
|
||||
self._prompt = PromptTokenProvider()
|
||||
self._warned_keyring: bool = False
|
||||
@@ -48,6 +64,33 @@ class TokenResolver:
|
||||
print(" sudo dnf install python3-keyring", file=sys.stderr)
|
||||
print("", file=sys.stderr)
|
||||
|
||||
def _prompt_and_maybe_store(
|
||||
self,
|
||||
request: TokenRequest,
|
||||
opts: ResolutionOptions,
|
||||
) -> Optional[TokenResult]:
|
||||
"""
|
||||
Prompt for a token and optionally store it in keyring.
|
||||
If keyring is unavailable, still return the token for this run.
|
||||
"""
|
||||
if not (opts.interactive and opts.allow_prompt):
|
||||
return None
|
||||
|
||||
prompt_res = self._prompt.get(request)
|
||||
if not prompt_res:
|
||||
return None
|
||||
|
||||
if opts.save_prompt_token_to_keyring:
|
||||
try:
|
||||
self._keyring.set(request, prompt_res.token) # overwrite is fine
|
||||
except KeyringUnavailableError as exc:
|
||||
self._warn_keyring_unavailable(exc)
|
||||
except Exception:
|
||||
# If keyring cannot store, still use token for this run.
|
||||
pass
|
||||
|
||||
return prompt_res
|
||||
|
||||
def get_token(
|
||||
self,
|
||||
provider_kind: str,
|
||||
@@ -58,16 +101,29 @@ class TokenResolver:
|
||||
opts = options or ResolutionOptions()
|
||||
request = TokenRequest(provider_kind=provider_kind, host=host, owner=owner)
|
||||
|
||||
# 1) ENV
|
||||
# 0) ENV (highest priority; explicit user intent)
|
||||
env_res = self._env.get(request)
|
||||
if env_res:
|
||||
# Do NOT validate or persist env tokens automatically.
|
||||
return env_res
|
||||
|
||||
# 2) Keyring
|
||||
# 1) GitHub CLI (gh) (auto-read; validate)
|
||||
gh_res = self._gh.get(request)
|
||||
if gh_res and validate_token(request.provider_kind, request.host, gh_res.token):
|
||||
return gh_res
|
||||
|
||||
# 2) Keyring (validate; if invalid -> prompt + overwrite)
|
||||
try:
|
||||
kr_res = self._keyring.get(request)
|
||||
if kr_res:
|
||||
if validate_token(request.provider_kind, request.host, kr_res.token):
|
||||
return kr_res
|
||||
|
||||
# Token exists but seems invalid -> re-prompt and overwrite keyring.
|
||||
renewed = self._prompt_and_maybe_store(request, opts)
|
||||
if renewed:
|
||||
return renewed
|
||||
|
||||
except KeyringUnavailableError as exc:
|
||||
# Show a helpful warning once, then continue (prompt fallback).
|
||||
self._warn_keyring_unavailable(exc)
|
||||
@@ -76,21 +132,12 @@ class TokenResolver:
|
||||
pass
|
||||
|
||||
# 3) Prompt (optional)
|
||||
if opts.interactive and opts.allow_prompt:
|
||||
prompt_res = self._prompt.get(request)
|
||||
prompt_res = self._prompt_and_maybe_store(request, opts)
|
||||
if prompt_res:
|
||||
if opts.save_prompt_token_to_keyring:
|
||||
try:
|
||||
self._keyring.set(request, prompt_res.token)
|
||||
except KeyringUnavailableError as exc:
|
||||
self._warn_keyring_unavailable(exc)
|
||||
except Exception:
|
||||
# If keyring cannot store, still use token for this run.
|
||||
pass
|
||||
return prompt_res
|
||||
|
||||
raise NoCredentialsError(
|
||||
f"No token available for {provider_kind}@{host}"
|
||||
+ (f" (owner: {owner})" if owner else "")
|
||||
+ ". Provide it via environment variable or keyring."
|
||||
+ ". Provide it via environment variable, keyring, or gh auth."
|
||||
)
|
||||
|
||||
@@ -44,6 +44,7 @@ def env_var_candidates(provider_kind: str, host: str, owner: Optional[str]) -> l
|
||||
candidates.append(f"PKGMGR_{kind}_TOKEN")
|
||||
candidates.append(f"PKGMGR_TOKEN_{kind}")
|
||||
candidates.append("PKGMGR_TOKEN")
|
||||
|
||||
return candidates
|
||||
|
||||
|
||||
|
||||
40
src/pkgmgr/core/credentials/validate.py
Normal file
40
src/pkgmgr/core/credentials/validate.py
Normal file
@@ -0,0 +1,40 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import urllib.request
|
||||
import json
|
||||
|
||||
|
||||
def validate_token(provider_kind: str, host: str, token: str) -> bool:
|
||||
"""
|
||||
Return True if token appears valid for the provider.
|
||||
Currently implemented for GitHub only.
|
||||
"""
|
||||
kind = (provider_kind or "").strip().lower()
|
||||
host = (host or "").strip() or "github.com"
|
||||
token = (token or "").strip()
|
||||
if not token:
|
||||
return False
|
||||
|
||||
if kind in ("github", "github.com") and host.lower() == "github.com":
|
||||
req = urllib.request.Request(
|
||||
"https://api.github.com/user",
|
||||
headers={
|
||||
"Authorization": f"Bearer {token}",
|
||||
"Accept": "application/vnd.github+json",
|
||||
"User-Agent": "pkgmgr",
|
||||
},
|
||||
method="GET",
|
||||
)
|
||||
try:
|
||||
with urllib.request.urlopen(req, timeout=10) as resp:
|
||||
if resp.status != 200:
|
||||
return False
|
||||
# Optional: parse to ensure body is JSON
|
||||
_ = json.loads(resp.read().decode("utf-8"))
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
# Unknown provider: don't hard-fail validation (conservative default)
|
||||
# If you prefer strictness: return False here.
|
||||
return True
|
||||
@@ -1,5 +1,8 @@
from __future__ import annotations

from .errors import GitRunError
from .run import run

"""
Lightweight helper functions around Git commands.

@@ -8,10 +11,7 @@ logic (release, version, changelog) does not have to deal with the
details of subprocess handling.
"""

from .errors import GitError
from .run import run

__all__ = [
    "GitError",
    "run"
    "GitRunError",
    "run",
]

@@ -1,25 +1,40 @@
|
||||
# src/pkgmgr/core/git/commands/__init__.py
|
||||
from __future__ import annotations
|
||||
|
||||
from .add import GitAddError, add
|
||||
from .add_all import GitAddAllError, add_all
|
||||
from .add_remote import GitAddRemoteError, add_remote
|
||||
from .add_remote_push_url import GitAddRemotePushUrlError, add_remote_push_url
|
||||
from .branch_move import GitBranchMoveError, branch_move
|
||||
from .checkout import GitCheckoutError, checkout
|
||||
from .clone import GitCloneError, clone
|
||||
from .commit import GitCommitError, commit
|
||||
from .create_branch import GitCreateBranchError, create_branch
|
||||
from .delete_local_branch import GitDeleteLocalBranchError, delete_local_branch
|
||||
from .delete_remote_branch import GitDeleteRemoteBranchError, delete_remote_branch
|
||||
from .fetch import GitFetchError, fetch
|
||||
from .init import GitInitError, init
|
||||
from .merge_no_ff import GitMergeError, merge_no_ff
|
||||
from .pull import GitPullError, pull
|
||||
from .pull_args import GitPullArgsError, pull_args
|
||||
from .pull_ff_only import GitPullFfOnlyError, pull_ff_only
|
||||
from .push import GitPushError, push
|
||||
from .create_branch import GitCreateBranchError, create_branch
|
||||
from .push_upstream import GitPushUpstreamError, push_upstream
|
||||
|
||||
from .add_remote import GitAddRemoteError, add_remote
|
||||
from .set_remote_url import GitSetRemoteUrlError, set_remote_url
|
||||
from .add_remote_push_url import GitAddRemotePushUrlError, add_remote_push_url
|
||||
from .tag_annotated import GitTagAnnotatedError, tag_annotated
|
||||
from .tag_force_annotated import GitTagForceAnnotatedError, tag_force_annotated
|
||||
|
||||
__all__ = [
|
||||
"add",
|
||||
"add_all",
|
||||
"fetch",
|
||||
"checkout",
|
||||
"pull",
|
||||
"pull_args",
|
||||
"pull_ff_only",
|
||||
"merge_no_ff",
|
||||
"push",
|
||||
"commit",
|
||||
"delete_local_branch",
|
||||
"delete_remote_branch",
|
||||
"create_branch",
|
||||
@@ -27,11 +42,21 @@ __all__ = [
|
||||
"add_remote",
|
||||
"set_remote_url",
|
||||
"add_remote_push_url",
|
||||
"tag_annotated",
|
||||
"tag_force_annotated",
|
||||
"clone",
|
||||
"init",
|
||||
"branch_move",
|
||||
"GitAddError",
|
||||
"GitAddAllError",
|
||||
"GitFetchError",
|
||||
"GitCheckoutError",
|
||||
"GitPullError",
|
||||
"GitPullArgsError",
|
||||
"GitPullFfOnlyError",
|
||||
"GitMergeError",
|
||||
"GitPushError",
|
||||
"GitCommitError",
|
||||
"GitDeleteLocalBranchError",
|
||||
"GitDeleteRemoteBranchError",
|
||||
"GitCreateBranchError",
|
||||
@@ -39,4 +64,9 @@ __all__ = [
|
||||
"GitAddRemoteError",
|
||||
"GitSetRemoteUrlError",
|
||||
"GitAddRemotePushUrlError",
|
||||
"GitTagAnnotatedError",
|
||||
"GitTagForceAnnotatedError",
|
||||
"GitCloneError",
|
||||
"GitInitError",
|
||||
"GitBranchMoveError",
|
||||
]
|
||||
|
||||
44
src/pkgmgr/core/git/commands/add.py
Normal file
44
src/pkgmgr/core/git/commands/add.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Iterable, List, Sequence, Union
|
||||
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
class GitAddError(GitCommandError):
|
||||
"""Raised when `git add` fails."""
|
||||
|
||||
|
||||
PathLike = Union[str, Sequence[str], Iterable[str]]
|
||||
|
||||
|
||||
def _normalize_paths(paths: PathLike) -> List[str]:
|
||||
if isinstance(paths, str):
|
||||
return [paths]
|
||||
return [p for p in paths]
|
||||
|
||||
|
||||
def add(
|
||||
paths: PathLike,
|
||||
*,
|
||||
cwd: str = ".",
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Stage one or multiple paths.
|
||||
|
||||
Equivalent to:
|
||||
git add <path...>
|
||||
"""
|
||||
normalized = _normalize_paths(paths)
|
||||
if not normalized:
|
||||
return
|
||||
|
||||
try:
|
||||
run(["add", *normalized], cwd=cwd, preview=preview)
|
||||
except GitRunError as exc:
|
||||
raise GitAddError(
|
||||
f"Failed to add paths to staging area: {normalized!r}.",
|
||||
cwd=cwd,
|
||||
) from exc
|
||||
21
src/pkgmgr/core/git/commands/add_all.py
Normal file
21
src/pkgmgr/core/git/commands/add_all.py
Normal file
@@ -0,0 +1,21 @@
from __future__ import annotations

from ..errors import GitCommandError, GitRunError
from ..run import run


class GitAddAllError(GitCommandError):
    """Raised when `git add -A` fails."""


def add_all(*, cwd: str = ".", preview: bool = False) -> None:
    """
    Stage all changes (tracked + untracked).

    Equivalent to:
        git add -A
    """
    try:
        run(["add", "-A"], cwd=cwd, preview=preview)
    except GitRunError as exc:
        raise GitAddAllError("Failed to stage all changes with `git add -A`.", cwd=cwd) from exc
@@ -1,6 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ def add_remote(
|
||||
cwd=cwd,
|
||||
preview=preview,
|
||||
)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise GitAddRemoteError(
|
||||
f"Failed to add remote {name!r} with URL {url!r}.",
|
||||
cwd=cwd,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ def add_remote_push_url(
|
||||
cwd=cwd,
|
||||
preview=preview,
|
||||
)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise GitAddRemotePushUrlError(
|
||||
f"Failed to add push url {url!r} to remote {remote!r}.",
|
||||
cwd=cwd,
|
||||
|
||||
21
src/pkgmgr/core/git/commands/branch_move.py
Normal file
21
src/pkgmgr/core/git/commands/branch_move.py
Normal file
@@ -0,0 +1,21 @@
from __future__ import annotations

from ..errors import GitRunError, GitCommandError
from ..run import run


class GitBranchMoveError(GitCommandError):
    """Raised when renaming/moving a branch fails."""


def branch_move(branch: str, *, cwd: str = ".", preview: bool = False) -> None:
    """
    Rename the current branch to `branch`, creating it if needed.

    Equivalent to:
        git branch -M <branch>
    """
    try:
        run(["branch", "-M", branch], cwd=cwd, preview=preview)
    except GitRunError as exc:
        raise GitBranchMoveError(f"Failed to move/rename current branch to {branch!r}.", cwd=cwd) from exc
@@ -1,6 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ class GitCheckoutError(GitCommandError):
|
||||
def checkout(branch: str, cwd: str = ".") -> None:
|
||||
try:
|
||||
run(["checkout", branch], cwd=cwd)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise GitCheckoutError(
|
||||
f"Failed to checkout branch {branch!r}.",
|
||||
cwd=cwd,
|
||||
|
||||
32
src/pkgmgr/core/git/commands/clone.py
Normal file
32
src/pkgmgr/core/git/commands/clone.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import List
|
||||
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
class GitCloneError(GitCommandError):
|
||||
"""Raised when `git clone` fails."""
|
||||
|
||||
|
||||
def clone(
|
||||
args: List[str],
|
||||
*,
|
||||
cwd: str = ".",
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Execute `git clone` with caller-provided arguments.
|
||||
|
||||
Examples:
|
||||
["https://example.com/repo.git", "/path/to/dir"]
|
||||
["--depth", "1", "--single-branch", url, dest]
|
||||
"""
|
||||
try:
|
||||
run(["clone", *args], cwd=cwd, preview=preview)
|
||||
except GitRunError as exc:
|
||||
raise GitCloneError(
|
||||
f"Git clone failed with args={args!r}.",
|
||||
cwd=cwd,
|
||||
) from exc
|
||||
37
src/pkgmgr/core/git/commands/commit.py
Normal file
37
src/pkgmgr/core/git/commands/commit.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
class GitCommitError(GitCommandError):
|
||||
"""Raised when `git commit` fails."""
|
||||
|
||||
|
||||
def commit(
|
||||
message: str,
|
||||
*,
|
||||
cwd: str = ".",
|
||||
all: bool = False,
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Create a commit.
|
||||
|
||||
Equivalent to:
|
||||
git commit -m "<message>"
|
||||
or (if all=True):
|
||||
git commit -am "<message>"
|
||||
"""
|
||||
args = ["commit"]
|
||||
if all:
|
||||
args.append("-a")
|
||||
args += ["-m", message]
|
||||
|
||||
try:
|
||||
run(args, cwd=cwd, preview=preview)
|
||||
except GitRunError as exc:
|
||||
raise GitCommitError(
|
||||
"Failed to create commit.",
|
||||
cwd=cwd,
|
||||
) from exc
|
||||
@@ -1,6 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ def create_branch(branch: str, base: str, cwd: str = ".") -> None:
|
||||
"""
|
||||
try:
|
||||
run(["checkout", "-b", branch, base], cwd=cwd)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise GitCreateBranchError(
|
||||
f"Failed to create branch {branch!r} from base {base!r}.",
|
||||
cwd=cwd,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ def delete_local_branch(branch: str, cwd: str = ".", force: bool = False) -> Non
|
||||
flag = "-D" if force else "-d"
|
||||
try:
|
||||
run(["branch", flag, branch], cwd=cwd)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise GitDeleteLocalBranchError(
|
||||
f"Failed to delete local branch {branch!r} (flag {flag}).",
|
||||
cwd=cwd,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from ..errors import GitError, GitCommandError
|
||||
from ..errors import GitRunError, GitCommandError
|
||||
from ..run import run
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ class GitDeleteRemoteBranchError(GitCommandError):
|
||||
def delete_remote_branch(remote: str, branch: str, cwd: str = ".") -> None:
|
||||
try:
|
||||
run(["push", remote, "--delete", branch], cwd=cwd)
|
||||
except GitError as exc:
|
||||
except GitRunError as exc:
|
||||
raise GitDeleteRemoteBranchError(
|
||||
f"Failed to delete remote branch {branch!r} on {remote!r}.",
|
||||
cwd=cwd,
|
||||
|
||||
@@ -1,6 +1,6 @@
from __future__ import annotations

from ..errors import GitError, GitCommandError
from ..errors import GitRunError, GitCommandError
from ..run import run


@@ -8,10 +8,32 @@ class GitFetchError(GitCommandError):
    """Raised when fetching from a remote fails."""


def fetch(remote: str = "origin", cwd: str = ".") -> None:
def fetch(
    remote: str = "origin",
    *,
    prune: bool = False,
    tags: bool = False,
    force: bool = False,
    cwd: str = ".",
    preview: bool = False,
) -> None:
    """
    Fetch from a remote, optionally with prune/tags/force.

    Equivalent to:
        git fetch <remote> [--prune] [--tags] [--force]
    """
    args = ["fetch", remote]
    if prune:
        args.append("--prune")
    if tags:
        args.append("--tags")
    if force:
        args.append("--force")

    try:
        run(["fetch", remote], cwd=cwd)
    except GitError as exc:
        run(args, cwd=cwd, preview=preview)
    except GitRunError as exc:
        raise GitFetchError(
            f"Failed to fetch from remote {remote!r}.",
            cwd=cwd,
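For orientation, a minimal usage sketch of the extended fetch signature shown above; the module path is assumed to sit next to the other command modules in this diff, and the repository path is a placeholder:

from pkgmgr.core.git.commands.fetch import fetch, GitFetchError  # assumed module path

try:
    # Equivalent to: git fetch origin --prune --tags
    fetch("origin", prune=True, tags=True, cwd="/path/to/repo")
except GitFetchError as exc:
    print(f"[ERROR] {exc}")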
21
src/pkgmgr/core/git/commands/init.py
Normal file
@@ -0,0 +1,21 @@
from __future__ import annotations

from ..errors import GitRunError, GitCommandError
from ..run import run


class GitInitError(GitCommandError):
    """Raised when `git init` fails."""


def init(*, cwd: str = ".", preview: bool = False) -> None:
    """
    Initialize a repository.

    Equivalent to:
        git init
    """
    try:
        run(["init"], cwd=cwd, preview=preview)
    except GitRunError as exc:
        raise GitInitError("Failed to initialize git repository.", cwd=cwd) from exc
@@ -1,6 +1,6 @@
from __future__ import annotations

from ..errors import GitError, GitCommandError
from ..errors import GitRunError, GitCommandError
from ..run import run


@@ -11,7 +11,7 @@ class GitMergeError(GitCommandError):
def merge_no_ff(branch: str, cwd: str = ".") -> None:
    try:
        run(["merge", "--no-ff", branch], cwd=cwd)
    except GitError as exc:
    except GitRunError as exc:
        raise GitMergeError(
            f"Failed to merge branch {branch!r} with --no-ff.",
            cwd=cwd,

@@ -1,6 +1,6 @@
from __future__ import annotations

from ..errors import GitError, GitCommandError
from ..errors import GitRunError, GitCommandError
from ..run import run


@@ -11,7 +11,7 @@ class GitPullError(GitCommandError):
def pull(remote: str, branch: str, cwd: str = ".") -> None:
    try:
        run(["pull", remote, branch], cwd=cwd)
    except GitError as exc:
    except GitRunError as exc:
        raise GitPullError(
            f"Failed to pull {remote!r}/{branch!r}.",
            cwd=cwd,
35
src/pkgmgr/core/git/commands/pull_args.py
Normal file
@@ -0,0 +1,35 @@
from __future__ import annotations

from typing import List

from ..errors import GitRunError, GitCommandError
from ..run import run


class GitPullArgsError(GitCommandError):
    """Raised when `git pull` with arbitrary args fails."""


def pull_args(
    args: List[str] | None = None,
    *,
    cwd: str = ".",
    preview: bool = False,
) -> None:
    """
    Execute `git pull` with caller-provided arguments.

    Examples:
        [] -> git pull
        ["--ff-only"] -> git pull --ff-only
        ["--rebase"] -> git pull --rebase
        ["origin", "main"] -> git pull origin main
    """
    extra = args or []
    try:
        run(["pull", *extra], cwd=cwd, preview=preview)
    except GitRunError as exc:
        raise GitPullArgsError(
            f"Failed to run `git pull` with args={extra!r}.",
            cwd=cwd,
        ) from exc
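A short usage sketch for the new pull_args helper added above; the working directory below is only a placeholder:

from pkgmgr.core.git.commands.pull_args import GitPullArgsError, pull_args

try:
    # Equivalent to: git pull --rebase
    pull_args(["--rebase"], cwd="/path/to/repo")
except GitPullArgsError as exc:
    print(f"[ERROR] {exc}")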
24
src/pkgmgr/core/git/commands/pull_ff_only.py
Normal file
@@ -0,0 +1,24 @@
from __future__ import annotations

from ..errors import GitRunError, GitCommandError
from ..run import run


class GitPullFfOnlyError(GitCommandError):
    """Raised when pulling with --ff-only fails."""


def pull_ff_only(*, cwd: str = ".", preview: bool = False) -> None:
    """
    Pull using fast-forward only.

    Equivalent to:
        git pull --ff-only
    """
    try:
        run(["pull", "--ff-only"], cwd=cwd, preview=preview)
    except GitRunError as exc:
        raise GitPullFfOnlyError(
            "Failed to pull with --ff-only.",
            cwd=cwd,
        ) from exc
@@ -1,6 +1,6 @@
from __future__ import annotations

from ..errors import GitError, GitCommandError
from ..errors import GitRunError, GitCommandError
from ..run import run


@@ -8,10 +8,27 @@ class GitPushError(GitCommandError):
    """Raised when pushing to a remote fails."""


def push(remote: str, ref: str, cwd: str = ".") -> None:
def push(
    remote: str,
    ref: str,
    *,
    force: bool = False,
    cwd: str = ".",
    preview: bool = False,
) -> None:
    """
    Push a ref to a remote, optionally forced.

    Equivalent to:
        git push <remote> <ref> [--force]
    """
    args = ["push", remote, ref]
    if force:
        args.append("--force")

    try:
        run(["push", remote, ref], cwd=cwd)
    except GitError as exc:
        run(args, cwd=cwd, preview=preview)
    except GitRunError as exc:
        raise GitPushError(
            f"Failed to push ref {ref!r} to remote {remote!r}.",
            cwd=cwd,

@@ -1,6 +1,6 @@
from __future__ import annotations

from ..errors import GitError, GitCommandError
from ..errors import GitRunError, GitCommandError
from ..run import run


@@ -8,15 +8,22 @@ class GitPushUpstreamError(GitCommandError):
    """Raised when pushing a branch with upstream tracking fails."""


def push_upstream(remote: str, branch: str, cwd: str = ".") -> None:
def push_upstream(
    remote: str,
    branch: str,
    *,
    cwd: str = ".",
    preview: bool = False,
) -> None:
    """
    Push a branch and set upstream tracking.

    Equivalent to: git push -u <remote> <branch>
    Equivalent to:
        git push -u <remote> <branch>
    """
    try:
        run(["push", "-u", remote, branch], cwd=cwd)
    except GitError as exc:
        run(["push", "-u", remote, branch], cwd=cwd, preview=preview)
    except GitRunError as exc:
        raise GitPushUpstreamError(
            f"Failed to push branch {branch!r} to {remote!r} with upstream tracking.",
            cwd=cwd,

@@ -1,6 +1,6 @@
from __future__ import annotations

from ..errors import GitError, GitCommandError
from ..errors import GitRunError, GitCommandError
from ..run import run


@@ -35,7 +35,7 @@ def set_remote_url(
            cwd=cwd,
            preview=preview,
        )
    except GitError as exc:
    except GitRunError as exc:
        mode = "push" if push else "fetch"
        raise GitSetRemoteUrlError(
            f"Failed to set {mode} url for remote {remote!r} to {url!r}.",
30
src/pkgmgr/core/git/commands/tag_annotated.py
Normal file
@@ -0,0 +1,30 @@
from __future__ import annotations

from ..errors import GitRunError, GitCommandError
from ..run import run


class GitTagAnnotatedError(GitCommandError):
    """Raised when creating an annotated tag fails."""


def tag_annotated(
    tag: str,
    message: str,
    *,
    cwd: str = ".",
    preview: bool = False,
) -> None:
    """
    Create an annotated tag.

    Equivalent to:
        git tag -a <tag> -m "<message>"
    """
    try:
        run(["tag", "-a", tag, "-m", message], cwd=cwd, preview=preview)
    except GitRunError as exc:
        raise GitTagAnnotatedError(
            f"Failed to create annotated tag {tag!r}.",
            cwd=cwd,
        ) from exc

31
src/pkgmgr/core/git/commands/tag_force_annotated.py
Normal file
@@ -0,0 +1,31 @@
from __future__ import annotations

from ..errors import GitRunError, GitCommandError
from ..run import run


class GitTagForceAnnotatedError(GitCommandError):
    """Raised when forcing an annotated tag fails."""


def tag_force_annotated(
    name: str,
    target: str,
    message: str,
    *,
    cwd: str = ".",
    preview: bool = False,
) -> None:
    """
    Force-create an annotated tag pointing at a given target.

    Equivalent to:
        git tag -f -a <name> <target> -m "<message>"
    """
    try:
        run(["tag", "-f", "-a", name, target, "-m", message], cwd=cwd, preview=preview)
    except GitRunError as exc:
        raise GitTagForceAnnotatedError(
            f"Failed to force annotated tag {name!r} at {target!r}.",
            cwd=cwd,
        ) from exc
@@ -1,11 +1,19 @@
from __future__ import annotations


class GitError(RuntimeError):
class GitBaseError(RuntimeError):
    """Base error raised for Git related failures."""

class GitRunError(GitBaseError):
    """Raised when executing a git command fails."""

class GitCommandError(GitError):
class GitNotRepositoryError(GitBaseError):
    """Raised when the current working directory is not a git repository."""

class GitQueryError(GitRunError):
    """Base class for read-only git query failures."""

class GitCommandError(GitRunError):
    """
    Base class for state-changing git command failures.

@@ -13,4 +21,5 @@ class GitCommandError(GitError):
    """
    def __init__(self, message: str, *, cwd: str = ".") -> None:
        super().__init__(message)
        self.cwd = cwd
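To illustrate the reworked hierarchy, a hypothetical caller can catch the command-level error and read the cwd attribute that GitCommandError now always stores. This is only a sketch using the commit helper added above; the message and path are placeholders:

from pkgmgr.core.git.commands.commit import GitCommitError, commit

try:
    commit("chore: update changelog", all=True, cwd="/path/to/repo")
except GitCommitError as exc:
    # Every GitCommandError carries the working directory of the failed call.
    print(f"[ERROR] commit failed in {exc.cwd}: {exc}")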
@@ -1,18 +1,36 @@
from __future__ import annotations

from .get_changelog import GitChangelogQueryError, get_changelog
from .get_config_value import get_config_value
from .get_current_branch import get_current_branch
from .get_head_commit import get_head_commit
from .get_tags import get_tags
from .resolve_base_branch import GitBaseBranchNotFoundError, resolve_base_branch
from .list_remotes import list_remotes
from .get_latest_commit import get_latest_commit
from .get_latest_signing_key import (
    GitLatestSigningKeyQueryError,
    get_latest_signing_key,
)
from .get_remote_head_commit import (
    GitRemoteHeadCommitQueryError,
    get_remote_head_commit,
)
from .get_remote_push_urls import get_remote_push_urls
from .get_repo_root import get_repo_root
from .get_tags import get_tags
from .get_tags_at_ref import GitTagsAtRefQueryError, get_tags_at_ref
from .get_upstream_ref import get_upstream_ref
from .list_remotes import list_remotes
from .list_tags import list_tags
from .probe_remote_reachable import probe_remote_reachable
from .get_changelog import get_changelog, GitChangelogQueryError
from .get_tags_at_ref import get_tags_at_ref, GitTagsAtRefQueryError
from .resolve_base_branch import GitBaseBranchNotFoundError, resolve_base_branch

__all__ = [
    "get_current_branch",
    "get_head_commit",
    "get_latest_commit",
    "get_latest_signing_key",
    "GitLatestSigningKeyQueryError",
    "get_remote_head_commit",
    "GitRemoteHeadCommitQueryError",
    "get_tags",
    "resolve_base_branch",
    "GitBaseBranchNotFoundError",
@@ -23,4 +41,8 @@ __all__ = [
    "GitChangelogQueryError",
    "get_tags_at_ref",
    "GitTagsAtRefQueryError",
    "get_config_value",
    "get_upstream_ref",
    "list_tags",
    "get_repo_root",
]
@@ -2,11 +2,11 @@ from __future__ import annotations

from typing import Optional

from ..errors import GitError
from ..errors import GitQueryError, GitRunError
from ..run import run


class GitChangelogQueryError(GitError):
class GitChangelogQueryError(GitQueryError):
    """Raised when querying the git changelog fails."""


@@ -38,7 +38,7 @@ def get_changelog(

    try:
        return run(cmd, cwd=cwd)
    except GitError as exc:
    except GitRunError as exc:
        raise GitChangelogQueryError(
            f"Failed to query changelog for range {rev_range!r}.",
        ) from exc
34
src/pkgmgr/core/git/queries/get_config_value.py
Normal file
@@ -0,0 +1,34 @@
from __future__ import annotations

from typing import Optional

from ..errors import GitRunError
from ..run import run


def _is_missing_key_error(exc: GitRunError) -> bool:
    msg = str(exc).lower()

    # Ensure we only swallow the expected case for THIS command.
    if "git config --get" not in msg:
        return False

    # 'git config --get' returns exit code 1 when the key is not set.
    return "exit code: 1" in msg


def get_config_value(key: str, *, cwd: str = ".") -> Optional[str]:
    """
    Return a value from `git config --get <key>`, or None if not set.

    We keep core.git.run() strict (check=True) and interpret the known
    'not set' exit-code case here.
    """
    try:
        output = run(["config", "--get", key], cwd=cwd)
    except GitRunError as exc:
        if _is_missing_key_error(exc):
            return None
        raise

    output = output.strip()
    return output or None
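The behaviour of _is_missing_key_error is easiest to see from the caller's side. A small sketch, assuming the module path shown above; the key name is just an example:

from pkgmgr.core.git.queries.get_config_value import get_config_value

value = get_config_value("user.signingkey", cwd="/path/to/repo")
if value is None:
    # `git config --get` exited with code 1: the key is simply not set.
    print("user.signingkey is not configured")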
@@ -1,7 +1,7 @@
from __future__ import annotations

from typing import Optional
from ..errors import GitError
from ..errors import GitRunError
from ..run import run


@@ -13,6 +13,6 @@ def get_current_branch(cwd: str = ".") -> Optional[str]:
    """
    try:
        output = run(["rev-parse", "--abbrev-ref", "HEAD"], cwd=cwd)
    except GitError:
    except GitRunError:
        return None
    return output or None

@@ -2,7 +2,7 @@ from __future__ import annotations

from typing import Optional

from ..errors import GitError
from ..errors import GitRunError
from ..run import run


@@ -12,6 +12,6 @@ def get_head_commit(cwd: str = ".") -> Optional[str]:
    """
    try:
        output = run(["rev-parse", "HEAD"], cwd=cwd)
    except GitError:
    except GitRunError:
        return None
    return output or None
26
src/pkgmgr/core/git/queries/get_latest_commit.py
Normal file
@@ -0,0 +1,26 @@
from __future__ import annotations

from typing import Optional

from ..errors import GitRunError
from ..run import run


def get_latest_commit(cwd: str = ".") -> Optional[str]:
    """
    Return the latest commit hash for the repository in `cwd`.

    Equivalent to:
        git log -1 --format=%H

    Returns:
        The commit hash string, or None if it cannot be determined
        (e.g. not a git repo, no commits, or other git failure).
    """
    try:
        output = run(["log", "-1", "--format=%H"], cwd=cwd)
    except GitRunError:
        return None

    output = output.strip()
    return output or None

25
src/pkgmgr/core/git/queries/get_latest_signing_key.py
Normal file
@@ -0,0 +1,25 @@
from __future__ import annotations

from ..errors import GitQueryError, GitRunError
from ..run import run


class GitLatestSigningKeyQueryError(GitQueryError):
    """Raised when querying the latest commit signing key fails."""


def get_latest_signing_key(*, cwd: str = ".") -> str:
    """
    Return the GPG signing key ID of the latest commit, via:

        git log -1 --format=%GK

    Returns:
        The key id string (may be empty if commit is not signed).
    """
    try:
        return run(["log", "-1", "--format=%GK"], cwd=cwd).strip()
    except GitRunError as exc:
        raise GitLatestSigningKeyQueryError(
            "Failed to query latest signing key.",
        ) from exc
33
src/pkgmgr/core/git/queries/get_remote_head_commit.py
Normal file
@@ -0,0 +1,33 @@
from __future__ import annotations

from ..errors import GitQueryError, GitRunError
from ..run import run


class GitRemoteHeadCommitQueryError(GitQueryError):
    """Raised when querying the remote HEAD commit fails."""


def get_remote_head_commit(
    *,
    remote: str = "origin",
    ref: str = "HEAD",
    cwd: str = ".",
) -> str:
    """
    Return the commit hash for <remote> <ref> via:

        git ls-remote <remote> <ref>

    Returns:
        The commit hash string (may be empty if remote/ref yields no output).
    """
    try:
        out = run(["ls-remote", remote, ref], cwd=cwd).strip()
    except GitRunError as exc:
        raise GitRemoteHeadCommitQueryError(
            f"Failed to query remote head commit for {remote!r} {ref!r}.",
        ) from exc

    # minimal parsing: first token is the hash
    return (out.split()[0].strip() if out else "")
@@ -2,10 +2,8 @@ from __future__ import annotations

from typing import Set

from ..errors import GitError
from ..run import run


def get_remote_push_urls(remote: str, cwd: str = ".") -> Set[str]:
    """
    Return all push URLs configured for a remote.
@@ -13,7 +11,7 @@ def get_remote_push_urls(remote: str, cwd: str = ".") -> Set[str]:
    Equivalent to:
        git remote get-url --push --all <remote>

    Raises GitError if the command fails.
    Raises GitBaseError if the command fails.
    """
    output = run(["remote", "get-url", "--push", "--all", remote], cwd=cwd)
    if not output:
22
src/pkgmgr/core/git/queries/get_repo_root.py
Normal file
@@ -0,0 +1,22 @@
from __future__ import annotations

from typing import Optional

from ..errors import GitRunError
from ..run import run


def get_repo_root(*, cwd: str = ".") -> Optional[str]:
    """
    Return the git repository root directory (top-level), or None if not available.

    Equivalent to:
        git rev-parse --show-toplevel
    """
    try:
        out = run(["rev-parse", "--show-toplevel"], cwd=cwd)
    except GitRunError:
        return None

    out = out.strip()
    return out or None
@@ -2,7 +2,7 @@ from __future__ import annotations

from typing import List

from ..errors import GitError
from ..errors import GitRunError
from ..run import run


@@ -14,11 +14,10 @@ def get_tags(cwd: str = ".") -> List[str]:
    """
    try:
        output = run(["tag"], cwd=cwd)
    except GitError as exc:
    except GitRunError as exc:
        # If the repo is not a git repo, surface a clear error.
        if "not a git repository" in str(exc):
            raise
        # Otherwise, treat as "no tags" (e.g., empty stdout).
        return []

    if not output:
@@ -2,11 +2,11 @@ from __future__ import annotations

from typing import List

from ..errors import GitError
from ..errors import GitQueryError, GitRunError
from ..run import run


class GitTagsAtRefQueryError(GitError):
class GitTagsAtRefQueryError(GitQueryError):
    """Raised when querying tags for a ref fails."""


@@ -19,7 +19,7 @@ def get_tags_at_ref(ref: str, *, cwd: str = ".") -> List[str]:
    """
    try:
        output = run(["tag", "--points-at", ref], cwd=cwd)
    except GitError as exc:
    except GitRunError as exc:
        raise GitTagsAtRefQueryError(
            f"Failed to query tags at ref {ref!r}.",
        ) from exc
25
src/pkgmgr/core/git/queries/get_upstream_ref.py
Normal file
@@ -0,0 +1,25 @@
from __future__ import annotations

from typing import Optional

from ..errors import GitRunError
from ..run import run


def get_upstream_ref(*, cwd: str = ".") -> Optional[str]:
    """
    Return the configured upstream ref for the current branch, or None if none.

    Equivalent to:
        git rev-parse --abbrev-ref --symbolic-full-name @{u}
    """
    try:
        out = run(
            ["rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{u}"],
            cwd=cwd,
        )
    except GitRunError:
        return None

    out = out.strip()
    return out or None
@@ -2,7 +2,6 @@ from __future__ import annotations

from typing import List

from ..errors import GitError
from ..run import run


@@ -10,7 +9,7 @@ def list_remotes(cwd: str = ".") -> List[str]:
    """
    Return a list of configured git remotes (e.g. ['origin', 'upstream']).

    Raises GitError if the command fails.
    Raises GitBaseError if the command fails.
    """
    output = run(["remote"], cwd=cwd)
    if not output:
18
src/pkgmgr/core/git/queries/list_tags.py
Normal file
@@ -0,0 +1,18 @@
from __future__ import annotations

from typing import List

from ..run import run


def list_tags(pattern: str = "*", *, cwd: str = ".") -> List[str]:
    """
    List tags matching a pattern.

    Equivalent to:
        git tag --list <pattern>
    """
    out = run(["tag", "--list", pattern], cwd=cwd)
    if not out:
        return []
    return [line.strip() for line in out.splitlines() if line.strip()]
@@ -1,6 +1,6 @@
from __future__ import annotations

from ..errors import GitError
from ..errors import GitRunError
from ..run import run


@@ -17,5 +17,5 @@ def probe_remote_reachable(url: str, cwd: str = ".") -> bool:
    try:
        run(["ls-remote", "--exit-code", url], cwd=cwd)
        return True
    except GitError:
    except GitRunError:
        return False
@@ -1,15 +1,14 @@
# src/pkgmgr/core/git/queries/resolve_base_branch.py
from __future__ import annotations

from ..errors import GitError
from ..errors import GitQueryError, GitRunError
from ..run import run


class GitBaseBranchNotFoundError(GitError):
class GitBaseBranchNotFoundError(GitQueryError):
    """Raised when neither preferred nor fallback base branch exists."""


def _is_branch_missing_error(exc: GitError) -> bool:
def _is_branch_missing_error(exc: GitRunError) -> bool:
    """
    Heuristic: Detect errors that indicate the branch/ref does not exist.

@@ -46,15 +45,15 @@ def resolve_base_branch(
    fall back to `fallback` (default: master).

    Raises GitBaseBranchNotFoundError if neither exists.
    Raises GitError for other git failures (e.g., not a git repository).
    Raises GitRunError for other git failures (e.g., not a git repository).
    """
    last_missing_error: GitError | None = None
    last_missing_error: GitRunError | None = None

    for candidate in (preferred, fallback):
        try:
            run(["rev-parse", "--verify", candidate], cwd=cwd)
            return candidate
        except GitError as exc:
        except GitRunError as exc:
            if _is_branch_missing_error(exc):
                last_missing_error = exc
                continue
@@ -3,7 +3,12 @@ from __future__ import annotations
import subprocess
from typing import List

from .errors import GitError
from .errors import GitRunError, GitNotRepositoryError


def _is_not_repo_error(stderr: str) -> bool:
    msg = (stderr or "").lower()
    return "not a git repository" in msg


def run(
@@ -17,7 +22,7 @@ def run(

    If preview=True, the command is printed but NOT executed.

    Raises GitError if execution fails.
    Raises GitRunError (or a subclass) if execution fails.
    """
    cmd = ["git"] + args
    cmd_str = " ".join(cmd)
@@ -36,11 +41,19 @@ def run(
            text=True,
        )
    except subprocess.CalledProcessError as exc:
        raise GitError(
        stderr = exc.stderr or ""
        if _is_not_repo_error(stderr):
            raise GitNotRepositoryError(
                f"Not a git repository: {cwd!r}\n"
                f"Command: {cmd_str}\n"
                f"STDERR:\n{stderr}"
            ) from exc

        raise GitRunError(
            f"Git command failed in {cwd!r}: {cmd_str}\n"
            f"Exit code: {exc.returncode}\n"
            f"STDOUT:\n{exc.stdout}\n"
            f"STDERR:\n{exc.stderr}"
            f"STDERR:\n{stderr}"
        ) from exc

    return result.stdout.strip()
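A brief sketch of what the new error mapping in run() means for callers: a "not a git repository" failure surfaces as GitNotRepositoryError, every other non-zero exit as GitRunError. The module path for run is assumed from the relative imports in this diff, and the path below is a placeholder:

from pkgmgr.core.git.errors import GitNotRepositoryError, GitRunError
from pkgmgr.core.git.run import run  # assumed module path

try:
    head = run(["rev-parse", "HEAD"], cwd="/tmp/not-a-repo")
except GitNotRepositoryError:
    print("[ERROR] the directory is not a Git repository")
except GitRunError as exc:
    print(f"[ERROR] git failed: {exc}")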
@@ -1,48 +1,37 @@
import subprocess
from __future__ import annotations

from pkgmgr.core.git.queries import (
    get_head_commit,
    get_latest_signing_key,
    get_remote_head_commit,
    GitLatestSigningKeyQueryError,
    GitRemoteHeadCommitQueryError,
)


def verify_repository(repo, repo_dir, mode="local", no_verification=False):
    """
    Verifies the repository based on its 'verified' field.
    _ = no_verification

    The 'verified' field can be a dictionary with the following keys:
        commit: The expected commit hash.
        gpg_keys: A list of valid GPG key IDs (at least one must match the signing key).

    If mode == "pull", the remote HEAD commit is checked via "git ls-remote origin HEAD".
    Otherwise (mode "local", used for install and clone), the local HEAD commit is checked via "git rev-parse HEAD".

    Returns a tuple:
        (verified_ok, error_details, commit_hash, signing_key)
    - verified_ok: True if the verification passed (or no verification info is set), False otherwise.
    - error_details: A list of error messages for any failed checks.
    - commit_hash: The obtained commit hash.
    - signing_key: The GPG key ID that signed the latest commit (obtained via "git log -1 --format=%GK").
    """
    verified_info = repo.get("verified")
    if not verified_info:
        # Nothing to verify.

    commit_hash = ""
    signing_key = ""

    # best-effort info collection
    try:
        if mode == "pull":
            try:
                result = subprocess.run("git ls-remote origin HEAD", cwd=repo_dir, shell=True, check=True,
                                        stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
                commit_hash = result.stdout.split()[0].strip()
            except Exception:
                commit_hash = ""
            commit_hash = get_remote_head_commit(cwd=repo_dir)
        else:
            try:
                result = subprocess.run("git rev-parse HEAD", cwd=repo_dir, shell=True, check=True,
                                        stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
                commit_hash = result.stdout.strip()
            except Exception:
                commit_hash = get_head_commit(cwd=repo_dir) or ""
    except GitRemoteHeadCommitQueryError:
        commit_hash = ""

    try:
        result = subprocess.run(["git", "log", "-1", "--format=%GK"], cwd=repo_dir, shell=False, check=True,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        signing_key = result.stdout.strip()
    except Exception:
        signing_key = get_latest_signing_key(cwd=repo_dir)
    except GitLatestSigningKeyQueryError:
        signing_key = ""

    if not verified_info:
        return True, [], commit_hash, signing_key

    expected_commit = None
@@ -51,47 +40,42 @@ def verify_repository(repo, repo_dir, mode="local", no_verification=False):
        expected_commit = verified_info.get("commit")
        expected_gpg_keys = verified_info.get("gpg_keys")
    else:
        # If verified is a plain string, treat it as the expected commit.
        expected_commit = verified_info

    error_details = []
    error_details: list[str] = []

    # Get commit hash according to the mode.
    commit_hash = ""
    # strict retrieval when verification is configured
    if mode == "pull":
        try:
            result = subprocess.run("git ls-remote origin HEAD", cwd=repo_dir, shell=True, check=True,
                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
            commit_hash = result.stdout.split()[0].strip()
        except Exception as e:
            error_details.append(f"Error retrieving remote commit: {e}")
            commit_hash = get_remote_head_commit(cwd=repo_dir)
        except GitRemoteHeadCommitQueryError as exc:
            error_details.append(str(exc))
            commit_hash = ""
    else:
        try:
            result = subprocess.run("git rev-parse HEAD", cwd=repo_dir, shell=True, check=True,
                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
            commit_hash = result.stdout.strip()
        except Exception as e:
            error_details.append(f"Error retrieving local commit: {e}")
            commit_hash = get_head_commit(cwd=repo_dir) or ""

    # Get the signing key using "git log -1 --format=%GK"
    signing_key = ""
    try:
        result = subprocess.run(["git", "log", "-1", "--format=%GK"], cwd=repo_dir, shell=False, check=True,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        signing_key = result.stdout.strip()
    except Exception as e:
        error_details.append(f"Error retrieving signing key: {e}")
        signing_key = get_latest_signing_key(cwd=repo_dir)
    except GitLatestSigningKeyQueryError as exc:
        error_details.append(str(exc))
        signing_key = ""

    commit_check_passed = True
    gpg_check_passed = True

    if expected_commit:
        if commit_hash != expected_commit:
        if not commit_hash:
            commit_check_passed = False
            error_details.append(f"Expected commit: {expected_commit}, but could not determine current commit.")
        elif commit_hash != expected_commit:
            commit_check_passed = False
            error_details.append(f"Expected commit: {expected_commit}, found: {commit_hash}")

    if expected_gpg_keys:
        if signing_key not in expected_gpg_keys:
        if not signing_key:
            gpg_check_passed = False
            error_details.append(f"Expected one of GPG keys: {expected_gpg_keys}, but no signing key was found.")
        elif signing_key not in expected_gpg_keys:
            gpg_check_passed = False
            error_details.append(f"Expected one of GPG keys: {expected_gpg_keys}, found: {signing_key}")
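To make the verification contract above concrete, here is a hypothetical call with an illustrative 'verified' block; the commit hash and key ID are invented, and verify_repository is assumed to be imported from its module:

repo = {
    "verified": {
        "commit": "0123456789abcdef0123456789abcdef01234567",  # example hash
        "gpg_keys": ["ABCDEF0123456789"],                       # example key id
    },
}

ok, errors, commit_hash, signing_key = verify_repository(
    repo,
    "/path/to/Repositories/github.com/acme/repo",
    mode="local",
)
if not ok:
    for detail in errors:
        print(f"[ERROR] {detail}")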
@@ -89,7 +89,7 @@ class TestIntegrationChangelogCommands(unittest.TestCase):
    """
    Run 'pkgmgr changelog HEAD~5..HEAD' inside the pkgmgr repo.
    Even if HEAD~5 does not exist, the command should handle the
    GitError internally and exit with code 0
    GitBaseError internally and exit with code 0
    (an [ERROR] line is printed in that case).

    Skipped if the pkgmgr repo is not available locally.
@@ -1,42 +0,0 @@
from __future__ import annotations

import io
import unittest
from contextlib import redirect_stdout
from unittest.mock import patch

from pkgmgr.actions.repository.create import create_repo


class TestE2ECreateRepoPreviewOutput(unittest.TestCase):
    def test_create_repo_preview_prints_expected_steps(self) -> None:
        cfg = {"directories": {"repositories": "/tmp/Repositories"}, "repositories": []}

        out = io.StringIO()
        with (
            redirect_stdout(out),
            patch("pkgmgr.actions.repository.create.os.path.exists", return_value=False),
            patch("pkgmgr.actions.repository.create.generate_alias", return_value="repo"),
            patch("pkgmgr.actions.repository.create.save_user_config"),
            patch("pkgmgr.actions.repository.create.os.makedirs"),
            patch("pkgmgr.actions.repository.create.render_default_templates"),
            patch("pkgmgr.actions.repository.create.write_mirrors_file"),
            patch("pkgmgr.actions.repository.create.setup_mirrors"),
            patch("pkgmgr.actions.repository.create.subprocess.run"),
        ):
            create_repo(
                "github.com/acme/repo",
                cfg,
                "/tmp/user.yml",
                "/tmp/bin",
                remote=False,
                preview=True,
            )

        s = out.getvalue()
        self.assertIn("[Preview] Would save user config:", s)
        self.assertIn("[Preview] Would ensure directory exists:", s)


if __name__ == "__main__":
    unittest.main()
75
tests/integration/test_repos_create_preview_output.py
Normal file
@@ -0,0 +1,75 @@
from __future__ import annotations

import io
import unittest
from contextlib import redirect_stdout
from unittest.mock import patch

from pkgmgr.actions.repository.create import create_repo


class TestCreateRepoPreviewOutput(unittest.TestCase):
    def test_create_repo_preview_prints_expected_steps(self) -> None:
        cfg = {"directories": {"repositories": "/tmp/Repositories"}, "repositories": []}

        out = io.StringIO()
        with (
            redirect_stdout(out),
            patch(
                "pkgmgr.actions.repository.create.config_writer.generate_alias",
                return_value="repo",
            ),
            patch(
                "pkgmgr.actions.repository.create.config_writer.save_user_config",
            ),
            patch(
                "pkgmgr.actions.repository.create.config_writer.os.path.exists",
                return_value=False,
            ),
            patch(
                "pkgmgr.actions.repository.create.service.os.makedirs",
            ),
            patch(
                "pkgmgr.actions.repository.create.templates.TemplateRenderer._resolve_templates_dir",
                return_value="/tpl",
            ),
            patch(
                "pkgmgr.actions.repository.create.templates.os.walk",
                return_value=[("/tpl", [], ["README.md.j2"])],
            ),
            patch(
                "pkgmgr.actions.repository.create.git_bootstrap.init",
            ),
            patch(
                "pkgmgr.actions.repository.create.git_bootstrap.add_all",
            ),
            patch(
                "pkgmgr.actions.repository.create.git_bootstrap.commit",
            ),
            patch(
                "pkgmgr.actions.repository.create.mirrors.write_mirrors_file",
            ),
            patch(
                "pkgmgr.actions.repository.create.mirrors.setup_mirrors",
            ),
            patch(
                "pkgmgr.actions.repository.create.service.get_config_value",
                return_value=None,
            ),
        ):
            create_repo(
                "github.com/acme/repo",
                cfg,
                "/tmp/user.yml",
                "/tmp/bin",
                remote=False,
                preview=True,
            )

        s = out.getvalue()
        self.assertIn("[Preview] Would add repository to config:", s)
        self.assertIn("[Preview] Would ensure directory exists:", s)


if __name__ == "__main__":
    unittest.main()
115
tests/integration/test_repos_create_pypi_not_in_git_config.py
Normal file
@@ -0,0 +1,115 @@
# tests/integration/test_repos_create_pypi_not_in_git_config.py
from __future__ import annotations

import os
import subprocess
import tempfile
import unittest
from pathlib import Path
from unittest.mock import patch

from pkgmgr.actions.repository.create import create_repo


class TestCreateRepoPypiNotInGitConfig(unittest.TestCase):
    def test_create_repo_writes_pypi_to_mirrors_but_not_git_config(self) -> None:
        with tempfile.TemporaryDirectory() as tmp:
            tmp_path = Path(tmp)

            # Repositories base dir used by create flow
            repos_base = tmp_path / "Repositories"
            user_cfg = tmp_path / "user.yml"
            bin_dir = tmp_path / "bin"
            bin_dir.mkdir(parents=True, exist_ok=True)

            cfg = {
                "directories": {"repositories": str(repos_base)},
                "repositories": [],
            }

            # Provide a minimal templates directory so TemplateRenderer can run
            tpl_dir = tmp_path / "tpl"
            tpl_dir.mkdir(parents=True, exist_ok=True)
            (tpl_dir / "README.md.j2").write_text(
                "# {{ repository }}\n", encoding="utf-8"
            )

            # Expected repo dir for identifier github.com/acme/repo
            repo_dir = repos_base / "github.com" / "acme" / "repo"

            with (
                # Avoid any real network calls during mirror "remote probing"
                patch(
                    "pkgmgr.actions.mirror.setup_cmd.probe_remote_reachable",
                    return_value=True,
                ),
                # Force templates to come from our temp directory
                patch(
                    "pkgmgr.actions.repository.create.templates.TemplateRenderer._resolve_templates_dir",
                    return_value=str(tpl_dir),
                ),
                # Make git commit deterministic without depending on global git config
                patch.dict(
                    os.environ,
                    {
                        "GIT_AUTHOR_NAME": "Test Author",
                        "GIT_AUTHOR_EMAIL": "author@example.invalid",
                        "GIT_COMMITTER_NAME": "Test Author",
                        "GIT_COMMITTER_EMAIL": "author@example.invalid",
                    },
                    clear=False,
                ),
            ):
                create_repo(
                    "github.com/acme/repo",
                    cfg,
                    str(user_cfg),
                    str(bin_dir),
                    remote=False,
                    preview=False,
                )

            # --- Assertions: MIRRORS file ---
            mirrors_file = repo_dir / "MIRRORS"
            self.assertTrue(mirrors_file.exists(), "MIRRORS file was not created")

            mirrors_content = mirrors_file.read_text(encoding="utf-8")
            self.assertIn(
                "https://pypi.org/project/repo/",
                mirrors_content,
                "PyPI mirror entry must exist in MIRRORS",
            )
            self.assertIn(
                "git@github.com:acme/repo.git",
                mirrors_content,
                "origin SSH URL must exist in MIRRORS",
            )

            # --- Assertions: git config must NOT contain PyPI ---
            git_config = repo_dir / ".git" / "config"
            self.assertTrue(git_config.exists(), ".git/config was not created")

            git_config_content = git_config.read_text(encoding="utf-8")
            self.assertNotIn(
                "pypi.org/project",
                git_config_content,
                "PyPI must never be written into git config",
            )

            # --- Assertions: origin remote exists and points to SSH ---
            remotes = subprocess.check_output(
                ["git", "-C", str(repo_dir), "remote"],
                text=True,
            ).splitlines()

            self.assertIn("origin", remotes, "origin remote was not created")

            remote_v = subprocess.check_output(
                ["git", "-C", str(repo_dir), "remote", "-v"],
                text=True,
            )
            self.assertIn("git@github.com:acme/repo.git", remote_v)


if __name__ == "__main__":
    unittest.main()
106
tests/integration/test_token_resolver_flow.py
Normal file
@@ -0,0 +1,106 @@
from __future__ import annotations

import unittest
from unittest.mock import patch

from pkgmgr.core.credentials.resolver import TokenResolver
from pkgmgr.core.credentials.types import TokenResult


class TestTokenResolverIntegration(unittest.TestCase):
    def test_full_resolution_flow_with_invalid_gh_and_keyring_then_prompt(self) -> None:
        """
        Full integration scenario:

        - ENV provides nothing
        - GitHub CLI (gh) is available and returns a token, but it is INVALID
        - Keyring contains a token, but it is INVALID
        - Interactive prompt provides a NEW token
        - New token is ACCEPTED and OVERWRITES the keyring entry
        """

        resolver = TokenResolver()

        # ------------------------------------------------------------------
        # 1) ENV: empty
        # ------------------------------------------------------------------
        with patch.dict("os.environ", {}, clear=True):

            # ------------------------------------------------------------------
            # 2) GH CLI is available
            # ------------------------------------------------------------------
            with patch(
                "pkgmgr.core.credentials.providers.gh.shutil.which",
                return_value="/usr/bin/gh",
            ):
                with patch(
                    "pkgmgr.core.credentials.providers.gh.subprocess.check_output",
                    return_value="gh-invalid-token\n",
                ):

                    # ------------------------------------------------------------------
                    # 3) Keyring returns an existing (invalid) token
                    # ------------------------------------------------------------------
                    with patch(
                        "pkgmgr.core.credentials.providers.keyring._import_keyring"
                    ) as mock_import_keyring:

                        mock_keyring = mock_import_keyring.return_value
                        mock_keyring.get_password.return_value = "keyring-invalid-token"

                        # ------------------------------------------------------------------
                        # 4) Prompt is allowed and returns a NEW token
                        # ------------------------------------------------------------------
                        with patch(
                            "pkgmgr.core.credentials.providers.prompt.sys.stdin.isatty",
                            return_value=True,
                        ):
                            with patch(
                                "pkgmgr.core.credentials.providers.prompt.getpass",
                                return_value="new-valid-token",
                            ):

                                # ------------------------------------------------------------------
                                # 5) Validation logic:
                                #    - gh token invalid
                                #    - keyring token invalid
                                #    - prompt token is NOT validated (by design)
                                # ------------------------------------------------------------------
                                def validate_side_effect(
                                    provider_kind: str,
                                    host: str,
                                    token: str,
                                ) -> bool:
                                    return False  # gh + keyring invalid

                                with patch(
                                    "pkgmgr.core.credentials.resolver.validate_token",
                                    side_effect=validate_side_effect,
                                ) as validate_mock:

                                    result = resolver.get_token(
                                        provider_kind="github",
                                        host="github.com",
                                    )

        # ----------------------------------------------------------------------
        # Assertions
        # ----------------------------------------------------------------------
        self.assertIsInstance(result, TokenResult)
        self.assertEqual(result.token, "new-valid-token")
        self.assertEqual(result.source, "prompt")

        # validate_token was called ONLY for gh and keyring
        validated_tokens = [call.args[2] for call in validate_mock.call_args_list]
        self.assertIn("gh-invalid-token", validated_tokens)
        self.assertIn("keyring-invalid-token", validated_tokens)
        self.assertNotIn("new-valid-token", validated_tokens)

        # Keyring must be overwritten with the new token
        mock_keyring.set_password.assert_called_once()
        service, username, stored_token = mock_keyring.set_password.call_args.args
        self.assertEqual(stored_token, "new-valid-token")


if __name__ == "__main__":
    unittest.main()
@@ -2,7 +2,7 @@ import unittest
from unittest.mock import patch

from pkgmgr.actions.branch.close_branch import close_branch
from pkgmgr.core.git.errors import GitError
from pkgmgr.core.git.errors import GitRunError
from pkgmgr.core.git.commands import GitDeleteRemoteBranchError


@@ -90,7 +90,7 @@ class TestCloseBranch(unittest.TestCase):
        delete_local_branch.assert_called_once_with("feature-x", cwd=".", force=False)
        delete_remote_branch.assert_called_once_with("origin", "feature-x", cwd=".")

    @patch("pkgmgr.actions.branch.close_branch.get_current_branch", side_effect=GitError("fail"))
    @patch("pkgmgr.actions.branch.close_branch.get_current_branch", side_effect=GitRunError("fail"))
    def test_close_branch_errors_if_cannot_detect_branch(self, _current) -> None:
        with self.assertRaises(RuntimeError):
            close_branch(None)

@@ -2,7 +2,7 @@ import unittest
from unittest.mock import patch

from pkgmgr.actions.branch.drop_branch import drop_branch
from pkgmgr.core.git.errors import GitError
from pkgmgr.core.git.errors import GitRunError
from pkgmgr.core.git.commands import GitDeleteRemoteBranchError


@@ -50,7 +50,7 @@ class TestDropBranch(unittest.TestCase):
        delete_local.assert_called_once_with("feature-x", cwd=".", force=False)
        delete_remote.assert_called_once_with("origin", "feature-x", cwd=".")

    @patch("pkgmgr.actions.branch.drop_branch.get_current_branch", side_effect=GitError("fail"))
    @patch("pkgmgr.actions.branch.drop_branch.get_current_branch", side_effect=GitRunError("fail"))
    def test_drop_branch_errors_if_no_branch_detected(self, _current) -> None:
        with self.assertRaises(RuntimeError):
            drop_branch(None)
Some files were not shown because too many files have changed in this diff.