Compare commits

14 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 28df54503e |  |
|  | aa489811e3 |  |
|  | f66af0157b |  |
|  | b0b3ccf5aa |  |
|  | e178afde31 |  |
|  | 9802293871 |  |
|  | a2138c9985 |  |
|  | 10998e50ad |  |
|  | a20814cb37 |  |
|  | feb5ba267f |  |
|  | 591be4ef35 |  |
|  | 3e6ef0fd68 |  |
|  | 3d5c770def |  |
|  | f4339a746a |  |
.github/workflows/test-virgin-user.yml (vendored, 8 changes)

@@ -49,11 +49,13 @@ jobs:
           chown -R dev:dev /nix
           chmod 0755 /nix
           chmod 1777 /nix/store
-          sudo -H -u dev env HOME=/home/dev PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 bash -lc "
+          sudo -H -u dev env \
+            HOME=/home/dev \
+            NIX_CONFIG="$NIX_CONFIG" \
+            PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 \
+            bash -lc "
             set -euo pipefail
             cd /opt/src/pkgmgr
 
             make setup-venv
             . \"\$HOME/.venvs/pkgmgr/bin/activate\"
CHANGELOG.md (24 changes)

@@ -1,3 +1,27 @@
+## [1.9.2] - 2025-12-21
+
+* Default configuration files are now packaged and loaded correctly when no user config exists, while fully preserving custom user configurations.
+
+## [1.9.1] - 2025-12-21
+
+* Fixed installation issues and improved loading of default configuration files.
+
+## [1.9.0] - 2025-12-20
+
+* New ***mirror visibility*** command to set remote Git repositories to ***public*** or ***private***.
+* New ***--public*** flag for ***mirror provision*** to create repositories and immediately make them public.
+* All configured git mirrors are now provisioned.
+
+## [1.8.7] - 2025-12-19
+
+* **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
+* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
+* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
+
 ## [1.8.6] - 2025-12-17
 
 * Prevent Rate Limits during GitHub Nix Setups
flake.nix

@@ -32,7 +32,7 @@
     rec {
       pkgmgr = pyPkgs.buildPythonApplication {
         pname = "package-manager";
-        version = "1.8.6";
+        version = "1.9.2";
 
         # Use the git repo as source
         src = ./.;
PKGBUILD

@@ -1,7 +1,7 @@
 # Maintainer: Kevin Veen-Birkenbach <info@veen.world>
 
 pkgname=package-manager
-pkgver=1.8.6
+pkgver=1.9.2
 pkgrel=1
 pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
 arch=('any')
debian/changelog

@@ -1,3 +1,31 @@
+package-manager (1.9.2-1) unstable; urgency=medium
+
+  * Default configuration files are now packaged and loaded correctly when no user config exists, while fully preserving custom user configurations.
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Sun, 21 Dec 2025 15:30:22 +0100
+
+package-manager (1.9.1-1) unstable; urgency=medium
+
+  * Fixed installation issues and improved loading of default configuration files.
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Sun, 21 Dec 2025 13:38:58 +0100
+
+package-manager (1.9.0-1) unstable; urgency=medium
+
+  * New ***mirror visibility*** command to set remote Git repositories to ***public*** or ***private***.
+  * New ***--public*** flag for ***mirror provision*** to create repositories and immediately make them public.
+  * All configured git mirrors are now provisioned.
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Sat, 20 Dec 2025 14:37:58 +0100
+
+package-manager (1.8.7-1) unstable; urgency=medium
+
+  * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
+  * **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
+  * **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
+
+ -- Kevin Veen-Birkenbach <kevin@veen.world>  Fri, 19 Dec 2025 14:15:47 +0100
+
 package-manager (1.8.6-1) unstable; urgency=medium
 
   * Prevent Rate Limits during GitHub Nix Setups
RPM spec file

@@ -1,5 +1,5 @@
 Name:           package-manager
-Version:        1.8.6
+Version:        1.9.2
 Release:        1%{?dist}
 Summary:        Wrapper that runs Kevin's package-manager via Nix flake
 

@@ -74,6 +74,22 @@ echo ">>> package-manager removed. Nix itself was not removed."
 /usr/lib/package-manager/
 
 %changelog
+* Sun Dec 21 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.2-1
+- Default configuration files are now packaged and loaded correctly when no user config exists, while fully preserving custom user configurations.
+
+* Sun Dec 21 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.1-1
+- Fixed installation issues and improved loading of default configuration files.
+
+* Sat Dec 20 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.0-1
+- New ***mirror visibility*** command to set remote Git repositories to ***public*** or ***private***.
+- New ***--public*** flag for ***mirror provision*** to create repositories and immediately make them public.
+- All configured git mirrors are now provisioned.
+
+* Fri Dec 19 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.7-1
+- **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
+- **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
+- **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
+
 * Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.6-1
 - Prevent Rate Limits during GitHub Nix Setups
pyproject.toml

@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "kpmx"
-version = "1.8.6"
+version = "1.9.2"
 description = "Kevin's package-manager tool (pkgmgr)"
 readme = "README.md"
 requires-python = ">=3.9"

@@ -43,11 +43,12 @@ pkgmgr = "pkgmgr.cli:main"
 # -----------------------------
 # Source layout: all packages live under "src/"
 [tool.setuptools]
-package-dir = { "" = "src", "config" = "config" }
+package-dir = { "" = "src" }
+include-package-data = true
 
 [tool.setuptools.packages.find]
-where = ["src", "."]
-include = ["pkgmgr*", "config*"]
+where = ["src"]
+include = ["pkgmgr*"]
 
 [tool.setuptools.package-data]
-"config" = ["defaults.yaml"]
+"pkgmgr.config" = ["*.yml", "*.yaml"]
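This packaging change moves the default configuration into the `pkgmgr.config` package as package data, matching the 1.9.2 changelog entry. A minimal sketch of how such packaged defaults could be read at runtime; the `defaults.yaml` file name inside `pkgmgr.config` is an assumption carried over from the old `"config" = ["defaults.yaml"]` entry, not something this diff confirms:

```python
# Minimal sketch: load a YAML default config shipped as package data.
from importlib import resources

import yaml


def load_packaged_defaults() -> dict:
    # Assumption: pkgmgr/config/ ships a defaults.yaml as package data.
    ref = resources.files("pkgmgr.config").joinpath("defaults.yaml")
    with ref.open("r", encoding="utf-8") as f:
        return yaml.safe_load(f) or {}
```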
@@ -48,9 +48,13 @@ def close_branch(
 
     # Confirmation
     if not force:
-        answer = input(
-            f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): "
-        ).strip().lower()
+        answer = (
+            input(
+                f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): "
+            )
+            .strip()
+            .lower()
+        )
         if answer != "y":
             print("Aborted closing branch.")
             return
@@ -41,9 +41,13 @@ def drop_branch(
 
     # Confirmation
    if not force:
-        answer = input(
-            f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): "
-        ).strip().lower()
+        answer = (
+            input(
+                f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): "
+            )
+            .strip()
+            .lower()
+        )
         if answer != "y":
             print("Aborted dropping branch.")
             return
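Both branch commands now format the same prompt-and-normalize pattern identically. A hypothetical shared helper, not part of this diff, could express it once:

```python
def confirm(prompt: str) -> bool:
    """Ask a y/N question; return True only on an explicit 'y'."""
    return input(prompt).strip().lower() == "y"


# Usage mirroring close_branch / drop_branch:
# if not force and not confirm(f"Delete branch '{name}'? (y/N): "):
#     print("Aborted.")
#     return
```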
@@ -2,6 +2,7 @@ import yaml
 import os
 from pkgmgr.core.config.save import save_user_config
 
 
-def interactive_add(config,USER_CONFIG_PATH:str):
+def interactive_add(config, USER_CONFIG_PATH: str):
     """Interactively prompt the user to add a new repository entry to the user config."""
     print("Adding a new repository configuration entry.")

@@ -9,7 +10,9 @@ def interactive_add(config,USER_CONFIG_PATH:str):
     new_entry["provider"] = input("Provider (e.g., github.com): ").strip()
     new_entry["account"] = input("Account (e.g., yourusername): ").strip()
     new_entry["repository"] = input("Repository name (e.g., mytool): ").strip()
-    new_entry["command"] = input("Command (optional, leave blank to auto-detect): ").strip()
+    new_entry["command"] = input(
+        "Command (optional, leave blank to auto-detect): "
+    ).strip()
     new_entry["description"] = input("Description (optional): ").strip()
     new_entry["replacement"] = input("Replacement (optional): ").strip()
     new_entry["alias"] = input("Alias (optional): ").strip()

@@ -25,7 +28,7 @@ def interactive_add(config,USER_CONFIG_PATH:str):
     confirm = input("Add this entry to user config? (y/N): ").strip().lower()
     if confirm == "y":
         if os.path.exists(USER_CONFIG_PATH):
-            with open(USER_CONFIG_PATH, 'r') as f:
+            with open(USER_CONFIG_PATH, "r") as f:
                 user_config = yaml.safe_load(f) or {}
         else:
             user_config = {"repositories": []}
@@ -107,11 +107,15 @@ def config_init(
         # Already known?
         if key in default_keys:
             skipped += 1
-            print(f"[SKIP] (defaults) {provider}/{account}/{repo_name}")
+            print(
+                f"[SKIP] (defaults) {provider}/{account}/{repo_name}"
+            )
             continue
         if key in existing_keys:
             skipped += 1
-            print(f"[SKIP] (user-config) {provider}/{account}/{repo_name}")
+            print(
+                f"[SKIP] (user-config) {provider}/{account}/{repo_name}"
+            )
             continue
 
         print(f"[ADD] {provider}/{account}/{repo_name}")

@@ -121,7 +125,9 @@ def config_init(
         if verified_commit:
             print(f"[INFO] Latest commit: {verified_commit}")
         else:
-            print("[WARN] Could not read commit (not a git repo or no commits).")
+            print(
+                "[WARN] Could not read commit (not a git repo or no commits)."
+            )
 
         entry: Dict[str, Any] = {
             "provider": provider,
@@ -1,6 +1,7 @@
 import yaml
 from pkgmgr.core.config.load import load_config
 
+
 def show_config(selected_repos, user_config_path, full_config=False):
     """Display configuration for one or more repositories, or the entire merged config."""
     if full_config:

@@ -8,7 +9,9 @@ def show_config(selected_repos, user_config_path, full_config=False):
         print(yaml.dump(merged, default_flow_style=False))
     else:
         for repo in selected_repos:
-            identifier = f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
+            identifier = (
+                f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"
+            )
             print(f"Repository: {identifier}")
             for key, value in repo.items():
                 print(f"  {key}: {value}")
@@ -66,10 +66,7 @@ def _ensure_repo_dir(
     repo_dir = get_repo_dir(repositories_base_dir, repo)
 
     if not os.path.exists(repo_dir):
-        print(
-            f"Repository directory '{repo_dir}' does not exist. "
-            "Cloning it now..."
-        )
+        print(f"Repository directory '{repo_dir}' does not exist. Cloning it now...")
         clone_repos(
             [repo],
             repositories_base_dir,

@@ -79,10 +76,7 @@ def _ensure_repo_dir(
             clone_mode,
         )
         if not os.path.exists(repo_dir):
-            print(
-                f"Cloning failed for repository {identifier}. "
-                "Skipping installation."
-            )
+            print(f"Cloning failed for repository {identifier}. Skipping installation.")
             return None
 
     return repo_dir

@@ -115,7 +109,9 @@ def _verify_repo(
 
     if silent:
         # Non-interactive mode: continue with a warning.
-        print(f"[Warning] Continuing despite verification failure for {identifier} (--silent).")
+        print(
+            f"[Warning] Continuing despite verification failure for {identifier} (--silent)."
+        )
     else:
         choice = input("Continue anyway? [y/N]: ").strip().lower()
         if choice != "y":

@@ -232,12 +228,16 @@ def install_repos(
             code = exc.code if isinstance(exc.code, int) else str(exc.code)
             failures.append((identifier, f"installer failed (exit={code})"))
             if not quiet:
-                print(f"[Warning] install: repository {identifier} failed (exit={code}). Continuing...")
+                print(
+                    f"[Warning] install: repository {identifier} failed (exit={code}). Continuing..."
+                )
             continue
         except Exception as exc:
             failures.append((identifier, f"unexpected error: {exc}"))
             if not quiet:
-                print(f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing...")
+                print(
+                    f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing..."
+                )
             continue
 
     if failures and emit_summary and not quiet:
@@ -14,6 +14,10 @@ from pkgmgr.actions.install.installers.python import PythonInstaller  # noqa: F4
 from pkgmgr.actions.install.installers.makefile import MakefileInstaller  # noqa: F401
 
 # OS-specific installers
-from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller  # noqa: F401
-from pkgmgr.actions.install.installers.os_packages.debian_control import DebianControlInstaller  # noqa: F401
+from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import (
+    ArchPkgbuildInstaller as ArchPkgbuildInstaller,
+)  # noqa: F401
+from pkgmgr.actions.install.installers.os_packages.debian_control import (
+    DebianControlInstaller as DebianControlInstaller,
+)  # noqa: F401
 from pkgmgr.actions.install.installers.os_packages.rpm_spec import RpmSpecInstaller  # noqa: F401
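The `ArchPkgbuildInstaller as ArchPkgbuildInstaller` form looks redundant, but it is the conventional way to mark an import as an intentional re-export: type checkers and linters then treat the name as part of the package's public surface. A generic illustration with hypothetical module and class names:

```python
# pkg/__init__.py
# The "X as X" alias marks a deliberate re-export, so tools such as
# mypy (with --no-implicit-reexport) keep the name public.
from pkg.submodule import Thing as Thing  # hypothetical module and name
```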
@@ -41,7 +41,9 @@ class BaseInstaller(ABC):
             return caps
 
         for matcher in CAPABILITY_MATCHERS:
-            if matcher.applies_to_layer(self.layer) and matcher.is_provided(ctx, self.layer):
+            if matcher.applies_to_layer(self.layer) and matcher.is_provided(
+                ctx, self.layer
+            ):
                 caps.add(matcher.name)
 
         return caps
@@ -16,7 +16,9 @@ class MakefileInstaller(BaseInstaller):
     def supports(self, ctx: RepoContext) -> bool:
         if os.environ.get("PKGMGR_DISABLE_MAKEFILE_INSTALLER") == "1":
             if not ctx.quiet:
-                print("[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 – skipping MakefileInstaller.")
+                print(
+                    "[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 – skipping MakefileInstaller."
+                )
             return False
 
         makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)

@@ -46,7 +48,9 @@ class MakefileInstaller(BaseInstaller):
             return
 
         if not ctx.quiet:
-            print(f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)")
+            print(
+                f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)"
+            )
 
         run_command("make install", cwd=ctx.repo_dir, preview=ctx.preview)
@@ -57,7 +57,9 @@ class NixConflictResolver:
 
         # 3) Fallback: output-name based lookup (also covers nix suggesting: `nix profile remove pkgmgr`)
         if not tokens:
-            tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
+            tokens = self._profile.find_remove_tokens_for_output(
+                ctx, self._runner, output
+            )
 
         if tokens:
             if not quiet:

@@ -94,7 +96,9 @@ class NixConflictResolver:
                 continue
 
         if not quiet:
-            print("[nix] conflict detected but could not resolve profile entries to remove.")
+            print(
+                "[nix] conflict detected but could not resolve profile entries to remove."
+            )
         return False
 
     return False
@@ -75,7 +75,9 @@ class NixFlakeInstaller(BaseInstaller):
     # Core install path
     # ---------------------------------------------------------------------
 
-    def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
+    def _install_only(
+        self, ctx: "RepoContext", output: str, allow_failure: bool
+    ) -> None:
         install_cmd = f"nix profile install {self._installable(ctx, output)}"
 
         if not ctx.quiet:

@@ -96,7 +98,9 @@ class NixFlakeInstaller(BaseInstaller):
             output=output,
         ):
             if not ctx.quiet:
-                print(f"[nix] output '{output}' successfully installed after conflict cleanup.")
+                print(
+                    f"[nix] output '{output}' successfully installed after conflict cleanup."
+                )
             return
 
         if not ctx.quiet:

@@ -107,20 +111,26 @@ class NixFlakeInstaller(BaseInstaller):
 
         # If indices are supported, try legacy index-upgrade path.
         if self._indices_supported is not False:
-            indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
+            indices = self._profile.find_installed_indices_for_output(
+                ctx, self._runner, output
+            )
 
             upgraded = False
             for idx in indices:
                 if self._upgrade_index(ctx, idx):
                     upgraded = True
                     if not ctx.quiet:
-                        print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
+                        print(
+                            f"[nix] output '{output}' successfully upgraded (index {idx})."
+                        )
 
             if upgraded:
                 return
 
             if indices and not ctx.quiet:
-                print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
+                print(
+                    f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
+                )
 
             for idx in indices:
                 self._remove_index(ctx, idx)

@@ -139,7 +149,9 @@ class NixFlakeInstaller(BaseInstaller):
             print(f"[nix] output '{output}' successfully re-installed.")
             return
 
-        print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})")
+        print(
+            f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})"
+        )
         if not allow_failure:
             raise SystemExit(final.returncode)
 

@@ -149,7 +161,9 @@ class NixFlakeInstaller(BaseInstaller):
     # force_update path
     # ---------------------------------------------------------------------
 
-    def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
+    def _force_upgrade_output(
+        self, ctx: "RepoContext", output: str, allow_failure: bool
+    ) -> None:
         # Prefer token path if indices unsupported (new nix)
         if self._indices_supported is False:
             self._remove_tokens_for_output(ctx, output)

@@ -158,14 +172,18 @@ class NixFlakeInstaller(BaseInstaller):
                 print(f"[nix] output '{output}' successfully upgraded.")
             return
 
-        indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
+        indices = self._profile.find_installed_indices_for_output(
+            ctx, self._runner, output
+        )
 
         upgraded_any = False
         for idx in indices:
             if self._upgrade_index(ctx, idx):
                 upgraded_any = True
                 if not ctx.quiet:
-                    print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
+                    print(
+                        f"[nix] output '{output}' successfully upgraded (index {idx})."
+                    )
 
         if upgraded_any:
             if not ctx.quiet:

@@ -173,7 +191,9 @@ class NixFlakeInstaller(BaseInstaller):
             return
 
         if indices and not ctx.quiet:
-            print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
+            print(
+                f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
+            )
 
         for idx in indices:
             self._remove_index(ctx, idx)

@@ -223,7 +243,9 @@ class NixFlakeInstaller(BaseInstaller):
             return
 
         if not ctx.quiet:
-            print(f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}")
+            print(
+                f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}"
+            )
 
         for t in tokens:
             self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
@@ -101,7 +101,9 @@ class NixProfileInspector:
         data = self.list_json(ctx, runner)
         entries = normalize_elements(data)
 
-        tokens: List[str] = [out]  # critical: matches nix's own suggestion for conflicts
+        tokens: List[str] = [
+            out
+        ]  # critical: matches nix's own suggestion for conflicts
 
         for e in entries:
             if entry_matches_output(e, out):
@@ -48,7 +48,9 @@ class NixProfileListReader:
 
         return uniq
 
-    def indices_matching_store_prefixes(self, ctx: "RepoContext", prefixes: List[str]) -> List[int]:
+    def indices_matching_store_prefixes(
+        self, ctx: "RepoContext", prefixes: List[str]
+    ) -> List[int]:
         prefixes = [self._store_prefix(p) for p in prefixes if p]
         prefixes = [p for p in prefixes if p]
         if not prefixes:
@@ -11,6 +11,7 @@ if TYPE_CHECKING:
     from pkgmgr.actions.install.context import RepoContext
     from .runner import CommandRunner
 
+
 @dataclass(frozen=True)
 class RetryPolicy:
     max_attempts: int = 7

@@ -35,13 +36,19 @@ class GitHubRateLimitRetry:
         install_cmd: str,
     ) -> RunResult:
         quiet = bool(getattr(ctx, "quiet", False))
-        delays = list(self._fibonacci_backoff(self._policy.base_delay_seconds, self._policy.max_attempts))
+        delays = list(
+            self._fibonacci_backoff(
+                self._policy.base_delay_seconds, self._policy.max_attempts
+            )
+        )
 
         last: RunResult | None = None
 
         for attempt, base_delay in enumerate(delays, start=1):
             if not quiet:
-                print(f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}")
+                print(
+                    f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}"
+                )
 
             res = runner.run(ctx, install_cmd, allow_failure=True)
             last = res

@@ -56,7 +63,9 @@ class GitHubRateLimitRetry:
             if attempt >= self._policy.max_attempts:
                 break
 
-            jitter = random.randint(self._policy.jitter_seconds_min, self._policy.jitter_seconds_max)
+            jitter = random.randint(
+                self._policy.jitter_seconds_min, self._policy.jitter_seconds_max
+            )
             wait_time = base_delay + jitter
 
             if not quiet:

@@ -67,7 +76,11 @@ class GitHubRateLimitRetry:
 
             time.sleep(wait_time)
 
-        return last if last is not None else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
+        return (
+            last
+            if last is not None
+            else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
+        )
 
     @staticmethod
     def _is_github_rate_limit_error(text: str) -> bool:
@@ -9,6 +9,7 @@ from .types import RunResult
 if TYPE_CHECKING:
     from pkgmgr.actions.install.context import RepoContext
 
+
 class CommandRunner:
     """
     Executes commands (shell=True) inside a repository directory (if provided).

@@ -40,7 +41,9 @@ class CommandRunner:
                 raise
             return RunResult(returncode=1, stdout="", stderr=str(e))
 
-        res = RunResult(returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or "")
+        res = RunResult(
+            returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or ""
+        )
 
         if res.returncode != 0 and not quiet:
             self._print_compact_failure(res)
@@ -20,7 +20,9 @@ class NixConflictTextParser:
         tokens: List[str] = []
         for m in pat.finditer(text or ""):
             t = (m.group(1) or "").strip()
-            if (t.startswith("'") and t.endswith("'")) or (t.startswith('"') and t.endswith('"')):
+            if (t.startswith("'") and t.endswith("'")) or (
+                t.startswith('"') and t.endswith('"')
+            ):
                 t = t[1:-1]
             if t:
                 tokens.append(t)
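The reformatted condition only wraps the same quote-stripping logic. Restated as a standalone check:

```python
def strip_matching_quotes(t: str) -> str:
    # Same logic as the reformatted condition above: drop a matching
    # pair of surrounding single or double quotes, else return unchanged.
    if (t.startswith("'") and t.endswith("'")) or (
        t.startswith('"') and t.endswith('"')
    ):
        return t[1:-1]
    return t


assert strip_matching_quotes("'pkgmgr'") == "pkgmgr"
assert strip_matching_quotes('"pkgmgr"') == "pkgmgr"
assert strip_matching_quotes("pkgmgr") == "pkgmgr"
```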
@@ -14,7 +14,9 @@ class PythonInstaller(BaseInstaller):
 
     def supports(self, ctx: RepoContext) -> bool:
         if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
-            print("[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER.")
+            print(
+                "[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER."
+            )
             return False
 
         return os.path.exists(os.path.join(ctx.repo_dir, "pyproject.toml"))
@@ -132,7 +132,11 @@ class InstallationPipeline:
                 continue
 
             if not quiet:
-                if ctx.force_update and state.layer is not None and installer_layer == state.layer:
+                if (
+                    ctx.force_update
+                    and state.layer is not None
+                    and installer_layer == state.layer
+                ):
                     print(
                         f"[pkgmgr] Running installer {installer.__class__.__name__} "
                         f"for {identifier} in '{repo_dir}' (upgrade requested)..."
@@ -14,6 +14,7 @@ from .list_cmd import list_mirrors
 from .diff_cmd import diff_mirrors
 from .merge_cmd import merge_mirrors
 from .setup_cmd import setup_mirrors
+from .visibility_cmd import set_mirror_visibility
 
 __all__ = [
     "Repository",

@@ -22,4 +23,5 @@ __all__ = [
     "diff_mirrors",
     "merge_mirrors",
     "setup_mirrors",
+    "set_mirror_visibility",
 ]
@@ -16,6 +16,7 @@ from .types import MirrorMap, Repository
 # Helpers
 # -----------------------------------------------------------------------------
 
+
 def _repo_key(repo: Repository) -> Tuple[str, str, str]:
     """
     Normalised key for identifying a repository in config files.

@@ -47,6 +48,7 @@ def _load_user_config(path: str) -> Dict[str, object]:
 # Main merge command
 # -----------------------------------------------------------------------------
 
+
 def merge_mirrors(
     selected_repos: List[Repository],
     repositories_base_dir: str,
@@ -11,35 +11,37 @@ from .types import Repository
 from .url_utils import normalize_provider_host, parse_repo_from_git_url
 
 
-def ensure_remote_repository(
-    repo: Repository,
-    repositories_base_dir: str,
-    all_repos: List[Repository],
+def _provider_hint_from_host(host: str) -> str | None:
+    h = (host or "").lower()
+    if h == "github.com":
+        return "github"
+    # Best-effort default for self-hosted git domains
+    return "gitea" if h else None
+
+
+def ensure_remote_repository_for_url(
+    *,
+    url: str,
+    private_default: bool,
+    description: str,
     preview: bool,
 ) -> None:
-    ctx = build_context(repo, repositories_base_dir, all_repos)
-
-    primary_url = determine_primary_remote_url(repo, ctx)
-    if not primary_url:
-        print("[INFO] No primary URL found; skipping remote provisioning.")
-        return
-
-    host_raw, owner, name = parse_repo_from_git_url(primary_url)
+    host_raw, owner, name = parse_repo_from_git_url(url)
     host = normalize_provider_host(host_raw)
 
     if not host or not owner or not name:
-        print("[WARN] Could not parse remote URL:", primary_url)
+        print(f"[WARN] Could not parse repo from URL: {url}")
         return
 
     spec = RepoSpec(
         host=host,
         owner=owner,
         name=name,
-        private=bool(repo.get("private", True)),
-        description=str(repo.get("description", "")),
+        private=private_default,
+        description=description,
     )
 
-    provider_kind = str(repo.get("provider", "")).lower() or None
+    provider_kind = _provider_hint_from_host(host)
 
     try:
         result = ensure_remote_repo(

@@ -56,4 +58,29 @@ def ensure_remote_repository(
         if result.url:
             print(f"[REMOTE ENSURE] URL: {result.url}")
     except Exception as exc:  # noqa: BLE001
-        print(f"[ERROR] Remote provisioning failed: {exc}")
+        print(f"[ERROR] Remote provisioning failed for {url!r}: {exc}")
+
+
+def ensure_remote_repository(
+    repo: Repository,
+    repositories_base_dir: str,
+    all_repos: List[Repository],
+    preview: bool,
+) -> None:
+    """
+    Backwards-compatible wrapper: ensure the *primary* remote repository
+    derived from the primary URL.
+    """
+    ctx = build_context(repo, repositories_base_dir, all_repos)
+
+    primary_url = determine_primary_remote_url(repo, ctx)
+    if not primary_url:
+        print("[INFO] No primary URL found; skipping remote provisioning.")
+        return
+
+    ensure_remote_repository_for_url(
+        url=primary_url,
+        private_default=bool(repo.get("private", True)),
+        description=str(repo.get("description", "")),
+        preview=preview,
+    )
@@ -2,12 +2,15 @@ from __future__ import annotations
 
 from typing import List
 
-from pkgmgr.core.git.queries import probe_remote_reachable
+from pkgmgr.core.git.queries import probe_remote_reachable_detail
+from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, set_repo_visibility
+from pkgmgr.core.remote_provisioning.visibility import VisibilityOptions
 
 from .context import build_context
-from .git_remote import ensure_origin_remote, determine_primary_remote_url
-from .remote_provision import ensure_remote_repository
+from .git_remote import determine_primary_remote_url, ensure_origin_remote
+from .remote_provision import ensure_remote_repository_for_url
 from .types import Repository
+from .url_utils import normalize_provider_host, parse_repo_from_git_url
 
 
 def _is_git_remote_url(url: str) -> bool:

@@ -25,6 +28,64 @@ def _is_git_remote_url(url: str) -> bool:
     return False
 
 
+def _provider_hint_from_host(host: str) -> str | None:
+    h = (host or "").lower()
+    if h == "github.com":
+        return "github"
+    return "gitea" if h else None
+
+
+def _apply_visibility_for_url(
+    *,
+    url: str,
+    private: bool,
+    description: str,
+    preview: bool,
+) -> None:
+    host_raw, owner, name = parse_repo_from_git_url(url)
+    host = normalize_provider_host(host_raw)
+
+    if not host or not owner or not name:
+        print(f"[WARN] Could not parse repo from URL: {url}")
+        return
+
+    spec = RepoSpec(
+        host=host,
+        owner=owner,
+        name=name,
+        private=private,
+        description=description,
+    )
+
+    provider_kind = _provider_hint_from_host(host)
+    res = set_repo_visibility(
+        spec,
+        private=private,
+        provider_hint=ProviderHint(kind=provider_kind),
+        options=VisibilityOptions(preview=preview),
+    )
+    print(f"[REMOTE VISIBILITY] {res.status.upper()}: {res.message}")
+
+
+def _print_probe_result(name: str | None, url: str, *, cwd: str) -> None:
+    """
+    Print probe result for a git remote URL, including a short failure reason.
+    """
+    ok, reason = probe_remote_reachable_detail(url, cwd=cwd)
+
+    prefix = f"{name}: " if name else ""
+    if ok:
+        print(f"[OK] {prefix}{url}")
+        return
+
+    print(f"[WARN] {prefix}{url}")
+    if reason:
+        reason = reason.strip()
+        if len(reason) > 240:
+            reason = reason[:240].rstrip() + "…"
+        print(f"  reason: {reason}")
+
+
 def _setup_local_mirrors_for_repo(
     repo: Repository,
     repositories_base_dir: str,

@@ -48,6 +109,7 @@ def _setup_remote_mirrors_for_repo(
     all_repos: List[Repository],
     preview: bool,
     ensure_remote: bool,
+    ensure_visibility: str | None,
 ) -> None:
     ctx = build_context(repo, repositories_base_dir, all_repos)
 

@@ -56,33 +118,78 @@ def _setup_remote_mirrors_for_repo(
     print(f"[MIRROR SETUP:REMOTE] dir: {ctx.repo_dir}")
     print("------------------------------------------------------------")
 
-    if ensure_remote:
-        ensure_remote_repository(
-            repo,
-            repositories_base_dir,
-            all_repos,
-            preview,
-        )
-
-    # Probe only git URLs (do not try ls-remote against PyPI etc.)
-    # If there are no mirrors at all, probe the primary git URL.
-    git_mirrors = {k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)}
+    git_mirrors = {
+        k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)
+    }
+
+    def _desired_private_default() -> bool:
+        # default behavior: repo['private'] (or True)
+        if ensure_visibility == "public":
+            return False
+        if ensure_visibility == "private":
+            return True
+        return bool(repo.get("private", True))
+
+    def _should_enforce_visibility() -> bool:
+        return ensure_visibility in ("public", "private")
+
+    def _visibility_private_value() -> bool:
+        return ensure_visibility == "private"
+
+    description = str(repo.get("description", ""))
+
+    # If there are no git mirrors, fall back to primary (git) URL.
     if not git_mirrors:
         primary = determine_primary_remote_url(repo, ctx)
         if not primary or not _is_git_remote_url(primary):
-            print("[INFO] No git mirrors to probe.")
+            print("[INFO] No git mirrors to probe or provision.")
             print()
             return
 
-        ok = probe_remote_reachable(primary, cwd=ctx.repo_dir)
-        print("[OK]" if ok else "[WARN]", primary)
+        if ensure_remote:
+            print(f"[REMOTE ENSURE] ensuring primary: {primary}")
+            ensure_remote_repository_for_url(
+                url=primary,
+                private_default=_desired_private_default(),
+                description=description,
+                preview=preview,
+            )
+            # IMPORTANT: enforce visibility only if requested
+            if _should_enforce_visibility():
+                _apply_visibility_for_url(
+                    url=primary,
+                    private=_visibility_private_value(),
+                    description=description,
+                    preview=preview,
+                )
+            print()
+
+        _print_probe_result(None, primary, cwd=ctx.repo_dir)
         print()
         return
 
-    for name, url in git_mirrors.items():
-        ok = probe_remote_reachable(url, cwd=ctx.repo_dir)
-        print(f"[OK] {name}: {url}" if ok else f"[WARN] {name}: {url}")
+    # Provision ALL git mirrors (if requested)
+    if ensure_remote:
+        for name, url in git_mirrors.items():
+            print(f"[REMOTE ENSURE] ensuring mirror {name!r}: {url}")
+            ensure_remote_repository_for_url(
+                url=url,
+                private_default=_desired_private_default(),
+                description=description,
+                preview=preview,
+            )
+            if _should_enforce_visibility():
+                _apply_visibility_for_url(
+                    url=url,
+                    private=_visibility_private_value(),
+                    description=description,
+                    preview=preview,
+                )
+        print()
+
+    # Probe ALL git mirrors
+    for name, url in git_mirrors.items():
+        _print_probe_result(name, url, cwd=ctx.repo_dir)
 
     print()

@@ -95,6 +202,7 @@ def setup_mirrors(
     local: bool = True,
     remote: bool = True,
     ensure_remote: bool = False,
+    ensure_visibility: str | None = None,
 ) -> None:
     for repo in selected_repos:
         if local:

@@ -112,4 +220,5 @@ def setup_mirrors(
             all_repos,
             preview,
             ensure_remote,
+            ensure_visibility,
         )
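The three small closures in `_setup_remote_mirrors_for_repo` encode how an explicit visibility request overrides the per-repo `private` default during provisioning. Restated as a standalone function for clarity (the name here is illustrative, not from the diff):

```python
from __future__ import annotations


def desired_private(ensure_visibility: str | None, repo_private: bool = True) -> bool:
    """Mirror of _desired_private_default: an explicit request wins,
    otherwise the repo's own 'private' setting (default True) applies."""
    if ensure_visibility == "public":
        return False
    if ensure_visibility == "private":
        return True
    return repo_private


assert desired_private("public") is False
assert desired_private("private") is True
assert desired_private(None, repo_private=False) is False
```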
src/pkgmgr/actions/mirror/visibility_cmd.py (new file, 134 lines)

@@ -0,0 +1,134 @@
+from __future__ import annotations
+
+from typing import List
+
+from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, set_repo_visibility
+from pkgmgr.core.remote_provisioning.visibility import VisibilityOptions
+
+from .context import build_context
+from .git_remote import determine_primary_remote_url
+from .types import Repository
+from .url_utils import normalize_provider_host, parse_repo_from_git_url
+
+
+def _is_git_remote_url(url: str) -> bool:
+    # Keep same semantics as setup_cmd.py / git_remote.py
+    u = (url or "").strip()
+    if not u:
+        return False
+    if u.startswith("git@"):
+        return True
+    if u.startswith("ssh://"):
+        return True
+    if (u.startswith("https://") or u.startswith("http://")) and u.endswith(".git"):
+        return True
+    return False
+
+
+def _provider_hint_from_host(host: str) -> str | None:
+    h = (host or "").lower()
+    if h == "github.com":
+        return "github"
+    # Best-effort default for self-hosted git domains
+    return "gitea" if h else None
+
+
+def _apply_visibility_for_url(
+    *,
+    url: str,
+    private: bool,
+    description: str,
+    preview: bool,
+) -> None:
+    host_raw, owner, name = parse_repo_from_git_url(url)
+    host = normalize_provider_host(host_raw)
+
+    if not host or not owner or not name:
+        print(f"[WARN] Could not parse repo from URL: {url}")
+        return
+
+    spec = RepoSpec(
+        host=host,
+        owner=owner,
+        name=name,
+        private=private,
+        description=description,
+    )
+
+    provider_kind = _provider_hint_from_host(host)
+    res = set_repo_visibility(
+        spec,
+        private=private,
+        provider_hint=ProviderHint(kind=provider_kind),
+        options=VisibilityOptions(preview=preview),
+    )
+    print(f"[REMOTE VISIBILITY] {res.status.upper()}: {res.message}")
+
+
+def set_mirror_visibility(
+    selected_repos: List[Repository],
+    repositories_base_dir: str,
+    all_repos: List[Repository],
+    *,
+    visibility: str,
+    preview: bool = False,
+) -> None:
+    """
+    Set remote repository visibility for all git mirrors of each selected repo.
+
+    visibility:
+      - "private"
+      - "public"
+    """
+    v = (visibility or "").strip().lower()
+    if v not in ("private", "public"):
+        raise ValueError("visibility must be 'private' or 'public'")
+
+    desired_private = v == "private"
+
+    for repo in selected_repos:
+        ctx = build_context(repo, repositories_base_dir, all_repos)
+
+        print("------------------------------------------------------------")
+        print(f"[MIRROR VISIBILITY] {ctx.identifier}")
+        print(f"[MIRROR VISIBILITY] dir: {ctx.repo_dir}")
+        print(f"[MIRROR VISIBILITY] target: {v}")
+        print("------------------------------------------------------------")
+
+        git_mirrors = {
+            name: url
+            for name, url in ctx.resolved_mirrors.items()
+            if url and _is_git_remote_url(url)
+        }
+
+        # If there are no git mirrors, fall back to primary (git) URL.
+        if not git_mirrors:
+            primary = determine_primary_remote_url(repo, ctx)
+            if not primary or not _is_git_remote_url(primary):
+                print(
+                    "[INFO] No git mirrors found (and no primary git URL). Nothing to do."
+                )
+                print()
+                continue
+
+            print(f"[MIRROR VISIBILITY] applying to primary: {primary}")
+            _apply_visibility_for_url(
+                url=primary,
+                private=desired_private,
+                description=str(repo.get("description", "")),
+                preview=preview,
+            )
+            print()
+            continue
+
+        # Apply to ALL git mirrors
+        for name, url in git_mirrors.items():
+            print(f"[MIRROR VISIBILITY] applying to mirror {name!r}: {url}")
+            _apply_visibility_for_url(
+                url=url,
+                private=desired_private,
+                description=str(repo.get("description", "")),
+                preview=preview,
+            )
+
+        print()
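A minimal usage sketch of the new entry point, assuming repository entries are plain dicts as elsewhere in the config; the example values follow the placeholders the CLI prompts themselves use (`yourusername`, `mytool`), and the base directory is hypothetical:

```python
from pkgmgr.actions.mirror import set_mirror_visibility

repos = [
    {
        "provider": "github.com",
        "account": "yourusername",   # placeholder, as in the interactive prompts
        "repository": "mytool",      # placeholder
    }
]

# Preview only: prints what would change without touching any remote.
set_mirror_visibility(
    repos,
    "/home/user/Repositories",  # hypothetical repositories_base_dir
    repos,
    visibility="public",
    preview=True,
)
```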
@@ -4,7 +4,16 @@ from pkgmgr.core.repository.dir import get_repo_dir
 from pkgmgr.core.command.run import run_command
 import sys
 
-def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir, all_repos, proxy_command: str, extra_args, preview: bool):
+
+def exec_proxy_command(
+    proxy_prefix: str,
+    selected_repos,
+    repositories_base_dir,
+    all_repos,
+    proxy_command: str,
+    extra_args,
+    preview: bool,
+):
     """Execute a given proxy command with extra arguments for each repository."""
     error_repos = []
     max_exit_code = 0

@@ -22,7 +31,9 @@ def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir,
         try:
             run_command(full_cmd, cwd=repo_dir, preview=preview)
         except SystemExit as e:
-            print(f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}.")
+            print(
+                f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}."
+            )
             error_repos.append((repo_identifier, e.code))
             max_exit_code = max(max_exit_code, e.code)
@@ -1,519 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
File and metadata update helpers for the release workflow.

Responsibilities:
- Update pyproject.toml with the new version.
- Update flake.nix, PKGBUILD, RPM spec files where present.
- Prepend release entries to CHANGELOG.md.
- Maintain distribution-specific changelog files:
    * debian/changelog
    * RPM spec %changelog section
  including maintainer metadata where applicable.
"""

from __future__ import annotations

import os
import re
import subprocess
import sys
import tempfile
from datetime import date, datetime
from typing import Optional, Tuple

from pkgmgr.core.git.queries import get_config_value


# ---------------------------------------------------------------------------
# Editor helper for interactive changelog messages
# ---------------------------------------------------------------------------


def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
    """
    Open $EDITOR (fallback 'nano') so the user can enter a changelog message.

    The temporary file is pre-filled with commented instructions and an
    optional initial_message. Lines starting with '#' are ignored when the
    message is read back.

    Returns the final message (may be empty string if user leaves it blank).
    """
    editor = os.environ.get("EDITOR", "nano")

    with tempfile.NamedTemporaryFile(
        mode="w+",
        delete=False,
        encoding="utf-8",
    ) as tmp:
        tmp_path = tmp.name
        tmp.write(
            "# Write the changelog entry for this release.\n"
            "# Lines starting with '#' will be ignored.\n"
            "# Empty result will fall back to a generic message.\n\n"
        )
        if initial_message:
            tmp.write(initial_message.strip() + "\n")
        tmp.flush()

    try:
        subprocess.call([editor, tmp_path])
    except FileNotFoundError:
        print(
            f"[WARN] Editor {editor!r} not found; proceeding without "
            "interactive changelog message."
        )

    try:
        with open(tmp_path, "r", encoding="utf-8") as f:
            content = f.read()
    finally:
        try:
            os.remove(tmp_path)
        except OSError:
            pass

    lines = [line for line in content.splitlines() if not line.strip().startswith("#")]
    return "\n".join(lines).strip()


# ---------------------------------------------------------------------------
# File update helpers (pyproject + extra packaging + changelog)
# ---------------------------------------------------------------------------


def update_pyproject_version(
    pyproject_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Update the version in pyproject.toml with the new version.

    The function looks for a line matching:

        version = "X.Y.Z"

    and replaces the version part with the given new_version string.

    If the file does not exist, it is skipped without failing the release.
    """
    if not os.path.exists(pyproject_path):
        print(
            f"[INFO] pyproject.toml not found at: {pyproject_path}, "
            "skipping version update."
        )
        return

    try:
        with open(pyproject_path, "r", encoding="utf-8") as f:
            content = f.read()
    except OSError as exc:
        print(
            f"[WARN] Could not read pyproject.toml at {pyproject_path}: {exc}. "
            "Skipping version update."
        )
        return

    pattern = r'^(version\s*=\s*")([^"]+)(")'
    new_content, count = re.subn(
        pattern,
        lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
        content,
        flags=re.MULTILINE,
    )

    if count == 0:
        print("[ERROR] Could not find version line in pyproject.toml")
        sys.exit(1)

    if preview:
        print(f"[PREVIEW] Would update pyproject.toml version to {new_version}")
        return

    with open(pyproject_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(f"Updated pyproject.toml version to {new_version}")


def update_flake_version(
    flake_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Update the version in flake.nix, if present.
    """
    if not os.path.exists(flake_path):
        print("[INFO] flake.nix not found, skipping.")
        return

    try:
        with open(flake_path, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read flake.nix: {exc}")
        return

    pattern = r'(version\s*=\s*")([^"]+)(")'
    new_content, count = re.subn(
        pattern,
        lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
        content,
    )

    if count == 0:
        print("[WARN] No version assignment found in flake.nix, skipping.")
        return

    if preview:
        print(f"[PREVIEW] Would update flake.nix version to {new_version}")
        return

    with open(flake_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(f"Updated flake.nix version to {new_version}")


def update_pkgbuild_version(
    pkgbuild_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Update the version in PKGBUILD, if present.

    Expects:
        pkgver=1.2.3
        pkgrel=1
    """
    if not os.path.exists(pkgbuild_path):
        print("[INFO] PKGBUILD not found, skipping.")
        return

    try:
        with open(pkgbuild_path, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read PKGBUILD: {exc}")
        return

    ver_pattern = r"^(pkgver\s*=\s*)(.+)$"
    new_content, ver_count = re.subn(
        ver_pattern,
        lambda m: f"{m.group(1)}{new_version}",
        content,
        flags=re.MULTILINE,
    )

    if ver_count == 0:
        print("[WARN] No pkgver line found in PKGBUILD.")
        new_content = content

    rel_pattern = r"^(pkgrel\s*=\s*)(.+)$"
    new_content, rel_count = re.subn(
        rel_pattern,
        lambda m: f"{m.group(1)}1",
        new_content,
        flags=re.MULTILINE,
    )

    if rel_count == 0:
        print("[WARN] No pkgrel line found in PKGBUILD.")

    if preview:
        print(f"[PREVIEW] Would update PKGBUILD to pkgver={new_version}, pkgrel=1")
        return

    with open(pkgbuild_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(f"Updated PKGBUILD to pkgver={new_version}, pkgrel=1")


def update_spec_version(
    spec_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Update the version in an RPM spec file, if present.
    """
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping.")
        return

    try:
        with open(spec_path, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file: {exc}")
        return

    ver_pattern = r"^(Version:\s*)(.+)$"
    new_content, ver_count = re.subn(
        ver_pattern,
        lambda m: f"{m.group(1)}{new_version}",
        content,
        flags=re.MULTILINE,
    )

    if ver_count == 0:
        print("[WARN] No 'Version:' line found in spec file.")

    rel_pattern = r"^(Release:\s*)(.+)$"

    def _release_repl(m: re.Match[str]) -> str:  # type: ignore[name-defined]
        rest = m.group(2).strip()
        match = re.match(r"^(\d+)(.*)$", rest)
        if match:
            suffix = match.group(2)
        else:
            suffix = ""
        return f"{m.group(1)}1{suffix}"

    new_content, rel_count = re.subn(
        rel_pattern,
        _release_repl,
        new_content,
        flags=re.MULTILINE,
    )

    if rel_count == 0:
        print("[WARN] No 'Release:' line found in spec file.")

    if preview:
        print(
            "[PREVIEW] Would update spec file "
            f"{os.path.basename(spec_path)} to Version: {new_version}, Release: 1..."
        )
        return

    with open(spec_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(
        f"Updated spec file {os.path.basename(spec_path)} "
        f"to Version: {new_version}, Release: 1..."
    )


def update_changelog(
    changelog_path: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> str:
    """
    Prepend a new release section to CHANGELOG.md with the new version,
    current date, and a message.
    """
    today = date.today().isoformat()

    if message is None:
        if preview:
            message = "Automated release."
        else:
            print(
                "\n[INFO] No release message provided, opening editor for "
                "changelog entry...\n"
            )
            editor_message = _open_editor_for_changelog()
            if not editor_message:
                message = "Automated release."
            else:
                message = editor_message

    header = f"## [{new_version}] - {today}\n"
    header += f"\n* {message}\n\n"

    if os.path.exists(changelog_path):
        try:
            with open(changelog_path, "r", encoding="utf-8") as f:
                changelog = f.read()
        except Exception as exc:
            print(f"[WARN] Could not read existing CHANGELOG.md: {exc}")
            changelog = ""
    else:
        changelog = ""

    new_changelog = header + "\n" + changelog if changelog else header

    print("\n================ CHANGELOG ENTRY ================")
    print(header.rstrip())
    print("=================================================\n")

    if preview:
        print(f"[PREVIEW] Would prepend new entry for {new_version} to CHANGELOG.md")
        return message

    with open(changelog_path, "w", encoding="utf-8") as f:
        f.write(new_changelog)

    print(f"Updated CHANGELOG.md with version {new_version}")

    return message


# ---------------------------------------------------------------------------
# Debian changelog helpers (with Git config fallback for maintainer)
# ---------------------------------------------------------------------------


def _get_debian_author() -> Tuple[str, str]:
    """
    Determine the maintainer name/email for debian/changelog entries.
    """
    name = os.environ.get("DEBFULLNAME")
    email = os.environ.get("DEBEMAIL")

    if not name:
        name = os.environ.get("GIT_AUTHOR_NAME")
    if not email:
        email = os.environ.get("GIT_AUTHOR_EMAIL")

    if not name:
        name = get_config_value("user.name")
    if not email:
        email = get_config_value("user.email")

    if not name:
        name = "Unknown Maintainer"
    if not email:
        email = "unknown@example.com"

    return name, email


def update_debian_changelog(
    debian_changelog_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """
    Prepend a new entry to debian/changelog, if it exists.
    """
    if not os.path.exists(debian_changelog_path):
        print("[INFO] debian/changelog not found, skipping.")
        return

    debian_version = f"{new_version}-1"
    now = datetime.now().astimezone()
    date_str = now.strftime("%a, %d %b %Y %H:%M:%S %z")

    author_name, author_email = _get_debian_author()

    first_line = f"{package_name} ({debian_version}) unstable; urgency=medium"
    body_line = message.strip() if message else f"Automated release {new_version}."
    stanza = (
        f"{first_line}\n\n"
        f" * {body_line}\n\n"
        f" -- {author_name} <{author_email}> {date_str}\n\n"
    )

    if preview:
        print(
            "[PREVIEW] Would prepend the following stanza to debian/changelog:\n"
            f"{stanza}"
        )
        return

    try:
        with open(debian_changelog_path, "r", encoding="utf-8") as f:
            existing = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read debian/changelog: {exc}")
        existing = ""

    new_content = stanza + existing

    with open(debian_changelog_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(f"Updated debian/changelog with version {debian_version}")


# ---------------------------------------------------------------------------
# Fedora / RPM spec %changelog helper
# ---------------------------------------------------------------------------


def update_spec_changelog(
    spec_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """
    Prepend a new entry to the %changelog section of an RPM spec file,
    if present.

    Typical RPM-style entry:

        * Tue Dec 09 2025 John Doe <john@example.com> - 0.5.1-1
        - Your changelog message
    """
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping spec changelog update.")
        return

    try:
        with open(spec_path, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file for changelog update: {exc}")
        return

    debian_version = f"{new_version}-1"
    now = datetime.now().astimezone()
    date_str = now.strftime("%a %b %d %Y")

    # Reuse Debian maintainer discovery for author name/email.
    author_name, author_email = _get_debian_author()

    body_line = message.strip() if message else f"Automated release {new_version}."

    stanza = (
        f"* {date_str} {author_name} <{author_email}> - {debian_version}\n"
        f"- {body_line}\n\n"
    )

    marker = "%changelog"
    idx = content.find(marker)

    if idx == -1:
        # No %changelog section yet: append one at the end.
        new_content = content.rstrip() + "\n\n%changelog\n" + stanza
    else:
        # Insert stanza right after the %changelog line.
        before = content[: idx + len(marker)]
        after = content[idx + len(marker) :]
        new_content = before + "\n" + stanza + after.lstrip("\n")

    if preview:
        print(
            "[PREVIEW] Would update RPM %changelog section with the following "
            "stanza:\n"
            f"{stanza}"
        )
        return

    try:
        with open(spec_path, "w", encoding="utf-8") as f:
            f.write(new_content)
    except Exception as exc:
        print(f"[WARN] Failed to write updated spec changelog section: {exc}")
        return

    print(
        f"Updated RPM %changelog section in {os.path.basename(spec_path)} "
        f"for {package_name} {debian_version}"
    )
35  src/pkgmgr/actions/release/files/__init__.py  Normal file
@@ -0,0 +1,35 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Backwards-compatible facade for the release file update helpers.

Implementations live in this package:
    pkgmgr.actions.release.files.*

Keep this package stable so existing imports continue to work, e.g.:
    from pkgmgr.actions.release.files import update_pyproject_version
"""

from __future__ import annotations

from .editor import _open_editor_for_changelog
from .pyproject import update_pyproject_version
from .flake import update_flake_version
from .pkgbuild import update_pkgbuild_version
from .rpm_spec import update_spec_version
from .changelog_md import update_changelog
from .debian import _get_debian_author, update_debian_changelog
from .rpm_changelog import update_spec_changelog

__all__ = [
    "_open_editor_for_changelog",
    "update_pyproject_version",
    "update_flake_version",
    "update_pkgbuild_version",
    "update_spec_version",
    "update_changelog",
    "_get_debian_author",
    "update_debian_changelog",
    "update_spec_changelog",
]
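A minimal sketch of the compatibility contract this facade preserves, assuming pkgmgr is installed and the snippet runs from a repository root (the file paths are illustrative). Preview mode prints the intended change without writing anything:

```python
# Sketch only: old-style imports resolve through the new facade package.
from pkgmgr.actions.release.files import (
    update_changelog,
    update_pyproject_version,
)

update_pyproject_version("pyproject.toml", "1.9.2", preview=True)
update_changelog("CHANGELOG.md", "1.9.2", message="Example entry.", preview=True)
```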
62  src/pkgmgr/actions/release/files/changelog_md.py  Normal file
@@ -0,0 +1,62 @@
from __future__ import annotations

import os
from datetime import date
from typing import Optional

from .editor import _open_editor_for_changelog


def update_changelog(
    changelog_path: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> str:
    """
    Prepend a new release section to CHANGELOG.md with the new version,
    current date, and a message.
    """
    today = date.today().isoformat()

    if message is None:
        if preview:
            message = "Automated release."
        else:
            print(
                "\n[INFO] No release message provided, opening editor for changelog entry...\n"
            )
            editor_message = _open_editor_for_changelog()
            if not editor_message:
                message = "Automated release."
            else:
                message = editor_message

    header = f"## [{new_version}] - {today}\n"
    header += f"\n* {message}\n\n"

    if os.path.exists(changelog_path):
        try:
            with open(changelog_path, "r", encoding="utf-8") as f:
                changelog = f.read()
        except Exception as exc:
            print(f"[WARN] Could not read existing CHANGELOG.md: {exc}")
            changelog = ""
    else:
        changelog = ""

    new_changelog = header + "\n" + changelog if changelog else header

    print("\n================ CHANGELOG ENTRY ================")
    print(header.rstrip())
    print("=================================================\n")

    if preview:
        print(f"[PREVIEW] Would prepend new entry for {new_version} to CHANGELOG.md")
        return message

    with open(changelog_path, "w", encoding="utf-8") as f:
        f.write(new_changelog)

    print(f"Updated CHANGELOG.md with version {new_version}")
    return message
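The header assembly in this module produces the `## [version] - date` sections seen in CHANGELOG.md above; a standalone sketch of the same string logic, with example values only:

```python
from datetime import date

# Example values; mirrors the header/prepend logic in the module above.
new_version = "1.9.2"
message = "Example release note."
header = f"## [{new_version}] - {date.today().isoformat()}\n"
header += f"\n* {message}\n\n"

existing = "## [1.9.1] - 2025-12-21\n\n* Older entry.\n"
print(header + "\n" + existing if existing else header)
```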
74  src/pkgmgr/actions/release/files/debian.py  Normal file
@@ -0,0 +1,74 @@
from __future__ import annotations

import os
from datetime import datetime
from typing import Optional, Tuple

from pkgmgr.core.git.queries import get_config_value


def _get_debian_author() -> Tuple[str, str]:
    name = os.environ.get("DEBFULLNAME")
    email = os.environ.get("DEBEMAIL")

    if not name:
        name = os.environ.get("GIT_AUTHOR_NAME")
    if not email:
        email = os.environ.get("GIT_AUTHOR_EMAIL")

    if not name:
        name = get_config_value("user.name")
    if not email:
        email = get_config_value("user.email")

    if not name:
        name = "Unknown Maintainer"
    if not email:
        email = "unknown@example.com"

    return name, email


def update_debian_changelog(
    debian_changelog_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    if not os.path.exists(debian_changelog_path):
        print("[INFO] debian/changelog not found, skipping.")
        return

    debian_version = f"{new_version}-1"
    now = datetime.now().astimezone()
    date_str = now.strftime("%a, %d %b %Y %H:%M:%S %z")

    author_name, author_email = _get_debian_author()

    first_line = f"{package_name} ({debian_version}) unstable; urgency=medium"
    body_line = message.strip() if message else f"Automated release {new_version}."
    stanza = (
        f"{first_line}\n\n"
        f" * {body_line}\n\n"
        f" -- {author_name} <{author_email}> {date_str}\n\n"
    )

    if preview:
        print(
            "[PREVIEW] Would prepend the following stanza to debian/changelog:\n"
            f"{stanza}"
        )
        return

    try:
        with open(debian_changelog_path, "r", encoding="utf-8") as f:
            existing = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read debian/changelog: {exc}")
        existing = ""

    with open(debian_changelog_path, "w", encoding="utf-8") as f:
        f.write(stanza + existing)

    print(f"Updated debian/changelog with version {debian_version}")
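For reference, the stanza assembled above follows the usual debian/changelog shape. A self-contained sketch with placeholder maintainer data (the real helper resolves DEBFULLNAME/DEBEMAIL, then GIT_AUTHOR_*, then git config):

```python
from datetime import datetime

# Placeholder name/email and example version; the string shape matches
# the helper above.
package_name, new_version = "package-manager", "1.9.2"
date_str = datetime.now().astimezone().strftime("%a, %d %b %Y %H:%M:%S %z")
stanza = (
    f"{package_name} ({new_version}-1) unstable; urgency=medium\n\n"
    f" * Example release note.\n\n"
    f" -- Jane Doe <jane@example.com> {date_str}\n\n"
)
print(stanza)
```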
45  src/pkgmgr/actions/release/files/editor.py  Normal file
@@ -0,0 +1,45 @@
from __future__ import annotations

import os
import subprocess
import tempfile
from typing import Optional


def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
    editor = os.environ.get("EDITOR", "nano")

    with tempfile.NamedTemporaryFile(
        mode="w+",
        delete=False,
        encoding="utf-8",
    ) as tmp:
        tmp_path = tmp.name
        tmp.write(
            "# Write the changelog entry for this release.\n"
            "# Lines starting with '#' will be ignored.\n"
            "# Empty result will fall back to a generic message.\n\n"
        )
        if initial_message:
            tmp.write(initial_message.strip() + "\n")
        tmp.flush()

    try:
        subprocess.call([editor, tmp_path])
    except FileNotFoundError:
        print(
            f"[WARN] Editor {editor!r} not found; proceeding without "
            "interactive changelog message."
        )

    try:
        with open(tmp_path, "r", encoding="utf-8") as f:
            content = f.read()
    finally:
        try:
            os.remove(tmp_path)
        except OSError:
            pass

    lines = [line for line in content.splitlines() if not line.strip().startswith("#")]
    return "\n".join(lines).strip()
39  src/pkgmgr/actions/release/files/flake.py  Normal file
@@ -0,0 +1,39 @@
from __future__ import annotations

import os
import re


def update_flake_version(
    flake_path: str, new_version: str, preview: bool = False
) -> None:
    if not os.path.exists(flake_path):
        print("[INFO] flake.nix not found, skipping.")
        return

    try:
        with open(flake_path, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read flake.nix: {exc}")
        return

    pattern = r'(version\s*=\s*")([^"]+)(")'
    new_content, count = re.subn(
        pattern,
        lambda m: f"{m.group(1)}{new_version}{m.group(3)}",
        content,
    )

    if count == 0:
        print("[WARN] No version found in flake.nix.")
        return

    if preview:
        print(f"[PREVIEW] Would update flake.nix version to {new_version}")
        return

    with open(flake_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(f"Updated flake.nix version to {new_version}")
41  src/pkgmgr/actions/release/files/pkgbuild.py  Normal file
@@ -0,0 +1,41 @@
from __future__ import annotations

import os
import re


def update_pkgbuild_version(
    pkgbuild_path: str, new_version: str, preview: bool = False
) -> None:
    if not os.path.exists(pkgbuild_path):
        print("[INFO] PKGBUILD not found, skipping.")
        return

    try:
        with open(pkgbuild_path, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read PKGBUILD: {exc}")
        return

    content, _ = re.subn(
        r"^(pkgver\s*=\s*)(.+)$",
        lambda m: f"{m.group(1)}{new_version}",
        content,
        flags=re.MULTILINE,
    )
    content, _ = re.subn(
        r"^(pkgrel\s*=\s*)(.+)$",
        lambda m: f"{m.group(1)}1",
        content,
        flags=re.MULTILINE,
    )

    if preview:
        print(f"[PREVIEW] Would update PKGBUILD to pkgver={new_version}, pkgrel=1")
        return

    with open(pkgbuild_path, "w", encoding="utf-8") as f:
        f.write(content)

    print(f"Updated PKGBUILD to pkgver={new_version}, pkgrel=1")
45  src/pkgmgr/actions/release/files/pyproject.py  Normal file
@@ -0,0 +1,45 @@
from __future__ import annotations

import os
import re


def update_pyproject_version(
    pyproject_path: str, new_version: str, preview: bool = False
) -> None:
    if not os.path.exists(pyproject_path):
        print(f"[INFO] pyproject.toml not found at: {pyproject_path}, skipping.")
        return

    try:
        with open(pyproject_path, "r", encoding="utf-8") as f:
            content = f.read()
    except OSError as exc:
        print(f"[WARN] Could not read pyproject.toml: {exc}")
        return

    m = re.search(r"(?ms)^\s*\[project\]\s*$.*?(?=^\s*\[|\Z)", content)
    if not m:
        raise RuntimeError("Missing [project] section in pyproject.toml")

    project_block = m.group(0)
    ver_pat = r'(?m)^(\s*version\s*=\s*")([^"]+)(")\s*$'

    new_block, count = re.subn(
        ver_pat,
        lambda mm: f"{mm.group(1)}{new_version}{mm.group(3)}",
        project_block,
    )
    if count == 0:
        raise RuntimeError("Missing version key in [project] section")

    new_content = content[: m.start()] + new_block + content[m.end() :]

    if preview:
        print(f"[PREVIEW] Would update pyproject.toml version to {new_version}")
        return

    with open(pyproject_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(f"Updated pyproject.toml version to {new_version}")
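This module carries the PEP 621 fix from 1.8.7: the substitution is scoped to the `[project]` table, so a `version` key in any other table is left untouched. A quick standalone sketch of the two regexes on an inline sample:

```python
import re

sample = (
    "[build-system]\n"
    'requires = ["setuptools"]\n'
    "\n"
    "[project]\n"
    'name = "demo"\n'
    'version = "1.0.0"\n'
)

# Grab the [project] table only, then substitute its version key.
m = re.search(r"(?ms)^\s*\[project\]\s*$.*?(?=^\s*\[|\Z)", sample)
assert m is not None
block, n = re.subn(
    r'(?m)^(\s*version\s*=\s*")([^"]+)(")\s*$',
    lambda mm: f"{mm.group(1)}1.9.2{mm.group(3)}",
    m.group(0),
)
assert n == 1
print(sample[: m.start()] + block + sample[m.end():])
```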
67  src/pkgmgr/actions/release/files/rpm_changelog.py  Normal file
@@ -0,0 +1,67 @@
from __future__ import annotations

import os
from datetime import datetime
from typing import Optional

from .debian import _get_debian_author


def update_spec_changelog(
    spec_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping spec changelog update.")
        return

    try:
        with open(spec_path, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file for changelog update: {exc}")
        return

    debian_version = f"{new_version}-1"
    now = datetime.now().astimezone()
    date_str = now.strftime("%a %b %d %Y")

    author_name, author_email = _get_debian_author()
    body_line = message.strip() if message else f"Automated release {new_version}."

    stanza = (
        f"* {date_str} {author_name} <{author_email}> - {debian_version}\n"
        f"- {body_line}\n\n"
    )

    marker = "%changelog"
    idx = content.find(marker)

    if idx == -1:
        new_content = content.rstrip() + "\n\n%changelog\n" + stanza
    else:
        before = content[: idx + len(marker)]
        after = content[idx + len(marker) :]
        new_content = before + "\n" + stanza + after.lstrip("\n")

    if preview:
        print(
            "[PREVIEW] Would update RPM %changelog section with the following stanza:\n"
            f"{stanza}"
        )
        return

    try:
        with open(spec_path, "w", encoding="utf-8") as f:
            f.write(new_content)
    except Exception as exc:
        print(f"[WARN] Failed to write updated spec changelog section: {exc}")
        return

    print(
        f"Updated RPM %changelog section in {os.path.basename(spec_path)} "
        f"for {package_name} {debian_version}"
    )
66  src/pkgmgr/actions/release/files/rpm_spec.py  Normal file
@@ -0,0 +1,66 @@
from __future__ import annotations

import os
import re


def update_spec_version(
    spec_path: str, new_version: str, preview: bool = False
) -> None:
    """
    Update the version in an RPM spec file, if present.
    """
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping.")
        return

    try:
        with open(spec_path, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file: {exc}")
        return

    ver_pattern = r"^(Version:\s*)(.+)$"
    new_content, ver_count = re.subn(
        ver_pattern,
        lambda m: f"{m.group(1)}{new_version}",
        content,
        flags=re.MULTILINE,
    )

    if ver_count == 0:
        print("[WARN] No 'Version:' line found in spec file.")

    rel_pattern = r"^(Release:\s*)(.+)$"

    def _release_repl(m: re.Match[str]) -> str:
        rest = m.group(2).strip()
        match = re.match(r"^(\d+)(.*)$", rest)
        suffix = match.group(2) if match else ""
        return f"{m.group(1)}1{suffix}"

    new_content, rel_count = re.subn(
        rel_pattern,
        _release_repl,
        new_content,
        flags=re.MULTILINE,
    )

    if rel_count == 0:
        print("[WARN] No 'Release:' line found in spec file.")

    if preview:
        print(
            "[PREVIEW] Would update spec file "
            f"{os.path.basename(spec_path)} to Version: {new_version}, Release: 1..."
        )
        return

    with open(spec_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(
        f"Updated spec file {os.path.basename(spec_path)} "
        f"to Version: {new_version}, Release: 1..."
    )
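The `_release_repl` helper is the spec-compatibility fix from 1.8.7: only the leading digits of the `Release:` value are reset to 1, and macro suffixes such as `%{?dist}` are carried over unchanged. A standalone sketch:

```python
import re

# Same replacement logic as the module above, run on a sample line.
def _release_repl(m: re.Match) -> str:
    rest = m.group(2).strip()
    match = re.match(r"^(\d+)(.*)$", rest)
    suffix = match.group(2) if match else ""
    return f"{m.group(1)}1{suffix}"

line = "Release:        3%{?dist}"
print(re.sub(r"^(Release:\s*)(.+)$", _release_repl, line))
# -> Release:        1%{?dist}
```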
@@ -80,7 +80,9 @@ def is_highest_version_tag(tag: str) -> bool:
         return True
 
     latest = max(parsed_all)
-    print(f"[INFO] Latest tag (parsed): v{'.'.join(map(str, latest))}, Current tag: {tag}")
+    print(
+        f"[INFO] Latest tag (parsed): v{'.'.join(map(str, latest))}, Current tag: {tag}"
+    )
     return parsed_current >= latest
 
 
@@ -93,7 +95,9 @@ def update_latest_tag(new_tag: str, *, preview: bool = False) -> None:
     - 'latest' is forced (floating tag), therefore the push uses --force.
     """
    target_ref = f"{new_tag}^{{}}"
-    print(f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})...")
+    print(
+        f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})..."
+    )
 
     tag_force_annotated(
         name="latest",
@@ -76,7 +76,9 @@ def _release_impl(
     if paths.arch_pkgbuild:
         update_pkgbuild_version(paths.arch_pkgbuild, new_ver_str, preview=preview)
     else:
-        print("[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping.")
+        print(
+            "[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping."
+        )
 
     if paths.rpm_spec:
         update_spec_version(paths.rpm_spec, new_ver_str, preview=preview)
@@ -123,7 +125,9 @@ def _release_impl(
         paths.rpm_spec,
         paths.debian_changelog,
     ]
-    existing_files = [p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)]
+    existing_files = [
+        p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)
+    ]
 
     if preview:
         add(existing_files, preview=True)
@@ -135,13 +139,17 @@ def _release_impl(
         if is_highest_version_tag(new_tag):
             update_latest_tag(new_tag, preview=True)
         else:
-            print(f"[PREVIEW] Skipping 'latest' update (tag {new_tag} is not the highest).")
+            print(
+                f"[PREVIEW] Skipping 'latest' update (tag {new_tag} is not the highest)."
+            )
 
         if close and branch not in ("main", "master"):
             if force:
                 print(f"[PREVIEW] Would delete branch {branch} (forced).")
             else:
-                print(f"[PREVIEW] Would ask whether to delete branch {branch} after release.")
+                print(
+                    f"[PREVIEW] Would ask whether to delete branch {branch} after release."
+                )
         return
 
     add(existing_files, preview=False)
@@ -157,7 +165,9 @@ def _release_impl(
         if is_highest_version_tag(new_tag):
             update_latest_tag(new_tag, preview=False)
         else:
-            print(f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest).")
+            print(
+                f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest)."
+            )
     except GitRunError as exc:
         print(f"[WARN] Failed to update floating 'latest' tag for {new_tag}: {exc}")
         print("'latest' tag was not updated.")
@@ -166,7 +176,9 @@ def _release_impl(
 
     if close:
         if branch in ("main", "master"):
-            print(f"[INFO] close=True but current branch is {branch}; skipping branch deletion.")
+            print(
+                f"[INFO] close=True but current branch is {branch}; skipping branch deletion."
+            )
             return
 
         if not should_delete_branch(force=force):
@@ -55,7 +55,9 @@ def clone_repos(
 
         clone_url = _build_clone_url(repo, clone_mode)
         if not clone_url:
-            print(f"[WARNING] Cannot build clone URL for '{repo_identifier}'. Skipping.")
+            print(
+                f"[WARNING] Cannot build clone URL for '{repo_identifier}'. Skipping."
+            )
             continue
 
         shallow = clone_mode == "shallow"
@@ -84,7 +86,11 @@ def clone_repos(
                 continue
 
             print(f"[WARNING] SSH clone failed for '{repo_identifier}': {exc}")
-            choice = input("Do you want to attempt HTTPS clone instead? (y/N): ").strip().lower()
+            choice = (
+                input("Do you want to attempt HTTPS clone instead? (y/N): ")
+                .strip()
+                .lower()
+            )
             if choice != "y":
                 print(f"[INFO] HTTPS clone not attempted for '{repo_identifier}'.")
                 continue
@@ -63,6 +63,4 @@ def _strip_git_suffix(name: str) -> str:
 
 def _ensure_valid_repo_name(name: str) -> None:
     if not _NAME_RE.fullmatch(name):
-        raise ValueError(
-            "Repository name must match: lowercase a-z, 0-9, '_' and '-'."
-        )
+        raise ValueError("Repository name must match: lowercase a-z, 0-9, '_' and '-'.")
@@ -66,9 +66,7 @@ class TemplateRenderer:
         for root, _, files in os.walk(self.templates_dir):
             for fn in files:
                 if fn.endswith(".j2"):
-                    rel = os.path.relpath(
-                        os.path.join(root, fn), self.templates_dir
-                    )
+                    rel = os.path.relpath(os.path.join(root, fn), self.templates_dir)
                     print(f"[Preview] Would render template: {rel} -> {rel[:-3]}")
 
     @staticmethod
@@ -24,9 +24,13 @@ def deinstall_repos(
 
         # Remove alias link/file (interactive)
         if os.path.exists(alias_path):
-            confirm = input(
-                f"Are you sure you want to delete link '{alias_path}' for {repo_identifier}? [y/N]: "
-            ).strip().lower()
+            confirm = (
+                input(
+                    f"Are you sure you want to delete link '{alias_path}' for {repo_identifier}? [y/N]: "
+                )
+                .strip()
+                .lower()
+            )
             if confirm == "y":
                 if preview:
                     print(f"[Preview] Would remove link '{alias_path}'.")
@@ -3,19 +3,30 @@ import os
 from pkgmgr.core.repository.identifier import get_repo_identifier
 from pkgmgr.core.repository.dir import get_repo_dir
 
 
 def delete_repos(selected_repos, repositories_base_dir, all_repos, preview=False):
     for repo in selected_repos:
         repo_identifier = get_repo_identifier(repo, all_repos)
         repo_dir = get_repo_dir(repositories_base_dir, repo)
         if os.path.exists(repo_dir):
-            confirm = input(f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: ").strip().lower()
+            confirm = (
+                input(
+                    f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: "
+                )
+                .strip()
+                .lower()
+            )
             if confirm == "y":
                 if preview:
-                    print(f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}.")
+                    print(
+                        f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}."
+                    )
                 else:
                     try:
                         shutil.rmtree(repo_dir)
-                        print(f"Deleted repository directory '{repo_dir}' for {repo_identifier}.")
+                        print(
+                            f"Deleted repository directory '{repo_dir}' for {repo_identifier}."
+                        )
                     except Exception as e:
                         print(f"Error deleting '{repo_dir}' for {repo_identifier}: {e}")
         else:
@@ -233,9 +233,7 @@ def list_repositories(
             categories.append(str(repo["category"]))
 
         yaml_tags: List[str] = list(map(str, repo.get("tags", [])))
-        display_tags: List[str] = sorted(
-            set(yaml_tags + list(map(str, extra_tags)))
-        )
+        display_tags: List[str] = sorted(set(yaml_tags + list(map(str, extra_tags))))
 
         rows.append(
             {
@@ -288,13 +286,7 @@ def list_repositories(
         status_padded = status.ljust(status_width)
         status_colored = _color_status(status_padded)
 
-        print(
-            f"{ident_col} "
-            f"{status_colored} "
-            f"{cat_col} "
-            f"{tag_col} "
-            f"{dir_col}"
-        )
+        print(f"{ident_col} {status_colored} {cat_col} {tag_col} {dir_col}")
 
     # ------------------------------------------------------------------
     # Detailed section (alias value red, same status coloring)
@@ -55,12 +55,16 @@ class UpdateManager:
                 code = exc.code if isinstance(exc.code, int) else str(exc.code)
                 failures.append((identifier, f"pull failed (exit={code})"))
                 if not quiet:
-                    print(f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing...")
+                    print(
+                        f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing..."
+                    )
                 continue
             except Exception as exc:
                 failures.append((identifier, f"pull failed: {exc}"))
                 if not quiet:
-                    print(f"[Warning] update: pull failed for {identifier}: {exc}. Continuing...")
+                    print(
+                        f"[Warning] update: pull failed for {identifier}: {exc}. Continuing..."
+                    )
                 continue
 
             try:
@@ -82,12 +86,16 @@ class UpdateManager:
                 code = exc.code if isinstance(exc.code, int) else str(exc.code)
                 failures.append((identifier, f"install failed (exit={code})"))
                 if not quiet:
-                    print(f"[Warning] update: install failed for {identifier} (exit={code}). Continuing...")
+                    print(
+                        f"[Warning] update: install failed for {identifier} (exit={code}). Continuing..."
+                    )
                 continue
             except Exception as exc:
                 failures.append((identifier, f"install failed: {exc}"))
                 if not quiet:
-                    print(f"[Warning] update: install failed for {identifier}: {exc}. Continuing...")
+                    print(
+                        f"[Warning] update: install failed for {identifier}: {exc}. Continuing..."
+                    )
                 continue
 
         if failures and not quiet:
@@ -31,6 +31,7 @@ class OSReleaseInfo:
     """
     Minimal /etc/os-release representation for distro detection.
     """
+
     id: str = ""
     id_like: str = ""
     pretty_name: str = ""
@@ -63,4 +64,6 @@ class OSReleaseInfo:
 
     def is_fedora_family(self) -> bool:
         ids = self.ids()
-        return bool(ids.intersection({"fedora", "rhel", "centos", "rocky", "almalinux"}))
+        return bool(
+            ids.intersection({"fedora", "rhel", "centos", "rocky", "almalinux"})
+        )
@@ -58,7 +58,9 @@ class SystemUpdater:
             run_command("sudo pacman -Syu --noconfirm", preview=preview)
             return
 
-        print("[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman).")
+        print(
+            "[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman)."
+        )
 
     def _update_debian(self, *, preview: bool) -> None:
         from pkgmgr.core.command.run import run_command
@@ -67,7 +69,9 @@ class SystemUpdater:
         apt_get = shutil.which("apt-get")
 
         if not (sudo and apt_get):
-            print("[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get).")
+            print(
+                "[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get)."
+            )
             return
 
         env = "DEBIAN_FRONTEND=noninteractive"
@@ -29,6 +29,7 @@ For details on any command, run:
     \033[1mpkgmgr <command> --help\033[0m
 """
 
+
 def main() -> None:
     """
     Entry point for the pkgmgr CLI.
@@ -41,9 +42,7 @@ def main() -> None:
     repositories_dir = os.path.expanduser(
         directories.get("repositories", "~/Repositories")
     )
-    binaries_dir = os.path.expanduser(
-        directories.get("binaries", "~/.local/bin")
-    )
+    binaries_dir = os.path.expanduser(directories.get("binaries", "~/.local/bin"))
 
     # Ensure the merged config actually contains the resolved directories
     config_merged.setdefault("directories", {})
@@ -135,9 +135,7 @@ def handle_changelog(
             target_tag=range_arg,
         )
         if cur_tag is None:
-            print(
-                f"[WARN] Tag {range_arg!r} not found or not a SemVer tag."
-            )
+            print(f"[WARN] Tag {range_arg!r} not found or not a SemVer tag.")
             print("[INFO] Falling back to full history.")
             from_ref = None
             to_ref = None
@@ -1,3 +1,4 @@
+# src/pkgmgr/cli/commands/config.py
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
@@ -38,27 +39,16 @@ def _load_user_config(user_config_path: str) -> Dict[str, Any]:
 
 def _find_defaults_source_dir() -> Optional[str]:
     """
-    Find the directory inside the installed pkgmgr package OR the
-    project root that contains default config files.
+    Find the directory inside the installed pkgmgr package that contains
+    the default config files.
 
-    Preferred locations (in this order):
-    - <pkg_root>/config_defaults
+    Preferred location:
     - <pkg_root>/config
-    - <project_root>/config_defaults
-    - <project_root>/config
     """
     import pkgmgr  # local import to avoid circular deps
 
     pkg_root = Path(pkgmgr.__file__).resolve().parent
-    project_root = pkg_root.parent
+    cand = pkg_root / "config"
 
-    candidates = [
-        pkg_root / "config_defaults",
-        pkg_root / "config",
-        project_root / "config_defaults",
-        project_root / "config",
-    ]
-    for cand in candidates:
-        if cand.is_dir():
-            return str(cand)
+    if cand.is_dir():
+        return str(cand)
     return None
@@ -73,7 +63,7 @@ def _update_default_configs(user_config_path: str) -> None:
     source_dir = _find_defaults_source_dir()
     if not source_dir:
         print(
-            "[WARN] No config_defaults or config directory found in "
+            "[WARN] No config directory found in "
             "pkgmgr installation. Nothing to update."
         )
         return
@@ -88,7 +78,6 @@ def _update_default_configs(user_config_path: str) -> None:
         if not (lower.endswith(".yml") or lower.endswith(".yaml")):
             continue
         if name == "config.yaml":
-            # Never overwrite the user config template / live config
             continue
 
         src = os.path.join(source_dir, name)
@@ -102,48 +91,28 @@ def handle_config(args, ctx: CLIContext) -> None:
     """
     Handle 'pkgmgr config' subcommands.
     """
 
     user_config_path = ctx.user_config_path
 
-    # ------------------------------------------------------------
-    # config show
-    # ------------------------------------------------------------
     if args.subcommand == "show":
         if args.all or (not args.identifiers):
-            # Full merged config view
             show_config([], user_config_path, full_config=True)
         else:
-            # Show only matching entries from user config
             user_config = _load_user_config(user_config_path)
             selected = resolve_repos(
-                args.identifiers,
-                user_config.get("repositories", []),
+                args.identifiers, user_config.get("repositories", [])
             )
             if selected:
-                show_config(
-                    selected,
-                    user_config_path,
-                    full_config=False,
-                )
+                show_config(selected, user_config_path, full_config=False)
         return
 
-    # ------------------------------------------------------------
-    # config add
-    # ------------------------------------------------------------
     if args.subcommand == "add":
         interactive_add(ctx.config_merged, user_config_path)
         return
 
-    # ------------------------------------------------------------
-    # config edit
-    # ------------------------------------------------------------
     if args.subcommand == "edit":
         run_command(f"nano {user_config_path}")
         return
 
-    # ------------------------------------------------------------
-    # config init
-    # ------------------------------------------------------------
     if args.subcommand == "init":
         user_config = _load_user_config(user_config_path)
         config_init(
@@ -154,9 +123,6 @@ def handle_config(args, ctx: CLIContext) -> None:
         )
         return
 
-    # ------------------------------------------------------------
-    # config delete
-    # ------------------------------------------------------------
     if args.subcommand == "delete":
         user_config = _load_user_config(user_config_path)
 
@@ -167,10 +133,7 @@ def handle_config(args, ctx: CLIContext) -> None:
             )
             return
 
-        to_delete = resolve_repos(
-            args.identifiers,
-            user_config.get("repositories", []),
-        )
+        to_delete = resolve_repos(args.identifiers, user_config.get("repositories", []))
         new_repos = [
             entry
             for entry in user_config.get("repositories", [])
@@ -181,9 +144,6 @@ def handle_config(args, ctx: CLIContext) -> None:
         print(f"Deleted {len(to_delete)} entries from user config.")
         return
 
-    # ------------------------------------------------------------
-    # config ignore
-    # ------------------------------------------------------------
    if args.subcommand == "ignore":
         user_config = _load_user_config(user_config_path)
 
@@ -194,17 +154,10 @@ def handle_config(args, ctx: CLIContext) -> None:
             )
             return
 
-        to_modify = resolve_repos(
-            args.identifiers,
-            user_config.get("repositories", []),
-        )
+        to_modify = resolve_repos(args.identifiers, user_config.get("repositories", []))
 
         for entry in user_config["repositories"]:
-            key = (
-                entry.get("provider"),
-                entry.get("account"),
-                entry.get("repository"),
-            )
+            key = (entry.get("provider"), entry.get("account"), entry.get("repository"))
             for mod in to_modify:
                 mod_key = (
                     mod.get("provider"),
@@ -213,28 +166,14 @@ def handle_config(args, ctx: CLIContext) -> None:
                 )
                 if key == mod_key:
                     entry["ignore"] = args.set == "true"
-                    print(
-                        f"Set ignore for {key} to {entry['ignore']}"
-                    )
+                    print(f"Set ignore for {key} to {entry['ignore']}")
 
         save_user_config(user_config, user_config_path)
         return
 
-    # ------------------------------------------------------------
-    # config update
-    # ------------------------------------------------------------
     if args.subcommand == "update":
-        """
-        Copy default YAML configs from the installed package into the
-        user's ~/.config/pkgmgr directory.
-
-        This will overwrite files with the same name (except config.yaml).
-        """
         _update_default_configs(user_config_path)
         return
 
-    # ------------------------------------------------------------
-    # Unknown subcommand
-    # ------------------------------------------------------------
     print(f"Unknown config subcommand: {args.subcommand}")
     sys.exit(2)
@@ -4,7 +4,13 @@ from __future__ import annotations
 import sys
 from typing import Any, Dict, List

-from pkgmgr.actions.mirror import diff_mirrors, list_mirrors, merge_mirrors, setup_mirrors
+from pkgmgr.actions.mirror import (
+    diff_mirrors,
+    list_mirrors,
+    merge_mirrors,
+    set_mirror_visibility,
+    setup_mirrors,
+)
 from pkgmgr.cli.context import CLIContext

 Repository = Dict[str, Any]
@@ -25,6 +31,7 @@ def handle_mirror_command(
     - mirror setup
     - mirror check
     - mirror provision
+    - mirror visibility
     """
     if not selected:
         print("[INFO] No repositories selected for 'mirror' command.")
@@ -56,11 +63,15 @@ def handle_mirror_command(
         preview = getattr(args, "preview", False)

         if source == target:
-            print("[ERROR] For 'mirror merge', source and target must differ (config vs file).")
+            print(
+                "[ERROR] For 'mirror merge', source and target must differ (config vs file)."
+            )
             sys.exit(2)

         explicit_config_path = getattr(args, "config_path", None)
-        user_config_path = explicit_config_path or getattr(ctx, "user_config_path", None)
+        user_config_path = explicit_config_path or getattr(
+            ctx, "user_config_path", None
+        )

         merge_mirrors(
             selected_repos=selected,
@@ -83,6 +94,7 @@ def handle_mirror_command(
             local=True,
             remote=False,
             ensure_remote=False,
+            ensure_visibility=None,
         )
         return

@@ -96,11 +108,14 @@ def handle_mirror_command(
             local=False,
             remote=True,
             ensure_remote=False,
+            ensure_visibility=None,
         )
         return

     if subcommand == "provision":
         preview = getattr(args, "preview", False)
+        public = bool(getattr(args, "public", False))

         setup_mirrors(
             selected_repos=selected,
             repositories_base_dir=ctx.repositories_base_dir,
@@ -109,6 +124,23 @@ def handle_mirror_command(
             local=False,
             remote=True,
             ensure_remote=True,
+            ensure_visibility="public" if public else None,
+        )
+        return
+
+    if subcommand == "visibility":
+        preview = getattr(args, "preview", False)
+        visibility = getattr(args, "visibility", None)
+        if visibility not in ("private", "public"):
+            print("[ERROR] mirror visibility expects 'private' or 'public'.")
+            sys.exit(2)
+
+        set_mirror_visibility(
+            selected_repos=selected,
+            repositories_base_dir=ctx.repositories_base_dir,
+            all_repos=ctx.all_repositories,
+            visibility=visibility,
+            preview=preview,
         )
         return
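A minimal sketch of how the new dispatch above is exercised, assuming the argparse wiring added later in this diff; the Namespace fields here simply mirror the handler's getattr calls and are illustrative, not part of the change:

from argparse import Namespace

# `pkgmgr mirror visibility public --all` roughly yields:
args = Namespace(command="mirror", subcommand="visibility",
                 visibility="public", preview=False)
# handle_mirror_command(args, ctx, selected) validates the value and then
# delegates to set_mirror_visibility(..., visibility="public", preview=False).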
@@ -18,7 +18,9 @@ def handle_publish(args, ctx: CLIContext, selected: List[Repository]) -> None:

     for repo in selected:
         identifier = get_repo_identifier(repo, ctx.all_repositories)
-        repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
+        repo_dir = repo.get("directory") or get_repo_dir(
+            ctx.repositories_base_dir, repo
+        )

         if not os.path.isdir(repo_dir):
             print(f"[WARN] Skipping {identifier}: directory missing.")
@@ -36,9 +36,13 @@ def handle_release(
         identifier = get_repo_identifier(repo, ctx.all_repositories)

         try:
-            repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
+            repo_dir = repo.get("directory") or get_repo_dir(
+                ctx.repositories_base_dir, repo
+            )
         except Exception as exc:
-            print(f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}")
+            print(
+                f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}"
+            )
             continue

         if not os.path.isdir(repo_dir):
@@ -32,9 +32,8 @@ def _resolve_repository_directory(repository: Repository, ctx: CLIContext) -> st
     if repo_dir:
         return repo_dir

-    base_dir = (
-        getattr(ctx, "repositories_base_dir", None)
-        or getattr(ctx, "repositories_dir", None)
-    )
+    base_dir = getattr(ctx, "repositories_base_dir", None) or getattr(
+        ctx, "repositories_dir", None
+    )
     if not base_dir:
         raise RuntimeError(
@@ -33,8 +33,7 @@ def add_branch_subparsers(
         "name",
         nargs="?",
         help=(
-            "Name of the new branch (optional; will be asked interactively "
-            "if omitted)"
+            "Name of the new branch (optional; will be asked interactively if omitted)"
         ),
     )
     branch_open.add_argument(
@@ -54,8 +53,7 @@ def add_branch_subparsers(
         "name",
         nargs="?",
         help=(
-            "Name of the branch to close (optional; current branch is used "
-            "if omitted)"
+            "Name of the branch to close (optional; current branch is used if omitted)"
         ),
     )
     branch_close.add_argument(
@@ -84,8 +82,7 @@ def add_branch_subparsers(
         "name",
         nargs="?",
         help=(
-            "Name of the branch to drop (optional; current branch is used "
-            "if omitted)"
+            "Name of the branch to drop (optional; current branch is used if omitted)"
         ),
     )
     branch_drop.add_argument(
@@ -1,4 +1,3 @@
-# src/pkgmgr/cli/parser/mirror_cmd.py
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-

@@ -12,7 +11,7 @@ from .common import add_identifier_arguments
 def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
     mirror_parser = subparsers.add_parser(
         "mirror",
-        help="Mirror-related utilities (list, diff, merge, setup, check, provision)",
+        help="Mirror-related utilities (list, diff, merge, setup, check, provision, visibility)",
     )
     mirror_subparsers = mirror_parser.add_subparsers(
         dest="subcommand",
@@ -20,7 +19,9 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
         required=True,
     )

-    mirror_list = mirror_subparsers.add_parser("list", help="List configured mirrors for repositories")
+    mirror_list = mirror_subparsers.add_parser(
+        "list", help="List configured mirrors for repositories"
+    )
     add_identifier_arguments(mirror_list)
     mirror_list.add_argument(
         "--source",
@@ -29,15 +30,21 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
         help="Which mirror source to show.",
     )

-    mirror_diff = mirror_subparsers.add_parser("diff", help="Show differences between config mirrors and MIRRORS file")
+    mirror_diff = mirror_subparsers.add_parser(
+        "diff", help="Show differences between config mirrors and MIRRORS file"
+    )
     add_identifier_arguments(mirror_diff)

     mirror_merge = mirror_subparsers.add_parser(
         "merge",
         help="Merge mirrors between config and MIRRORS file (example: pkgmgr mirror merge config file --all)",
     )
-    mirror_merge.add_argument("source", choices=["config", "file"], help="Source of mirrors.")
-    mirror_merge.add_argument("target", choices=["config", "file"], help="Target of mirrors.")
+    mirror_merge.add_argument(
+        "source", choices=["config", "file"], help="Source of mirrors."
+    )
+    mirror_merge.add_argument(
+        "target", choices=["config", "file"], help="Target of mirrors."
+    )
     add_identifier_arguments(mirror_merge)
     mirror_merge.add_argument(
         "--config-path",
@@ -60,4 +67,20 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
         "provision",
         help="Provision remote repositories via provider APIs (create missing repos).",
     )
+    mirror_provision.add_argument(
+        "--public",
+        action="store_true",
+        help="After ensuring repos exist, enforce public visibility on the remote provider.",
+    )
     add_identifier_arguments(mirror_provision)
+
+    mirror_visibility = mirror_subparsers.add_parser(
+        "visibility",
+        help="Set visibility (public/private) for all remote git mirrors via provider APIs.",
+    )
+    mirror_visibility.add_argument(
+        "visibility",
+        choices=["private", "public"],
+        help="Target visibility for all git mirrors.",
+    )
+    add_identifier_arguments(mirror_visibility)
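A quick sanity sketch of the parser wiring above, assuming it runs inside this module (so add_mirror_subparsers and add_identifier_arguments are in scope) and that add_identifier_arguments contributes the --all flag, as its use with mirror merge suggests:

import argparse

parser = argparse.ArgumentParser(prog="pkgmgr")
add_mirror_subparsers(parser.add_subparsers(dest="command"))

ns = parser.parse_args(["mirror", "visibility", "public", "--all"])
# Expected: ns.subcommand == "visibility" and ns.visibility == "public",
# matching the fields the handler reads via getattr.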
@@ -48,9 +48,6 @@ def add_navigation_subparsers(
         "--command",
         nargs=argparse.REMAINDER,
         dest="shell_command",
-        help=(
-            "The shell command (and its arguments) to execute in each "
-            "repository"
-        ),
+        help=("The shell command (and its arguments) to execute in each repository"),
         default=[],
     )
@@ -53,10 +53,7 @@ def _add_proxy_identifier_arguments(parser: argparse.ArgumentParser) -> None:
     parser.add_argument(
         "identifiers",
         nargs="*",
-        help=(
-            "Identifier(s) for repositories. "
-            "Default: Repository of current folder."
-        ),
+        help=("Identifier(s) for repositories. Default: Repository of current folder."),
     )
     parser.add_argument(
         "--all",
@@ -118,12 +115,7 @@ def _proxy_has_explicit_selection(args: argparse.Namespace) -> bool:
     string_filter = getattr(args, "string", "") or ""

     # Proxy commands currently do not support --tag, so it is not checked here.
-    return bool(
-        use_all
-        or identifiers
-        or categories
-        or string_filter
-    )
+    return bool(use_all or identifiers or categories or string_filter)


 def _select_repo_for_current_directory(
@@ -204,9 +196,7 @@ def maybe_handle_proxy(args: argparse.Namespace, ctx: CLIContext) -> bool:
     If the top-level command is one of the proxy subcommands
     (git / docker / docker compose), handle it here and return True.
     """
-    all_proxy_subcommands = {
-        sub for subs in PROXY_COMMANDS.values() for sub in subs
-    }
+    all_proxy_subcommands = {sub for subs in PROXY_COMMANDS.values() for sub in subs}

     if args.command not in all_proxy_subcommands:
         return False
@@ -22,9 +22,8 @@ def resolve_repository_path(repository: Repository, ctx: CLIContext) -> str:
     if value:
         return value

-    base_dir = (
-        getattr(ctx, "repositories_base_dir", None)
-        or getattr(ctx, "repositories_dir", None)
-    )
+    base_dir = getattr(ctx, "repositories_base_dir", None) or getattr(
+        ctx, "repositories_dir", None
+    )
     if not base_dir:
         raise RuntimeError(
@@ -57,7 +57,9 @@ def _build_workspace_filename(identifiers: List[str]) -> str:
     return "_".join(sorted_identifiers) + ".code-workspace"


-def _build_workspace_data(selected: List[Repository], ctx: CLIContext) -> Dict[str, Any]:
+def _build_workspace_data(
+    selected: List[Repository], ctx: CLIContext
+) -> Dict[str, Any]:
     folders = [{"path": resolve_repository_path(repo, ctx)} for repo in selected]
     return {
         "folders": folders,
@@ -2,6 +2,7 @@ import os
 import hashlib
 import re

+
 def generate_alias(repo, bin_dir, existing_aliases):
     """
     Generate an alias for a repository based on its repository name.
@@ -98,8 +98,7 @@ def create_ink(
     if alias_name == repo_identifier:
         if not quiet:
             print(
-                f"Alias '{alias_name}' equals identifier. "
-                "Skipping alias creation."
+                f"Alias '{alias_name}' equals identifier. Skipping alias creation."
             )
         return

@@ -8,6 +8,7 @@ class CliLayer(str, Enum):
     """
     CLI layer precedence (lower number = stronger layer).
     """
+
     OS_PACKAGES = "os-packages"
     NIX = "nix"
     PYTHON = "python"
@@ -34,11 +34,7 @@ def _nix_binary_candidates(home: str, names: List[str]) -> List[str]:
     """
     Build possible Nix profile binary paths for a list of candidate names.
     """
-    return [
-        os.path.join(home, ".nix-profile", "bin", name)
-        for name in names
-        if name
-    ]
+    return [os.path.join(home, ".nix-profile", "bin", name) for name in names if name]


 def _path_binary_candidates(names: List[str]) -> List[str]:
@@ -148,7 +144,8 @@ def resolve_command_for_repo(

     # c) Nix profile binaries
     nix_binaries = [
-        path for path in _nix_binary_candidates(home, candidate_names)
+        path
+        for path in _nix_binary_candidates(home, candidate_names)
         if _is_executable(path)
     ]
     nix_binary = nix_binaries[0] if nix_binaries else None
@@ -1,3 +1,4 @@
+# src/pkgmgr/core/config/load.py
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-

@@ -7,31 +8,28 @@ Load and merge pkgmgr configuration.
 Layering rules:

 1. Defaults / category files:
-   - Zuerst werden alle *.yml/*.yaml (außer config.yaml) im
-     Benutzerverzeichnis geladen:
+   - First load all *.yml/*.yaml (except config.yaml) from the user directory:
         ~/.config/pkgmgr/

-   - Falls dort keine passenden Dateien existieren, wird auf die im
-     Paket / Projekt mitgelieferten Config-Verzeichnisse zurückgegriffen:
+   - If no matching files exist there, fall back to defaults shipped with pkgmgr:

-        <pkg_root>/config_defaults
         <pkg_root>/config
-        <project_root>/config_defaults
-        <project_root>/config

-     Dabei werden ebenfalls alle *.yml/*.yaml als Layer geladen.
+     During development (src-layout), we optionally also check:
+        <repo_root>/config

-   - Der Dateiname ohne Endung (stem) wird als Kategorie-Name
-     verwendet und in repo["category_files"] eingetragen.
+     All *.yml/*.yaml files are loaded as layers.
+
+   - The filename stem is used as category name and stored in repo["category_files"].

 2. User config:
-   - ~/.config/pkgmgr/config.yaml (oder der übergebene Pfad)
-     wird geladen und PER LISTEN-MERGE über die Defaults gelegt:
+   - ~/.config/pkgmgr/config.yaml (or the provided path)
+     is loaded and merged over defaults:
        - directories: dict deep-merge
-       - repositories: per _merge_repo_lists (kein Löschen!)
+       - repositories: per _merge_repo_lists (no deletions!)

-3. Ergebnis:
-   - Ein dict mit mindestens:
+3. Result:
+   - A dict with at least:
        config["directories"] (dict)
        config["repositories"] (list[dict])
 """
@@ -40,7 +38,7 @@ from __future__ import annotations

 import os
 from pathlib import Path
-from typing import Any, Dict, List, Tuple, Optional
+from typing import Any, Dict, List, Optional, Tuple

 import yaml

@@ -48,9 +46,10 @@ Repo = Dict[str, Any]


 # ---------------------------------------------------------------------------
-# Hilfsfunktionen
+# Helper functions
 # ---------------------------------------------------------------------------


 def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
     """
     Recursively merge two dictionaries.
@@ -58,11 +57,7 @@ def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any
     Values from `override` win over values in `base`.
     """
     for key, value in override.items():
-        if (
-            key in base
-            and isinstance(base[key], dict)
-            and isinstance(value, dict)
-        ):
+        if key in base and isinstance(base[key], dict) and isinstance(value, dict):
             _deep_merge(base[key], value)
         else:
             base[key] = value
@@ -88,19 +83,16 @@ def _merge_repo_lists(
     """
     Merge two repository lists, matching by (provider, account, repository).

-    - Wenn ein Repo aus new_list noch nicht existiert, wird es hinzugefügt.
-    - Wenn es existiert, werden seine Felder per Deep-Merge überschrieben.
-    - Wenn category_name gesetzt ist, wird dieser in
-      repo["category_files"] eingetragen.
+    - If a repo from new_list does not exist, it is added.
+    - If it exists, its fields are deep-merged (override wins).
+    - If category_name is set, it is appended to repo["category_files"].
     """
-    index: Dict[Tuple[str, str, str], Repo] = {
-        _repo_key(r): r for r in base_list
-    }
+    index: Dict[Tuple[str, str, str], Repo] = {_repo_key(r): r for r in base_list}

     for src in new_list:
         key = _repo_key(src)
         if key == ("", "", ""):
-            # Unvollständiger Schlüssel -> einfach anhängen
+            # Incomplete key -> append as-is
             dst = dict(src)
             if category_name:
                 dst.setdefault("category_files", [])
@@ -148,10 +140,9 @@ def _load_layer_dir(
     """
     Load all *.yml/*.yaml from a directory as layered defaults.

-    - skip_filename: Dateiname (z.B. "config.yaml"), der ignoriert
-      werden soll (z.B. User-Config).
+    - skip_filename: filename (e.g. "config.yaml") to ignore.

-    Rückgabe:
+    Returns:
         {
             "directories": {...},
             "repositories": [...],
@@ -176,7 +167,7 @@ def _load_layer_dir(

     for path in yaml_files:
         data = _load_yaml_file(path)
-        category_name = path.stem  # Dateiname ohne .yml/.yaml
+        category_name = path.stem

         dirs = data.get("directories")
         if isinstance(dirs, dict):
@@ -197,8 +188,11 @@ def _load_layer_dir(

 def _load_defaults_from_package_or_project() -> Dict[str, Any]:
     """
-    Fallback: load default configs from various possible install or development
-    layouts (pip-installed, editable install, source repo with src/ layout).
+    Fallback: load default configs from possible install or dev layouts.
+
+    Supported locations:
+    - <pkg_root>/config   (installed wheel / editable)
+    - <repo_root>/config  (optional dev fallback when pkg_root is src/pkgmgr)
     """
     try:
         import pkgmgr  # type: ignore
@@ -206,25 +200,16 @@ def _load_defaults_from_package_or_project() -> Dict[str, Any]:
         return {"directories": {}, "repositories": []}

     pkg_root = Path(pkgmgr.__file__).resolve().parent
-    roots = set()
+    candidates: List[Path] = []

-    # Case 1: installed package (site-packages/pkgmgr)
-    roots.add(pkg_root)
+    # Always prefer package-internal config dir
+    candidates.append(pkg_root / "config")

-    # Case 2: parent directory (site-packages/, src/)
-    roots.add(pkg_root.parent)
-
-    # Case 3: src-layout during development:
-    #   repo_root/src/pkgmgr -> repo_root
+    # Dev fallback: repo_root/src/pkgmgr -> repo_root/config
     parent = pkg_root.parent
     if parent.name == "src":
-        roots.add(parent.parent)
-
-    # Candidate config dirs
-    candidates = []
-    for root in roots:
-        candidates.append(root / "config_defaults")
-        candidates.append(root / "config")
+        repo_root = parent.parent
+        candidates.append(repo_root / "config")

     for cand in candidates:
         defaults = _load_layer_dir(cand, skip_filename=None)
@@ -233,66 +218,68 @@ def _load_defaults_from_package_or_project() -> Dict[str, Any]:

     return {"directories": {}, "repositories": []}


 # ---------------------------------------------------------------------------
-# Hauptfunktion
+# Public API
 # ---------------------------------------------------------------------------


 def load_config(user_config_path: str) -> Dict[str, Any]:
     """
     Load and merge configuration for pkgmgr.

-    Schritte:
-    1. Ermittle ~/.config/pkgmgr/ (oder das Verzeichnis von user_config_path).
-    2. Lade alle *.yml/*.yaml dort (außer der User-Config selbst) als
-       Defaults / Kategorie-Layer.
-    3. Wenn dort nichts gefunden wurde, Fallback auf Paket/Projekt.
-    4. Lade die User-Config-Datei selbst (falls vorhanden).
+    Steps:
+    1. Determine ~/.config/pkgmgr/ (or dir of user_config_path).
+    2. Load all *.yml/*.yaml in that dir (except the user config file) as defaults.
+    3. If nothing found, fall back to package defaults.
+    4. Load the user config file (if present).
     5. Merge:
-       - directories: deep-merge (Defaults <- User)
-       - repositories: _merge_repo_lists (Defaults <- User)
+       - directories: deep-merge (defaults <- user)
+       - repositories: _merge_repo_lists (defaults <- user)
     """
     user_config_path_expanded = os.path.expanduser(user_config_path)
     user_cfg_path = Path(user_config_path_expanded)

     config_dir = user_cfg_path.parent
     if not str(config_dir):
-        # Fallback, falls jemand nur "config.yaml" übergibt
         config_dir = Path(os.path.expanduser("~/.config/pkgmgr"))
     config_dir.mkdir(parents=True, exist_ok=True)

     user_cfg_name = user_cfg_path.name

-    # 1+2) Defaults / Kategorie-Layer aus dem User-Verzeichnis
+    # 1+2) Defaults from user directory
     defaults = _load_layer_dir(config_dir, skip_filename=user_cfg_name)

-    # 3) Falls dort nichts gefunden wurde, Fallback auf Paket/Projekt
+    # 3) Fallback to package defaults
     if not defaults["directories"] and not defaults["repositories"]:
         defaults = _load_defaults_from_package_or_project()

     defaults.setdefault("directories", {})
     defaults.setdefault("repositories", [])

-    # 4) User-Config
+    # 4) User config
     user_cfg: Dict[str, Any] = {}
     if user_cfg_path.is_file():
         user_cfg = _load_yaml_file(user_cfg_path)
     user_cfg.setdefault("directories", {})
     user_cfg.setdefault("repositories", [])

-    # 5) Merge: directories deep-merge, repositories listen-merge
+    # 5) Merge
     merged: Dict[str, Any] = {}

-    # directories
     merged["directories"] = {}
     _deep_merge(merged["directories"], defaults["directories"])
     _deep_merge(merged["directories"], user_cfg["directories"])

-    # repositories
     merged["repositories"] = []
-    _merge_repo_lists(merged["repositories"], defaults["repositories"], category_name=None)
-    _merge_repo_lists(merged["repositories"], user_cfg["repositories"], category_name=None)
+    _merge_repo_lists(
+        merged["repositories"], defaults["repositories"], category_name=None
+    )
+    _merge_repo_lists(
+        merged["repositories"], user_cfg["repositories"], category_name=None
+    )

-    # andere Top-Level-Keys (falls vorhanden)
+    # Merge other top-level keys
     other_keys = (set(defaults.keys()) | set(user_cfg.keys())) - {
         "directories",
         "repositories",
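A quick illustration of the merge semantics documented above (the values are made up): directories are deep-merged per key, while repository lists are matched by (provider, account, repository) and never deleted.

# Hypothetical data, not from the diff:
defaults = {"directories": {"repositories": "~/Repositories", "binaries": "~/.local/bin"}}
user_cfg = {"directories": {"repositories": "~/src"}}

_deep_merge(defaults["directories"], user_cfg["directories"])
# -> {"repositories": "~/src", "binaries": "~/.local/bin"}
# User values win per key; defaults survive where the user is silent.
# Repository entries are updated or appended by key, but existing
# default entries are never removed (the "no deletions!" rule).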
@@ -1,9 +1,10 @@
 import yaml
 import os

+
 def save_user_config(user_config, USER_CONFIG_PATH: str):
     """Save the user configuration to USER_CONFIG_PATH."""
     os.makedirs(os.path.dirname(USER_CONFIG_PATH), exist_ok=True)
-    with open(USER_CONFIG_PATH, 'w') as f:
+    with open(USER_CONFIG_PATH, "w") as f:
         yaml.dump(user_config, f)
     print(f"User configuration updated in {USER_CONFIG_PATH}.")
@@ -16,7 +16,9 @@ class EnvTokenProvider:
     source_name: str = "env"

     def get(self, request: TokenRequest) -> Optional[TokenResult]:
-        for key in env_var_candidates(request.provider_kind, request.host, request.owner):
+        for key in env_var_candidates(
+            request.provider_kind, request.host, request.owner
+        ):
             val = os.environ.get(key)
             if val:
                 return TokenResult(token=val.strip(), source=self.source_name)
@@ -15,6 +15,7 @@ class GhTokenProvider:

     This does NOT persist anything; it only reads what `gh` already knows.
     """
+
     source_name: str = "gh"

     def get(self, request: TokenRequest) -> Optional[TokenResult]:
@@ -21,9 +21,7 @@ def _import_keyring():
     try:
         import keyring  # type: ignore
     except Exception as exc:  # noqa: BLE001
-        raise KeyringUnavailableError(
-            "python-keyring is not installed."
-        ) from exc
+        raise KeyringUnavailableError("python-keyring is not installed.") from exc

     # Some environments have keyring installed but no usable backend.
     # We do a lightweight "backend sanity check" by attempting to read the backend.
@@ -9,7 +9,12 @@ from .providers.env import EnvTokenProvider
 from .providers.gh import GhTokenProvider
 from .providers.keyring import KeyringTokenProvider
 from .providers.prompt import PromptTokenProvider
-from .types import KeyringUnavailableError, NoCredentialsError, TokenRequest, TokenResult
+from .types import (
+    KeyringUnavailableError,
+    NoCredentialsError,
+    TokenRequest,
+    TokenResult,
+)
 from .validate import validate_token


@@ -55,7 +60,10 @@ class TokenResolver:
         print(f" {msg}", file=sys.stderr)
         print(" Tokens will NOT be persisted securely.", file=sys.stderr)
         print("", file=sys.stderr)
-        print(" To enable secure token storage, install python-keyring:", file=sys.stderr)
+        print(
+            " To enable secure token storage, install python-keyring:",
+            file=sys.stderr,
+        )
         print(" pip install keyring", file=sys.stderr)
         print("", file=sys.stderr)
         print(" Or install via system packages:", file=sys.stderr)
@@ -13,7 +13,9 @@ class KeyringKey:
     username: str


-def build_keyring_key(provider_kind: str, host: str, owner: Optional[str]) -> KeyringKey:
+def build_keyring_key(
+    provider_kind: str, host: str, owner: Optional[str]
+) -> KeyringKey:
     """Build a stable keyring key.

     - service: "pkgmgr:<provider>"
@@ -21,11 +23,15 @@ def build_keyring_key(provider_kind: str, host: str, owner: Optional[str]) -> Ke
     """
     provider_kind = str(provider_kind).strip().lower()
     host = str(host).strip()
-    owner_part = (str(owner).strip() if owner else "-")
-    return KeyringKey(service=f"pkgmgr:{provider_kind}", username=f"{host}|{owner_part}")
+    owner_part = str(owner).strip() if owner else "-"
+    return KeyringKey(
+        service=f"pkgmgr:{provider_kind}", username=f"{host}|{owner_part}"
+    )


-def env_var_candidates(provider_kind: str, host: str, owner: Optional[str]) -> list[str]:
+def env_var_candidates(
+    provider_kind: str, host: str, owner: Optional[str]
+) -> list[str]:
     """Return a list of environment variable names to try.

     Order is from most specific to most generic.
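A concrete example of the key scheme above; the provider, host, and owner values are illustrative:

key = build_keyring_key("github", "github.com", "alice")
# key.service  == "pkgmgr:github"
# key.username == "github.com|alice"
# With owner=None the username falls back to "github.com|-".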
@@ -18,4 +18,6 @@ def add_all(*, cwd: str = ".", preview: bool = False) -> None:
     try:
         run(["add", "-A"], cwd=cwd, preview=preview)
     except GitRunError as exc:
-        raise GitAddAllError("Failed to stage all changes with `git add -A`.", cwd=cwd) from exc
+        raise GitAddAllError(
+            "Failed to stage all changes with `git add -A`.", cwd=cwd
+        ) from exc
@@ -18,4 +18,6 @@ def branch_move(branch: str, *, cwd: str = ".", preview: bool = False) -> None:
     try:
         run(["branch", "-M", branch], cwd=cwd, preview=preview)
     except GitRunError as exc:
-        raise GitBranchMoveError(f"Failed to move/rename current branch to {branch!r}.", cwd=cwd) from exc
+        raise GitBranchMoveError(
+            f"Failed to move/rename current branch to {branch!r}.", cwd=cwd
+        ) from exc
@@ -4,21 +4,26 @@ from __future__ import annotations
 class GitBaseError(RuntimeError):
     """Base error raised for Git related failures."""


 class GitRunError(GitBaseError):
     """Base error raised for Git related failures."""


 class GitNotRepositoryError(GitBaseError):
     """Raised when the current working directory is not a git repository."""


 class GitQueryError(GitRunError):
     """Base class for read-only git query failures."""


 class GitCommandError(GitRunError):
     """
     Base class for state-changing git command failures.

     Use subclasses to provide stable error types for callers.
     """

     def __init__(self, message: str, *, cwd: str = ".") -> None:
         super().__init__(message)
         if cwd in locals():
@@ -20,7 +20,10 @@ from .get_tags_at_ref import GitTagsAtRefQueryError, get_tags_at_ref
 from .get_upstream_ref import get_upstream_ref
 from .list_remotes import list_remotes
 from .list_tags import list_tags
-from .probe_remote_reachable import probe_remote_reachable
+from .probe_remote_reachable import (
+    probe_remote_reachable,
+    probe_remote_reachable_detail,
+)
 from .resolve_base_branch import GitBaseBranchNotFoundError, resolve_base_branch

 __all__ = [
@@ -37,6 +40,7 @@ __all__ = [
     "list_remotes",
     "get_remote_push_urls",
     "probe_remote_reachable",
+    "probe_remote_reachable_detail",
     "get_changelog",
     "GitChangelogQueryError",
     "get_tags_at_ref",
@@ -16,6 +16,7 @@ def _is_missing_key_error(exc: GitRunError) -> bool:
     # 'git config --get' returns exit code 1 when the key is not set.
     return "exit code: 1" in msg

+
 def get_config_value(key: str, *, cwd: str = ".") -> Optional[str]:
     """
     Return a value from `git config --get <key>`, or None if not set.
@@ -30,4 +30,4 @@ def get_remote_head_commit(
         ) from exc

     # minimal parsing: first token is the hash
-    return (out.split()[0].strip() if out else "")
+    return out.split()[0].strip() if out else ""
@@ -4,6 +4,7 @@ from typing import Set

 from ..run import run

+
 def get_remote_push_urls(remote: str, cwd: str = ".") -> Set[str]:
     """
     Return all push URLs configured for a remote.
@@ -1,21 +1,121 @@
 from __future__ import annotations

+from typing import Tuple
+
 from ..errors import GitRunError
 from ..run import run


-def probe_remote_reachable(url: str, cwd: str = ".") -> bool:
+def _first_useful_line(text: str) -> str:
+    lines: list[str] = []
+    for line in (text or "").splitlines():
+        s = line.strip()
+        if s:
+            lines.append(s)
+
+    if not lines:
+        return ""
+
+    preferred_keywords = (
+        "fatal:",
+        "permission denied",
+        "repository not found",
+        "could not read from remote repository",
+        "connection refused",
+        "connection timed out",
+        "no route to host",
+        "name or service not known",
+        "temporary failure in name resolution",
+        "host key verification failed",
+        "could not resolve hostname",
+        "authentication failed",
+        "publickey",
+        "the authenticity of host",
+        "known_hosts",
+    )
+    for s in lines:
+        low = s.lower()
+        if any(k in low for k in preferred_keywords):
+            return s
+
+    # Avoid returning a meaningless "error:" if possible
+    for s in lines:
+        if s.lower() not in ("error:", "error"):
+            return s
+
+    return lines[0]
+
+
+def _looks_like_real_transport_error(text: str) -> bool:
     """
-    Check whether a remote URL is reachable.
+    True if stderr/stdout contains strong indicators that the remote is NOT usable.
+    """
+    low = (text or "").lower()
+    indicators = (
+        "repository not found",
+        "could not read from remote repository",
+        "permission denied",
+        "authentication failed",
+        "publickey",
+        "host key verification failed",
+        "could not resolve hostname",
+        "name or service not known",
+        "connection refused",
+        "connection timed out",
+        "no route to host",
+    )
+    return any(i in low for i in indicators)

-    Equivalent to:
-        git ls-remote --exit-code <url>

-    Returns:
-        True if reachable, False otherwise.
+def _format_reason(exc: GitRunError, *, url: str) -> str:
+    stderr = getattr(exc, "stderr", "") or ""
+    stdout = getattr(exc, "stdout", "") or ""
+    rc = getattr(exc, "returncode", None)
+
+    reason = (
+        _first_useful_line(stderr)
+        or _first_useful_line(stdout)
+        or _first_useful_line(str(exc))
+    )
+
+    if rc is not None:
+        reason = f"(exit {rc}) {reason}".strip() if reason else f"(exit {rc})"
+
+    # If we still have nothing useful, provide a hint to debug SSH transport
+    if not reason or reason.lower() in ("(exit 2)", "(exit 128)"):
+        reason = (
+            f"{reason} | hint: run "
+            f"GIT_SSH_COMMAND='ssh -vvv' git ls-remote --exit-code {url!r}"
+        ).strip()
+
+    return reason.strip()
+
+
+def probe_remote_reachable_detail(url: str, cwd: str = ".") -> Tuple[bool, str]:
+    """
+    Probe whether a remote URL is reachable.
+
+    Implementation detail:
+    - We run `git ls-remote --exit-code <url>`.
+    - Git may return exit code 2 when the remote is reachable but no refs exist
+      (e.g. an empty repository). We treat that as reachable.
     """
     try:
         run(["ls-remote", "--exit-code", url], cwd=cwd)
-        return True
-    except GitRunError:
-        return False
+        return True, ""
+    except GitRunError as exc:
+        rc = getattr(exc, "returncode", None)
+        stderr = getattr(exc, "stderr", "") or ""
+        stdout = getattr(exc, "stdout", "") or ""
+
+        # Important: `git ls-remote --exit-code` uses exit code 2 when no refs match.
+        # For a completely empty repo, this can happen even though auth/transport is OK.
+        if rc == 2 and not _looks_like_real_transport_error(stderr + "\n" + stdout):
+            return True, "remote reachable, but no refs found yet (empty repository)"
+
+        return False, _format_reason(exc, url=url)
+
+
+def probe_remote_reachable(url: str, cwd: str = ".") -> bool:
+    ok, _ = probe_remote_reachable_detail(url, cwd=cwd)
+    return ok
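A minimal caller sketch for the new detail variant above; the remote URL is a placeholder:

ok, reason = probe_remote_reachable_detail("git@github.com:owner/repo.git")
if not ok:
    # `reason` carries the first useful stderr line plus the exit code,
    # instead of the bare False the old probe returned.
    print(f"[WARN] mirror unreachable: {reason}")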
@@ -42,18 +42,34 @@ def run(
         )
     except subprocess.CalledProcessError as exc:
         stderr = exc.stderr or ""
-        if _is_not_repo_error(stderr):
-            raise GitNotRepositoryError(
-                f"Not a git repository: {cwd!r}\n"
-                f"Command: {cmd_str}\n"
-                f"STDERR:\n{stderr}"
-            ) from exc
+        stdout = exc.stdout or ""

-        raise GitRunError(
+        if _is_not_repo_error(stderr):
+            err = GitNotRepositoryError(
+                f"Not a git repository: {cwd!r}\nCommand: {cmd_str}\nSTDERR:\n{stderr}"
+            )
+            # Attach details for callers who want to debug
+            err.cwd = cwd
+            err.cmd = cmd
+            err.cmd_str = cmd_str
+            err.returncode = exc.returncode
+            err.stdout = stdout
+            err.stderr = stderr
+            raise err from exc
+
+        err = GitRunError(
             f"Git command failed in {cwd!r}: {cmd_str}\n"
             f"Exit code: {exc.returncode}\n"
-            f"STDOUT:\n{exc.stdout}\n"
+            f"STDOUT:\n{stdout}\n"
             f"STDERR:\n{stderr}"
-        ) from exc
+        )
+        # Attach details for callers who want to debug
+        err.cwd = cwd
+        err.cmd = cmd
+        err.cmd_str = cmd_str
+        err.returncode = exc.returncode
+        err.stdout = stdout
+        err.stderr = stderr
+        raise err from exc

     return result.stdout.strip()
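The attached attributes let callers inspect failures without parsing the message string. A rough illustration, assuming the command runs inside a valid repository so the failure maps to GitRunError rather than GitNotRepositoryError:

try:
    run(["rev-parse", "not-a-real-ref"], cwd=".")  # fails with exit 128
except GitRunError as exc:
    print(getattr(exc, "returncode", None))
    print(getattr(exc, "stderr", ""))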
@@ -1,12 +1,13 @@
-# src/pkgmgr/core/remote_provisioning/__init__.py
 """Remote repository provisioning (ensure remote repo exists)."""

 from .ensure import ensure_remote_repo
 from .registry import ProviderRegistry
 from .types import EnsureResult, ProviderHint, RepoSpec
+from .visibility import set_repo_visibility

 __all__ = [
     "ensure_remote_repo",
+    "set_repo_visibility",
     "RepoSpec",
     "EnsureResult",
     "ProviderHint",
@@ -1,4 +1,3 @@
-# src/pkgmgr/core/remote_provisioning/providers/base.py
 from __future__ import annotations

 from abc import ABC, abstractmethod
@@ -23,7 +22,26 @@ class RemoteProvider(ABC):
     def create_repo(self, token: str, spec: RepoSpec) -> EnsureResult:
         """Create a repository (owner may be user or org)."""

+    @abstractmethod
+    def get_repo_private(self, token: str, spec: RepoSpec) -> bool | None:
+        """
+        Return current repo privacy, or None if repo not found / inaccessible.
+
+        IMPORTANT:
+        - Must NOT create repositories.
+        - Should return None on 404 (not found) or when the repo cannot be accessed.
+        """
+
+    @abstractmethod
+    def set_repo_private(self, token: str, spec: RepoSpec, *, private: bool) -> None:
+        """
+        Update repo privacy (PATCH). Must NOT create repositories.
+
+        Implementations should raise HttpError on API failure.
+        """
+
     def ensure_repo(self, token: str, spec: RepoSpec) -> EnsureResult:
+        """Ensure repository exists (create if missing)."""
         if self.repo_exists(token, spec):
             return EnsureResult(status="exists", message="Repository exists.")
         return self.create_repo(token, spec)
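A hypothetical in-memory test double (not from the diff) satisfying the new contract above, assuming RemoteProvider also declares repo_exists as sketched in ensure_repo: privacy reads return None for unknown repos and updates never create them.

class FakeProvider(RemoteProvider):
    def __init__(self):
        self._private: dict[str, bool] = {}

    def repo_exists(self, token, spec):
        return f"{spec.owner}/{spec.name}" in self._private

    def create_repo(self, token, spec):
        self._private[f"{spec.owner}/{spec.name}"] = True
        return EnsureResult(status="created", message="Repository created.")

    def get_repo_private(self, token, spec):
        # None when the repo is missing, mirroring the 404 rule.
        return self._private.get(f"{spec.owner}/{spec.name}")

    def set_repo_private(self, token, spec, *, private):
        key = f"{spec.owner}/{spec.name}"
        if key not in self._private:
            raise KeyError(key)  # must not create repositories
        self._private[key] = private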
@@ -52,6 +52,39 @@ class GiteaProvider(RemoteProvider):
             return False
         raise

+    def get_repo_private(self, token: str, spec: RepoSpec) -> bool | None:
+        base = self._api_base(spec.host)
+        url = f"{base}/api/v1/repos/{spec.owner}/{spec.name}"
+        try:
+            resp = self._http.request_json("GET", url, headers=self._headers(token))
+        except HttpError as exc:
+            if exc.status == 404:
+                return None
+            raise
+
+        if not (200 <= resp.status < 300):
+            return None
+        data = resp.json or {}
+        return bool(data.get("private", False))
+
+    def set_repo_private(self, token: str, spec: RepoSpec, *, private: bool) -> None:
+        base = self._api_base(spec.host)
+        url = f"{base}/api/v1/repos/{spec.owner}/{spec.name}"
+        payload: Dict[str, Any] = {"private": bool(private)}
+
+        resp = self._http.request_json(
+            "PATCH",
+            url,
+            headers=self._headers(token),
+            payload=payload,
+        )
+        if not (200 <= resp.status < 300):
+            raise HttpError(
+                status=resp.status,
+                message="Failed to update repository.",
+                body=resp.text,
+            )
+
     def create_repo(self, token: str, spec: RepoSpec) -> EnsureResult:
         base = self._api_base(spec.host)
@@ -54,6 +54,39 @@ class GitHubProvider(RemoteProvider):
|
|||||||
return False
|
return False
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
def get_repo_private(self, token: str, spec: RepoSpec) -> bool | None:
|
||||||
|
api = self._api_base(spec.host)
|
||||||
|
url = f"{api}/repos/{spec.owner}/{spec.name}"
|
||||||
|
try:
|
||||||
|
resp = self._http.request_json("GET", url, headers=self._headers(token))
|
||||||
|
except HttpError as exc:
|
||||||
|
if exc.status == 404:
|
||||||
|
return None
|
||||||
|
raise
|
||||||
|
|
||||||
|
if not (200 <= resp.status < 300):
|
||||||
|
return None
|
||||||
|
data = resp.json or {}
|
||||||
|
return bool(data.get("private", False))
|
||||||
|
|
||||||
|
def set_repo_private(self, token: str, spec: RepoSpec, *, private: bool) -> None:
|
||||||
|
api = self._api_base(spec.host)
|
||||||
|
url = f"{api}/repos/{spec.owner}/{spec.name}"
|
||||||
|
payload: Dict[str, Any] = {"private": bool(private)}
|
||||||
|
|
||||||
|
resp = self._http.request_json(
|
||||||
|
"PATCH",
|
||||||
|
url,
|
||||||
|
headers=self._headers(token),
|
||||||
|
payload=payload,
|
||||||
|
)
|
||||||
|
if not (200 <= resp.status < 300):
|
||||||
|
raise HttpError(
|
||||||
|
status=resp.status,
|
||||||
|
message="Failed to update repository.",
|
||||||
|
body=resp.text,
|
||||||
|
)
|
||||||
|
|
||||||
def create_repo(self, token: str, spec: RepoSpec) -> EnsureResult:
|
def create_repo(self, token: str, spec: RepoSpec) -> EnsureResult:
|
||||||
api = self._api_base(spec.host)
|
api = self._api_base(spec.host)
|
||||||
|
|
||||||
|
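Gitea and GitHub differ only in URL shape here (/api/v1/repos/{owner}/{name} vs /repos/{owner}/{name}); the PATCH payload is the same single field. Stripped of the project's _http wrapper, the underlying request is roughly the following sketch (the use of requests, the header scheme, and the timeout are illustrative assumptions, not part of this changeset):

    import requests

    def patch_visibility(api_url: str, token: str, private: bool) -> None:
        # api_url is the full repo endpoint, e.g. one of the URLs built above.
        resp = requests.patch(
            api_url,
            headers={"Authorization": f"token {token}"},
            json={"private": private},
            timeout=30,
        )
        resp.raise_for_status()  # stands in for the HttpError raised above on non-2xx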

@@ -1,10 +1,17 @@
 # src/pkgmgr/core/remote_provisioning/types.py
 from __future__ import annotations
 
 from dataclasses import dataclass
 from typing import Literal, Optional
 
-EnsureStatus = Literal["exists", "created", "skipped", "failed"]
+EnsureStatus = Literal[
+    "exists",
+    "created",
+    "updated",
+    "noop",
+    "notfound",
+    "skipped",
+    "failed",
+]
 
 
 @dataclass(frozen=True)
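With seven possible statuses, consumers can map the literal exhaustively; a hypothetical sketch (the exit-code values are illustrative, not taken from this changeset):

    EXIT_CODES = {
        "exists": 0,
        "created": 0,
        "updated": 0,
        "noop": 0,
        "skipped": 0,
        "notfound": 1,
        "failed": 2,
    }

    def exit_code_for(result: EnsureResult) -> int:
        # EnsureResult.status is one of the EnsureStatus literals above.
        return EXIT_CODES[result.status]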

118 src/pkgmgr/core/remote_provisioning/visibility.py Normal file
@@ -0,0 +1,118 @@
+# src/pkgmgr/core/remote_provisioning/visibility.py
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+from pkgmgr.core.credentials.resolver import ResolutionOptions, TokenResolver
+
+from .http.errors import HttpError
+from .registry import ProviderRegistry
+from .types import (
+    AuthError,
+    EnsureResult,
+    NetworkError,
+    PermissionError,
+    ProviderHint,
+    RepoSpec,
+    UnsupportedProviderError,
+)
+
+
+@dataclass(frozen=True)
+class VisibilityOptions:
+    """Options controlling remote visibility updates."""
+
+    preview: bool = False
+    interactive: bool = True
+    allow_prompt: bool = True
+    save_prompt_token_to_keyring: bool = True
+
+
+def _raise_mapped_http_error(exc: HttpError, host: str) -> None:
+    """Map HttpError into domain-specific error types."""
+    if exc.status == 0:
+        raise NetworkError(f"Network error while talking to {host}: {exc}") from exc
+    if exc.status == 401:
+        raise AuthError(f"Authentication failed for {host} (401).") from exc
+    if exc.status == 403:
+        raise PermissionError(f"Permission denied for {host} (403).") from exc
+
+    raise NetworkError(
+        f"HTTP error from {host}: status={exc.status}, message={exc}, body={exc.body}"
+    ) from exc
+
+
+def set_repo_visibility(
+    spec: RepoSpec,
+    *,
+    private: bool,
+    provider_hint: Optional[ProviderHint] = None,
+    options: Optional[VisibilityOptions] = None,
+    registry: Optional[ProviderRegistry] = None,
+    token_resolver: Optional[TokenResolver] = None,
+) -> EnsureResult:
+    """
+    Set repository visibility (public/private) WITHOUT creating repositories.
+
+    Behavior:
+      - If repo does not exist -> status=notfound
+      - If already desired -> status=noop
+      - If changed -> status=updated
+      - Respects preview mode -> status=skipped
+      - Maps HTTP errors to domain-specific errors
+    """
+    opts = options or VisibilityOptions()
+    reg = registry or ProviderRegistry.default()
+    resolver = token_resolver or TokenResolver()
+
+    provider = reg.resolve(spec.host)
+    if provider_hint and provider_hint.kind:
+        forced = provider_hint.kind.strip().lower()
+        forced_provider = next(
+            (p for p in reg.providers if getattr(p, "kind", "").lower() == forced),
+            None,
+        )
+        if forced_provider is not None:
+            provider = forced_provider
+
+    if provider is None:
+        raise UnsupportedProviderError(f"No provider matched host: {spec.host}")
+
+    token_opts = ResolutionOptions(
+        interactive=opts.interactive,
+        allow_prompt=opts.allow_prompt,
+        save_prompt_token_to_keyring=opts.save_prompt_token_to_keyring,
+    )
+    token = resolver.get_token(
+        provider_kind=getattr(provider, "kind", "unknown"),
+        host=spec.host,
+        owner=spec.owner,
+        options=token_opts,
+    )
+
+    if opts.preview:
+        return EnsureResult(
+            status="skipped",
+            message="Preview mode: no remote changes performed.",
+        )
+
+    try:
+        current_private = provider.get_repo_private(token.token, spec)
+        if current_private is None:
+            return EnsureResult(status="notfound", message="Repository not found.")
+
+        if bool(current_private) == bool(private):
+            return EnsureResult(
+                status="noop",
+                message=f"Repository already {'private' if private else 'public'}.",
+            )
+
+        provider.set_repo_private(token.token, spec, private=private)
+        return EnsureResult(
+            status="updated",
+            message=f"Visibility updated to {'private' if private else 'public'}.",
+        )
+    except HttpError as exc:
+        _raise_mapped_http_error(exc, host=spec.host)
+        return EnsureResult(status="failed", message="Unreachable error mapping.")
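Taken together, a preview (dry-run) call through the new module could look like the sketch below; the RepoSpec field names follow the attribute access in the code above, and the concrete values are made up:

    spec = RepoSpec(host="github.com", owner="example", name="example-repo")
    result = set_repo_visibility(
        spec,
        private=False,  # request public visibility
        options=VisibilityOptions(preview=True),  # no PATCH is issued
    )
    print(result.status, result.message)  # "skipped" while preview=True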

@@ -34,7 +34,15 @@ def get_repo_dir(repositories_base_dir: str, repo: Dict[str, Any]) -> str:
     account = repo.get("account")
     repository = repo.get("repository")
 
-    missing = [k for k, v in [("provider", provider), ("account", account), ("repository", repository)] if not v]
+    missing = [
+        k
+        for k, v in [
+            ("provider", provider),
+            ("account", account),
+            ("repository", repository),
+        ]
+        if not v
+    ]
     if missing:
         print(
             "Error: repository entry is missing required keys.\n"
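The reflowed comprehension is behavior-identical to the old one-liner: any key whose value is missing or falsy ends up in missing. For example (made-up entry):

    repo = {"provider": "github.com", "repository": "pkgmgr"}  # "account" absent
    # -> missing == ["account"], so the error branch below is taken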

@@ -9,4 +9,4 @@ def get_repo_identifier(repo, all_repos):
     if count == 1:
         return repo_name
     else:
-        return f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
+        return f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"

@@ -109,7 +109,9 @@ def resolve_repo_paths(repo_dir: str) -> RepoPaths:
         ]
     )
     if rpm_spec is None:
-        rpm_spec = _find_first_spec_in_dir(os.path.join(repo_dir, "packaging", "fedora"))
+        rpm_spec = _find_first_spec_in_dir(
+            os.path.join(repo_dir, "packaging", "fedora")
+        )
     if rpm_spec is None:
         rpm_spec = _find_first_spec_in_dir(repo_dir)

@@ -1,4 +1,3 @@
-
 def resolve_repos(identifiers: [], all_repos: []):
     """
     Given a list of identifier strings, return a list of repository configs.
@@ -11,7 +10,9 @@ def resolve_repos(identifiers: [], all_repos: []):
     for ident in identifiers:
         matches = []
         for repo in all_repos:
-            full_id = f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
+            full_id = (
+                f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"
+            )
             if ident == full_id:
                 matches.append(repo)
             elif ident == repo.get("alias"):
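For illustration, an identifier still matches either the full provider/account/repository triple or an alias; with a made-up entry such as:

    repo = {
        "provider": "github.com",
        "account": "example",
        "repository": "package-manager",
        "alias": "pkgmgr",
    }
    # both "github.com/example/package-manager" (full_id) and "pkgmgr" (alias)
    # would append this repo to matches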

@@ -66,18 +66,26 @@ def verify_repository(repo, repo_dir, mode="local", no_verification=False):
     if expected_commit:
         if not commit_hash:
             commit_check_passed = False
-            error_details.append(f"Expected commit: {expected_commit}, but could not determine current commit.")
+            error_details.append(
+                f"Expected commit: {expected_commit}, but could not determine current commit."
+            )
         elif commit_hash != expected_commit:
             commit_check_passed = False
-            error_details.append(f"Expected commit: {expected_commit}, found: {commit_hash}")
+            error_details.append(
+                f"Expected commit: {expected_commit}, found: {commit_hash}"
+            )
 
     if expected_gpg_keys:
         if not signing_key:
             gpg_check_passed = False
-            error_details.append(f"Expected one of GPG keys: {expected_gpg_keys}, but no signing key was found.")
+            error_details.append(
+                f"Expected one of GPG keys: {expected_gpg_keys}, but no signing key was found."
+            )
         elif signing_key not in expected_gpg_keys:
             gpg_check_passed = False
-            error_details.append(f"Expected one of GPG keys: {expected_gpg_keys}, found: {signing_key}")
+            error_details.append(
+                f"Expected one of GPG keys: {expected_gpg_keys}, found: {signing_key}"
+            )
 
     if expected_commit and expected_gpg_keys:
         verified_ok = commit_check_passed and gpg_check_passed

@@ -13,6 +13,7 @@ class InstalledVersion:
     """
     Represents a resolved installed version and the matched name.
    """
+
     name: str
     version: str

@@ -43,10 +43,14 @@ class SemVer:
             minor = int(parts[1])
             patch = int(parts[2])
         except ValueError as exc:
-            raise ValueError(f"Semantic version components must be integers: {value!r}") from exc
+            raise ValueError(
+                f"Semantic version components must be integers: {value!r}"
+            ) from exc
 
         if major < 0 or minor < 0 or patch < 0:
-            raise ValueError(f"Semantic version components must be non-negative: {value!r}")
+            raise ValueError(
+                f"Semantic version components must be non-negative: {value!r}"
+            )
 
         return cls(major=major, minor=minor, patch=patch)
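The reflowed error paths keep the same semantics. Assuming the classmethod shown here is the usual parse-style constructor (its name is not visible in this hunk), behavior is roughly:

    SemVer.parse("1.9.2")   # -> SemVer(major=1, minor=9, patch=2)
    SemVer.parse("1.9.x")   # ValueError: ...must be integers: '1.9.x'
    SemVer.parse("1.-9.2")  # ValueError: ...must be non-negative: '1.-9.2'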

Some files were not shown because too many files have changed in this diff.