Restructure repo layout, wiring src/ and packaging for local and distro builds
- Add dev runner main.py that prefers local src/ over installed pkgmgr
- Move Arch/Debian/Fedora packaging files under packaging/* and update build scripts
- Adjust .gitignore/.dockerignore for new packaging paths and src/source/
- Improve config defaults discovery to support src/ layout and installed packages
- Update architecture diagram and add TODO overview for TAGS/MIRROR/SIGNING_KEY

https://chatgpt.com/share/693a76a0-e408-800f-9939-868524cbef4d
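Note: main.py itself is not part of this excerpt. A minimal sketch of how such a dev runner typically works follows; the module path and entry-point name below are assumptions, not the committed file.

# main.py (hypothetical sketch): prefer the local src/ tree over an
# installed pkgmgr package so the checkout is used during development.
import os
import sys

SRC = os.path.join(os.path.dirname(os.path.abspath(__file__)), "src")
if os.path.isdir(SRC):
    # Prepend so "import pkgmgr" resolves to src/pkgmgr first.
    sys.path.insert(0, SRC)

# "main" in pkgmgr.cli is an assumed entry point; the package path
# pkgmgr.cli is referenced elsewhere in this commit.
from pkgmgr.cli import main

if __name__ == "__main__":
    main()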
0
src/pkgmgr/__init__.py
Normal file
0
src/pkgmgr/actions/__init__.py
Normal file
235
src/pkgmgr/actions/branch/__init__.py
Normal file
@@ -0,0 +1,235 @@
# pkgmgr/actions/branch/__init__.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
High-level helpers for branch-related operations.

This module encapsulates the actual Git logic so the CLI layer
(pkgmgr.cli.commands.branch) stays thin and testable.
"""

from __future__ import annotations

from typing import Optional

from pkgmgr.core.git import run_git, GitError, get_current_branch


# ---------------------------------------------------------------------------
# Branch creation (open)
# ---------------------------------------------------------------------------

def open_branch(
    name: Optional[str],
    base_branch: str = "main",
    fallback_base: str = "master",
    cwd: str = ".",
) -> None:
    """
    Create and push a new feature branch on top of a base branch.

    The base branch is resolved by:
      1. Trying 'base_branch' (default: 'main')
      2. Falling back to 'fallback_base' (default: 'master')

    Steps:
      1) git fetch origin
      2) git checkout <resolved_base>
      3) git pull origin <resolved_base>
      4) git checkout -b <name>
      5) git push -u origin <name>

    If `name` is None or empty, the user is prompted to enter one.
    """
    # Request name interactively if not provided
    if not name:
        name = input("Enter new branch name: ").strip()

    if not name:
        raise RuntimeError("Branch name must not be empty.")

    # Resolve which base branch to use (main or master)
    resolved_base = _resolve_base_branch(base_branch, fallback_base, cwd=cwd)

    # 1) Fetch from origin
    try:
        run_git(["fetch", "origin"], cwd=cwd)
    except GitError as exc:
        raise RuntimeError(
            f"Failed to fetch from origin before creating branch {name!r}: {exc}"
        ) from exc

    # 2) Checkout base branch
    try:
        run_git(["checkout", resolved_base], cwd=cwd)
    except GitError as exc:
        raise RuntimeError(
            f"Failed to checkout base branch {resolved_base!r}: {exc}"
        ) from exc

    # 3) Pull latest changes for base branch
    try:
        run_git(["pull", "origin", resolved_base], cwd=cwd)
    except GitError as exc:
        raise RuntimeError(
            f"Failed to pull latest changes for base branch {resolved_base!r}: {exc}"
        ) from exc

    # 4) Create new branch
    try:
        run_git(["checkout", "-b", name], cwd=cwd)
    except GitError as exc:
        raise RuntimeError(
            f"Failed to create new branch {name!r} from base {resolved_base!r}: {exc}"
        ) from exc

    # 5) Push new branch to origin
    try:
        run_git(["push", "-u", "origin", name], cwd=cwd)
    except GitError as exc:
        raise RuntimeError(
            f"Failed to push new branch {name!r} to origin: {exc}"
        ) from exc


# ---------------------------------------------------------------------------
# Base branch resolver (shared by open/close)
# ---------------------------------------------------------------------------

def _resolve_base_branch(
    preferred: str,
    fallback: str,
    cwd: str,
) -> str:
    """
    Resolve the base branch to use.

    Try `preferred` first (default: main),
    fall back to `fallback` (default: master).

    Raise RuntimeError if neither exists.
    """
    for candidate in (preferred, fallback):
        try:
            run_git(["rev-parse", "--verify", candidate], cwd=cwd)
            return candidate
        except GitError:
            continue

    raise RuntimeError(
        f"Neither {preferred!r} nor {fallback!r} exists in this repository."
    )


# ---------------------------------------------------------------------------
# Branch closing (merge + deletion)
# ---------------------------------------------------------------------------

def close_branch(
    name: Optional[str],
    base_branch: str = "main",
    fallback_base: str = "master",
    cwd: str = ".",
) -> None:
    """
    Merge a feature branch into the base branch and delete it afterwards.

    Steps:
      1) Determine the branch name (argument or current branch)
      2) Resolve base branch (main/master)
      3) Ask for confirmation
      4) git fetch origin
      5) git checkout <base>
      6) git pull origin <base>
      7) git merge --no-ff <name>
      8) git push origin <base>
      9) Delete branch locally
      10) Delete branch on origin (best effort)
    """
    # 1) Determine which branch should be closed
    if not name:
        try:
            name = get_current_branch(cwd=cwd)
        except GitError as exc:
            raise RuntimeError(f"Failed to detect current branch: {exc}") from exc

    if not name:
        raise RuntimeError("Branch name must not be empty.")

    # 2) Resolve base branch
    target_base = _resolve_base_branch(base_branch, fallback_base, cwd=cwd)

    if name == target_base:
        raise RuntimeError(
            f"Refusing to close base branch {target_base!r}. "
            "Please specify a feature branch."
        )

    # 3) Ask user for confirmation
    prompt = (
        f"Merge branch '{name}' into '{target_base}' and delete it afterwards? "
        "(y/N): "
    )
    answer = input(prompt).strip().lower()
    if answer != "y":
        print("Aborted closing branch.")
        return

    # 4) Fetch from origin
    try:
        run_git(["fetch", "origin"], cwd=cwd)
    except GitError as exc:
        raise RuntimeError(
            f"Failed to fetch from origin before closing branch {name!r}: {exc}"
        ) from exc

    # 5) Checkout base
    try:
        run_git(["checkout", target_base], cwd=cwd)
    except GitError as exc:
        raise RuntimeError(
            f"Failed to checkout base branch {target_base!r}: {exc}"
        ) from exc

    # 6) Pull latest base state
    try:
        run_git(["pull", "origin", target_base], cwd=cwd)
    except GitError as exc:
        raise RuntimeError(
            f"Failed to pull latest changes for base branch {target_base!r}: {exc}"
        ) from exc

    # 7) Merge the feature branch
    try:
        run_git(["merge", "--no-ff", name], cwd=cwd)
    except GitError as exc:
        raise RuntimeError(
            f"Failed to merge branch {name!r} into {target_base!r}: {exc}"
        ) from exc

    # 8) Push updated base
    try:
        run_git(["push", "origin", target_base], cwd=cwd)
    except GitError as exc:
        raise RuntimeError(
            f"Failed to push base branch {target_base!r} after merge: {exc}"
        ) from exc

    # 9) Delete branch locally
    try:
        run_git(["branch", "-d", name], cwd=cwd)
    except GitError as exc:
        raise RuntimeError(
            f"Failed to delete local branch {name!r}: {exc}"
        ) from exc

    # 10) Delete branch on origin (best effort)
    try:
        run_git(["push", "origin", "--delete", name], cwd=cwd)
    except GitError as exc:
        raise RuntimeError(
            f"Branch {name!r} was deleted locally, but remote deletion failed: {exc}"
        ) from exc
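For reviewers, a hypothetical call sequence against these helpers (branch name and working directory invented for illustration):

# Create feature/login on top of main (or master if main is missing)
# and push it with an upstream tracking ref.
from pkgmgr.actions.branch import open_branch, close_branch

open_branch("feature/login", cwd=".")

# Later: merge it back with --no-ff, push the base branch, and delete
# the feature branch locally and on origin after interactive confirmation.
close_branch("feature/login", cwd=".")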
78
src/pkgmgr/actions/changelog/__init__.py
Normal file
@@ -0,0 +1,78 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Helpers to generate changelog information from Git history.

This module provides a small abstraction around `git log` so that
CLI commands can request a changelog between two refs (tags, branches,
commits) without dealing with raw subprocess calls.
"""

from __future__ import annotations

from typing import Optional

from pkgmgr.core.git import run_git, GitError


def generate_changelog(
    cwd: str,
    from_ref: Optional[str] = None,
    to_ref: Optional[str] = None,
    include_merges: bool = False,
) -> str:
    """
    Generate a plain-text changelog between two Git refs.

    Parameters
    ----------
    cwd:
        Repository directory in which to run Git commands.
    from_ref:
        Optional starting reference (exclusive). If provided together
        with `to_ref`, the range `from_ref..to_ref` is used.
        If only `from_ref` is given, the range `from_ref..HEAD` is used.
    to_ref:
        Optional end reference (inclusive). If omitted, `HEAD` is used.
    include_merges:
        If False (default), merge commits are filtered out.

    Returns
    -------
    str
        The output of `git log` formatted as a simple text changelog.
        If no commits are found or Git fails, an explanatory message
        is returned instead of raising.
    """
    # Determine the revision range
    if to_ref is None:
        to_ref = "HEAD"

    if from_ref:
        rev_range = f"{from_ref}..{to_ref}"
    else:
        rev_range = to_ref

    # Use a custom pretty format that includes tags/refs (%d)
    cmd = [
        "log",
        "--pretty=format:%h %d %s",
    ]
    if not include_merges:
        cmd.append("--no-merges")
    cmd.append(rev_range)

    try:
        output = run_git(cmd, cwd=cwd)
    except GitError as exc:
        # Do not raise to the CLI; return a human-readable error instead.
        return (
            f"[ERROR] Failed to generate changelog in {cwd!r} "
            f"for range {rev_range!r}:\n{exc}"
        )

    if not output.strip():
        return f"[INFO] No commits found for range {rev_range!r}."

    return output.strip()
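A hypothetical usage sketch of generate_changelog (tag names invented):

from pkgmgr.actions.changelog import generate_changelog

print(generate_changelog(".", from_ref="v1.0.0", to_ref="v1.1.0"))  # v1.0.0..v1.1.0
print(generate_changelog(".", from_ref="v1.1.0"))                   # v1.1.0..HEAD
print(generate_changelog(".", include_merges=True))                 # HEAD, merges kept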
0
src/pkgmgr/actions/config/__init__.py
Normal file
35
src/pkgmgr/actions/config/add.py
Normal file
@@ -0,0 +1,35 @@
import yaml
import os

from pkgmgr.core.config.save import save_user_config


def interactive_add(config, USER_CONFIG_PATH: str):
    """Interactively prompt the user to add a new repository entry to the user config."""
    print("Adding a new repository configuration entry.")
    new_entry = {}
    new_entry["provider"] = input("Provider (e.g., github.com): ").strip()
    new_entry["account"] = input("Account (e.g., yourusername): ").strip()
    new_entry["repository"] = input("Repository name (e.g., mytool): ").strip()
    new_entry["command"] = input("Command (optional, leave blank to auto-detect): ").strip()
    new_entry["description"] = input("Description (optional): ").strip()
    new_entry["replacement"] = input("Replacement (optional): ").strip()
    new_entry["alias"] = input("Alias (optional): ").strip()
    # Allow the user to mark this entry as ignored.
    ignore_val = input("Ignore this entry? (y/N): ").strip().lower()
    if ignore_val == "y":
        new_entry["ignore"] = True

    print("\nNew entry:")
    for key, value in new_entry.items():
        if value:
            print(f"{key}: {value}")
    confirm = input("Add this entry to user config? (y/N): ").strip().lower()
    if confirm == "y":
        if os.path.exists(USER_CONFIG_PATH):
            with open(USER_CONFIG_PATH, 'r') as f:
                user_config = yaml.safe_load(f) or {}
        else:
            user_config = {"repositories": []}
        user_config.setdefault("repositories", [])
        user_config["repositories"].append(new_entry)
        save_user_config(user_config, USER_CONFIG_PATH)
    else:
        print("Entry not added.")
181
src/pkgmgr/actions/config/init.py
Normal file
@@ -0,0 +1,181 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Initialize user configuration by scanning the repositories base directory.

This module scans the path:

    defaults_config["directories"]["repositories"]

with the expected structure:

    {base}/{provider}/{account}/{repository}

For each discovered repository, the function:
  • derives provider, account, repository from the folder structure
  • (optionally) determines the latest commit hash via git log
  • generates a unique CLI alias
  • marks ignore=True for newly discovered repos
  • skips repos already known in defaults or user config
"""

from __future__ import annotations

import os
import subprocess
from typing import Any, Dict

from pkgmgr.core.command.alias import generate_alias
from pkgmgr.core.config.save import save_user_config


def config_init(
    user_config: Dict[str, Any],
    defaults_config: Dict[str, Any],
    bin_dir: str,
    user_config_path: str,
) -> None:
    """
    Scan the repositories base directory and add missing entries
    to the user configuration.
    """

    # ------------------------------------------------------------
    # Announce where we will write the result
    # ------------------------------------------------------------
    print("============================================================")
    print("[INIT] Writing user configuration to:")
    print(f"       {user_config_path}")
    print("============================================================")

    repositories_base_dir = os.path.expanduser(
        defaults_config["directories"]["repositories"]
    )

    print("[INIT] Scanning repository base directory:")
    print(f"       {repositories_base_dir}")
    print("")

    if not os.path.isdir(repositories_base_dir):
        print(f"[ERROR] Base directory does not exist: {repositories_base_dir}")
        return

    default_keys = {
        (entry.get("provider"), entry.get("account"), entry.get("repository"))
        for entry in defaults_config.get("repositories", [])
    }
    existing_keys = {
        (entry.get("provider"), entry.get("account"), entry.get("repository"))
        for entry in user_config.get("repositories", [])
    }
    existing_aliases = {
        entry.get("alias")
        for entry in user_config.get("repositories", [])
        if entry.get("alias")
    }

    new_entries = []
    scanned = 0
    skipped = 0

    # ------------------------------------------------------------
    # Actual scanning
    # ------------------------------------------------------------
    for provider in os.listdir(repositories_base_dir):
        provider_path = os.path.join(repositories_base_dir, provider)
        if not os.path.isdir(provider_path):
            continue

        print(f"[SCAN] Provider: {provider}")

        for account in os.listdir(provider_path):
            account_path = os.path.join(provider_path, account)
            if not os.path.isdir(account_path):
                continue

            print(f"[SCAN]   Account: {account}")

            for repo_name in os.listdir(account_path):
                repo_path = os.path.join(account_path, repo_name)
                if not os.path.isdir(repo_path):
                    continue

                scanned += 1
                key = (provider, account, repo_name)

                # Already known?
                if key in default_keys:
                    skipped += 1
                    print(f"[SKIP] (defaults)    {provider}/{account}/{repo_name}")
                    continue
                if key in existing_keys:
                    skipped += 1
                    print(f"[SKIP] (user-config) {provider}/{account}/{repo_name}")
                    continue

                print(f"[ADD]  {provider}/{account}/{repo_name}")

                # Determine commit hash
                try:
                    result = subprocess.run(
                        ["git", "log", "-1", "--format=%H"],
                        cwd=repo_path,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                        text=True,
                        check=True,
                    )
                    verified = result.stdout.strip()
                    print(f"[INFO]   Latest commit: {verified}")
                except Exception as exc:
                    verified = ""
                    print(f"[WARN]   Could not read commit: {exc}")

                entry = {
                    "provider": provider,
                    "account": account,
                    "repository": repo_name,
                    "verified": {"commit": verified},
                    "ignore": True,
                }

                # Alias generation
                alias = generate_alias(
                    {
                        "repository": repo_name,
                        "provider": provider,
                        "account": account,
                    },
                    bin_dir,
                    existing_aliases,
                )
                entry["alias"] = alias
                existing_aliases.add(alias)
                print(f"[INFO]   Alias generated: {alias}")

                new_entries.append(entry)

            print("")  # blank line between accounts

    # ------------------------------------------------------------
    # Summary
    # ------------------------------------------------------------
    print("============================================================")
    print(f"[DONE] Scanned repositories:   {scanned}")
    print(f"[DONE] Skipped (known):        {skipped}")
    print(f"[DONE] New entries discovered: {len(new_entries)}")
    print("============================================================")

    # ------------------------------------------------------------
    # Save if needed
    # ------------------------------------------------------------
    if new_entries:
        user_config.setdefault("repositories", []).extend(new_entries)
        save_user_config(user_config, user_config_path)
        print("[SAVE] Wrote user configuration to:")
        print(f"       {user_config_path}")
    else:
        print("[INFO] No new repositories were added.")

    print("============================================================")
15
src/pkgmgr/actions/config/show.py
Normal file
@@ -0,0 +1,15 @@
import yaml
from pkgmgr.core.config.load import load_config

def show_config(selected_repos, user_config_path, full_config=False):
    """Display configuration for one or more repositories, or the entire merged config."""
    if full_config:
        merged = load_config(user_config_path)
        print(yaml.dump(merged, default_flow_style=False))
    else:
        for repo in selected_repos:
            identifier = f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
            print(f"Repository: {identifier}")
            for key, value in repo.items():
                print(f"  {key}: {value}")
            print("-" * 40)
218
src/pkgmgr/actions/install/__init__.py
Normal file
@@ -0,0 +1,218 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
High-level entry point for repository installation.

Responsibilities:

- Ensure the repository directory exists (clone if necessary).
- Verify the repository (GPG / commit checks).
- Build a RepoContext object.
- Delegate the actual installation decision logic to InstallationPipeline.
"""

from __future__ import annotations

import os
from typing import Any, Dict, List

from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.verify import verify_repository
from pkgmgr.actions.repository.clone import clone_repos
from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.os_packages import (
    ArchPkgbuildInstaller,
    DebianControlInstaller,
    RpmSpecInstaller,
)
from pkgmgr.actions.install.installers.nix_flake import (
    NixFlakeInstaller,
)
from pkgmgr.actions.install.installers.python import PythonInstaller
from pkgmgr.actions.install.installers.makefile import (
    MakefileInstaller,
)
from pkgmgr.actions.install.pipeline import InstallationPipeline


Repository = Dict[str, Any]

# All available installers, in the order they should be considered.
INSTALLERS = [
    ArchPkgbuildInstaller(),
    DebianControlInstaller(),
    RpmSpecInstaller(),
    NixFlakeInstaller(),
    PythonInstaller(),
    MakefileInstaller(),
]


# ---------------------------------------------------------------------------
# Internal helpers
# ---------------------------------------------------------------------------


def _ensure_repo_dir(
    repo: Repository,
    repositories_base_dir: str,
    all_repos: List[Repository],
    preview: bool,
    no_verification: bool,
    clone_mode: str,
    identifier: str,
) -> str | None:
    """
    Compute and, if necessary, clone the repository directory.

    Returns the absolute repository path or None if cloning ultimately failed.
    """
    repo_dir = get_repo_dir(repositories_base_dir, repo)

    if not os.path.exists(repo_dir):
        print(
            f"Repository directory '{repo_dir}' does not exist. "
            f"Cloning it now..."
        )
        clone_repos(
            [repo],
            repositories_base_dir,
            all_repos,
            preview,
            no_verification,
            clone_mode,
        )
        if not os.path.exists(repo_dir):
            print(
                f"Cloning failed for repository {identifier}. "
                f"Skipping installation."
            )
            return None

    return repo_dir


def _verify_repo(
    repo: Repository,
    repo_dir: str,
    no_verification: bool,
    identifier: str,
) -> bool:
    """
    Verify a repository using the configured verification data.

    Returns True if verification is considered okay and installation may continue.
    """
    verified_info = repo.get("verified")
    verified_ok, errors, _commit_hash, _signing_key = verify_repository(
        repo,
        repo_dir,
        mode="local",
        no_verification=no_verification,
    )

    if not no_verification and verified_info and not verified_ok:
        print(f"Warning: Verification failed for {identifier}:")
        for err in errors:
            print(f"  - {err}")
        choice = input("Continue anyway? [y/N]: ").strip().lower()
        if choice != "y":
            print(f"Skipping installation for {identifier}.")
            return False

    return True


def _create_context(
    repo: Repository,
    identifier: str,
    repo_dir: str,
    repositories_base_dir: str,
    bin_dir: str,
    all_repos: List[Repository],
    no_verification: bool,
    preview: bool,
    quiet: bool,
    clone_mode: str,
    update_dependencies: bool,
) -> RepoContext:
    """
    Build a RepoContext instance for the given repository.
    """
    return RepoContext(
        repo=repo,
        identifier=identifier,
        repo_dir=repo_dir,
        repositories_base_dir=repositories_base_dir,
        bin_dir=bin_dir,
        all_repos=all_repos,
        no_verification=no_verification,
        preview=preview,
        quiet=quiet,
        clone_mode=clone_mode,
        update_dependencies=update_dependencies,
    )


# ---------------------------------------------------------------------------
# Public API
# ---------------------------------------------------------------------------


def install_repos(
    selected_repos: List[Repository],
    repositories_base_dir: str,
    bin_dir: str,
    all_repos: List[Repository],
    no_verification: bool,
    preview: bool,
    quiet: bool,
    clone_mode: str,
    update_dependencies: bool,
) -> None:
    """
    Install one or more repositories according to the configured installers
    and the CLI layer precedence rules.
    """
    pipeline = InstallationPipeline(INSTALLERS)

    for repo in selected_repos:
        identifier = get_repo_identifier(repo, all_repos)

        repo_dir = _ensure_repo_dir(
            repo=repo,
            repositories_base_dir=repositories_base_dir,
            all_repos=all_repos,
            preview=preview,
            no_verification=no_verification,
            clone_mode=clone_mode,
            identifier=identifier,
        )
        if not repo_dir:
            continue

        if not _verify_repo(
            repo=repo,
            repo_dir=repo_dir,
            no_verification=no_verification,
            identifier=identifier,
        ):
            continue

        ctx = _create_context(
            repo=repo,
            identifier=identifier,
            repo_dir=repo_dir,
            repositories_base_dir=repositories_base_dir,
            bin_dir=bin_dir,
            all_repos=all_repos,
            no_verification=no_verification,
            preview=preview,
            quiet=quiet,
            clone_mode=clone_mode,
            update_dependencies=update_dependencies,
        )

        pipeline.run(ctx)
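A hedged sketch of calling the public API above; every value here is a placeholder, and the set of valid clone_mode values is not shown in this excerpt:

from pkgmgr.actions.install import install_repos

repo = {"provider": "github.com", "account": "me", "repository": "mytool"}
install_repos(
    selected_repos=[repo],
    repositories_base_dir="~/Repositories",   # placeholder path
    bin_dir="~/.local/bin",                   # placeholder path
    all_repos=[repo],
    no_verification=False,
    preview=True,            # print commands instead of executing them
    quiet=False,
    clone_mode="https",      # assumed value
    update_dependencies=False,
)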
407
src/pkgmgr/actions/install/capabilities.py
Normal file
@@ -0,0 +1,407 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Capability detection for pkgmgr.

Each capability is represented by a class that:
- defines a logical name (e.g. "python-runtime", "make-install", "nix-flake")
- knows for which installer layer(s) it applies (e.g. "nix", "python",
  "makefile", "os-packages")
- searches the repository config/build files for specific strings
  to determine whether that capability is provided by that layer.

This allows pkgmgr to dynamically decide if a higher layer already covers
work a lower layer would otherwise do (e.g. Nix calling pyproject/make,
or distro packages wrapping Nix or Makefile logic).

On top of the raw detection, this module also exposes a bottom-up
"effective capability" resolver:

- We start from the lowest layer (e.g. "makefile") and go upwards.
- For each capability provided by a lower layer, we check whether any
  higher layer also provides the same capability.
- If yes, we consider the capability "shadowed" by the higher layer;
  the lower layer does not list it as an effective capability.
- If no higher layer provides it, the capability remains attached to
  the lower layer.

This yields, for each layer, only those capabilities that are not
redundant with respect to higher layers in the stack.
"""

from __future__ import annotations

import glob
import os
from abc import ABC, abstractmethod
from typing import Iterable, TYPE_CHECKING

if TYPE_CHECKING:
    from pkgmgr.actions.install.context import RepoContext


# ---------------------------------------------------------------------------
# Helper functions
# ---------------------------------------------------------------------------


def _read_text_if_exists(path: str) -> str | None:
    """Read a file as UTF-8 text, returning None if it does not exist or fails."""
    if not os.path.exists(path):
        return None
    try:
        with open(path, "r", encoding="utf-8") as f:
            return f.read()
    except OSError:
        return None


def _scan_files_for_patterns(files: Iterable[str], patterns: Iterable[str]) -> bool:
    """
    Return True if any of the given files exists and contains at least one of
    the given patterns (case-insensitive).
    """
    lower_patterns = [p.lower() for p in patterns]
    for path in files:
        if not path:
            continue
        content = _read_text_if_exists(path)
        if not content:
            continue
        lower_content = content.lower()
        if any(p in lower_content for p in lower_patterns):
            return True
    return False


def _first_spec_file(repo_dir: str) -> str | None:
    """Return the first *.spec file in repo_dir, if any."""
    matches = glob.glob(os.path.join(repo_dir, "*.spec"))
    if not matches:
        return None
    return sorted(matches)[0]


# ---------------------------------------------------------------------------
# Base matcher
# ---------------------------------------------------------------------------


class CapabilityMatcher(ABC):
    """Base class for all capability detectors."""

    #: Logical capability name (e.g. "python-runtime", "make-install").
    name: str

    @abstractmethod
    def applies_to_layer(self, layer: str) -> bool:
        """Return True if this capability can be provided by the given layer."""
        raise NotImplementedError

    @abstractmethod
    def is_provided(self, ctx: "RepoContext", layer: str) -> bool:
        """
        Return True if this capability is actually provided by the given layer
        for this repository.

        This is where we search for specific strings in build/config files
        (flake.nix, pyproject.toml, Makefile, PKGBUILD, debian/rules, *.spec, ...).
        """
        raise NotImplementedError


# ---------------------------------------------------------------------------
# Capability: python-runtime
#
# Provided when:
# - Layer "python":
#     pyproject.toml exists → Python runtime via pip for this project
# - Layer "nix":
#     flake.nix contains hints that it builds a Python app
#     (buildPythonApplication, python3Packages., poetry2nix, pip install, ...)
# - Layer "os-packages":
#     distro build scripts (PKGBUILD, debian/rules, *.spec) clearly call
#     pip/python to install THIS Python project (heuristic).
# ---------------------------------------------------------------------------


class PythonRuntimeCapability(CapabilityMatcher):
    name = "python-runtime"

    def applies_to_layer(self, layer: str) -> bool:
        # OS packages may wrap Python builds, but must explicitly prove it
        return layer in {"python", "nix", "os-packages"}

    def is_provided(self, ctx: "RepoContext", layer: str) -> bool:
        repo_dir = ctx.repo_dir

        if layer == "python":
            # For pkgmgr, a pyproject.toml is enough to say:
            # "This layer provides the Python runtime for this project."
            pyproject = os.path.join(repo_dir, "pyproject.toml")
            return os.path.exists(pyproject)

        if layer == "nix":
            flake = os.path.join(repo_dir, "flake.nix")
            content = _read_text_if_exists(flake)
            if not content:
                return False

            content = content.lower()
            patterns = [
                "buildpythonapplication",
                "python3packages.",
                "poetry2nix",
                "pip install",
                "python -m pip",
            ]
            return any(p in content for p in patterns)

        if layer == "os-packages":
            # Heuristic:
            # - repo looks like a Python project (pyproject.toml or setup.py)
            # - and OS build scripts call pip / python -m pip / setup.py install
            pyproject = os.path.join(repo_dir, "pyproject.toml")
            setup_py = os.path.join(repo_dir, "setup.py")
            if not (os.path.exists(pyproject) or os.path.exists(setup_py)):
                return False

            pkgbuild = os.path.join(repo_dir, "PKGBUILD")
            debian_rules = os.path.join(repo_dir, "debian", "rules")
            spec = _first_spec_file(repo_dir)

            scripts = [pkgbuild, debian_rules]
            if spec:
                scripts.append(spec)

            patterns = [
                "pip install .",
                "python -m pip install",
                "python3 -m pip install",
                "setup.py install",
            ]
            return _scan_files_for_patterns(scripts, patterns)

        return False


# ---------------------------------------------------------------------------
# Capability: make-install
#
# Provided when:
# - Layer "makefile":
#     Makefile has an "install:" target
# - Layer "python":
#     pyproject.toml mentions "make install"
# - Layer "nix":
#     flake.nix mentions "make install"
# - Layer "os-packages":
#     distro build scripts call "make install" (they already consume the
#     Makefile installation step).
# ---------------------------------------------------------------------------


class MakeInstallCapability(CapabilityMatcher):
    name = "make-install"

    def applies_to_layer(self, layer: str) -> bool:
        return layer in {"makefile", "python", "nix", "os-packages"}

    def is_provided(self, ctx: "RepoContext", layer: str) -> bool:
        repo_dir = ctx.repo_dir

        if layer == "makefile":
            makefile = os.path.join(repo_dir, "Makefile")
            if not os.path.exists(makefile):
                return False
            try:
                with open(makefile, "r", encoding="utf-8") as f:
                    for line in f:
                        if line.strip().startswith("install:"):
                            return True
            except OSError:
                return False
            return False

        if layer == "python":
            pyproject = os.path.join(repo_dir, "pyproject.toml")
            content = _read_text_if_exists(pyproject)
            if not content:
                return False
            return "make install" in content.lower()

        if layer == "nix":
            flake = os.path.join(repo_dir, "flake.nix")
            content = _read_text_if_exists(flake)
            if not content:
                return False
            return "make install" in content.lower()

        if layer == "os-packages":
            pkgbuild = os.path.join(repo_dir, "PKGBUILD")
            debian_rules = os.path.join(repo_dir, "debian", "rules")
            spec = _first_spec_file(repo_dir)

            scripts = [pkgbuild, debian_rules]
            if spec:
                scripts.append(spec)

            # If any OS build script calls "make install", we assume it is
            # already consuming the Makefile installation and thus provides
            # the make-install capability.
            return _scan_files_for_patterns(scripts, ["make install"])

        return False


# ---------------------------------------------------------------------------
# Capability: nix-flake
#
# Provided when:
# - Layer "nix":
#     flake.nix exists → Nix flake installer can install this project
# - Layer "os-packages":
#     distro build scripts clearly call Nix (nix build/run/develop/profile),
#     i.e. they already use Nix as part of building/installing.
# ---------------------------------------------------------------------------


class NixFlakeCapability(CapabilityMatcher):
    name = "nix-flake"

    def applies_to_layer(self, layer: str) -> bool:
        # Only Nix itself and OS packages that explicitly wrap Nix
        return layer in {"nix", "os-packages"}

    def is_provided(self, ctx: "RepoContext", layer: str) -> bool:
        repo_dir = ctx.repo_dir

        if layer == "nix":
            flake = os.path.join(repo_dir, "flake.nix")
            return os.path.exists(flake)

        if layer == "os-packages":
            pkgbuild = os.path.join(repo_dir, "PKGBUILD")
            debian_rules = os.path.join(repo_dir, "debian", "rules")
            spec = _first_spec_file(repo_dir)

            scripts = [pkgbuild, debian_rules]
            if spec:
                scripts.append(spec)

            patterns = [
                "nix build",
                "nix run",
                "nix-shell",
                "nix develop",
                "nix profile",
            ]
            return _scan_files_for_patterns(scripts, patterns)

        return False


# ---------------------------------------------------------------------------
# Registry of all capability matchers currently supported.
# ---------------------------------------------------------------------------

CAPABILITY_MATCHERS: list[CapabilityMatcher] = [
    PythonRuntimeCapability(),
    MakeInstallCapability(),
    NixFlakeCapability(),
]


# ---------------------------------------------------------------------------
# Layer ordering and effective capability resolution
# ---------------------------------------------------------------------------

#: Default bottom-up order of installer layers.
#: Lower indices = lower layers; higher indices = higher layers.
LAYER_ORDER: list[str] = [
    "makefile",
    "python",
    "nix",
    "os-packages",
]


def detect_capabilities(
    ctx: "RepoContext",
    layers: Iterable[str],
) -> dict[str, set[str]]:
    """
    Perform raw capability detection per layer, without any shadowing.

    Returns a mapping:

        {
            "makefile": {"make-install"},
            "python": {"python-runtime", "make-install"},
            "nix": {"python-runtime", "make-install", "nix-flake"},
            "os-packages": set(),
        }

    depending on which matchers report capabilities for each layer.
    """
    layers_list = list(layers)
    caps_by_layer: dict[str, set[str]] = {layer: set() for layer in layers_list}

    for matcher in CAPABILITY_MATCHERS:
        for layer in layers_list:
            if not matcher.applies_to_layer(layer):
                continue
            if matcher.is_provided(ctx, layer):
                caps_by_layer[layer].add(matcher.name)

    return caps_by_layer


def resolve_effective_capabilities(
    ctx: "RepoContext",
    layers: Iterable[str] | None = None,
) -> dict[str, set[str]]:
    """
    Resolve *effective* capabilities for each layer using a bottom-up strategy.

    Algorithm (layer-agnostic, works for all layers in the given order):

    1. Run raw detection (detect_capabilities) to obtain which capabilities
       are provided by which layer.
    2. Iterate layers from bottom to top (the order in `layers`):
       For each capability that a lower layer provides, check whether
       any *higher* layer also provides the same capability.
       - If yes, the capability is considered "shadowed" by the higher
         layer and is NOT listed as effective for the lower layer.
       - If no higher layer provides it, it remains as an effective
         capability of the lower layer.
    3. Return a mapping layer → set of effective capabilities.

    This means *any* higher layer can overshadow a lower layer, not just
    a specific one like Nix. The resolver is completely generic.
    """
    if layers is None:
        layers_list = list(LAYER_ORDER)
    else:
        layers_list = list(layers)

    raw_caps = detect_capabilities(ctx, layers_list)
    effective: dict[str, set[str]] = {layer: set() for layer in layers_list}

    # Bottom-up walk: lower index = lower layer, higher index = higher layer
    for idx, lower in enumerate(layers_list):
        lower_caps = raw_caps.get(lower, set())
        for cap in lower_caps:
            # Check if any higher layer also provides this capability
            covered_by_higher = False
            for higher in layers_list[idx + 1:]:
                higher_caps = raw_caps.get(higher, set())
                if cap in higher_caps:
                    covered_by_higher = True
                    break

            if not covered_by_higher:
                effective[lower].add(cap)

    return effective
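To make the shadowing rule concrete, a worked example based on the raw mapping shown in the detect_capabilities docstring:

# With LAYER_ORDER = ["makefile", "python", "nix", "os-packages"] and raw
# detection as in the docstring:
#   makefile    → {"make-install"}
#   python      → {"python-runtime", "make-install"}
#   nix         → {"python-runtime", "make-install", "nix-flake"}
#   os-packages → set()
# resolve_effective_capabilities() keeps only capabilities that no higher
# layer also provides:
#   makefile    → set()        # make-install shadowed by python and nix
#   python      → set()        # both capabilities shadowed by nix
#   nix         → {"python-runtime", "make-install", "nix-flake"}
#   os-packages → set()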
30
src/pkgmgr/actions/install/context.py
Normal file
@@ -0,0 +1,30 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Shared context object for repository installation steps.

This data class bundles all information needed by installer components so
they do not depend on global state or long parameter lists.
"""

from dataclasses import dataclass
from typing import Any, Dict, List


@dataclass
class RepoContext:
    """Container for all repository-related data used during installation."""

    repo: Dict[str, Any]
    identifier: str
    repo_dir: str
    repositories_base_dir: str
    bin_dir: str
    all_repos: List[Dict[str, Any]]

    no_verification: bool
    preview: bool
    quiet: bool
    clone_mode: str
    update_dependencies: bool
19
src/pkgmgr/actions/install/installers/__init__.py
Normal file
@@ -0,0 +1,19 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installer package for pkgmgr.

This exposes all installer classes so users can import them directly from
pkgmgr.actions.install.installers.
"""

from pkgmgr.actions.install.installers.base import BaseInstaller  # noqa: F401
from pkgmgr.actions.install.installers.nix_flake import NixFlakeInstaller  # noqa: F401
from pkgmgr.actions.install.installers.python import PythonInstaller  # noqa: F401
from pkgmgr.actions.install.installers.makefile import MakefileInstaller  # noqa: F401

# OS-specific installers
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller  # noqa: F401
from pkgmgr.actions.install.installers.os_packages.debian_control import DebianControlInstaller  # noqa: F401
from pkgmgr.actions.install.installers.os_packages.rpm_spec import RpmSpecInstaller  # noqa: F401
69
src/pkgmgr/actions/install/installers/base.py
Normal file
@@ -0,0 +1,69 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Base interface for all installer components in the pkgmgr installation pipeline.
"""

from abc import ABC, abstractmethod
from typing import Set

from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.capabilities import CAPABILITY_MATCHERS


class BaseInstaller(ABC):
    """
    A single step in the installation pipeline for a repository.

    Implementations should be small and focused on one technology or manifest
    type (e.g. PKGBUILD, Nix, Python, Makefile, etc.).
    """

    #: Logical layer name for this installer.
    # Examples: "nix", "python", "makefile".
    # This is used by capability matchers to decide which patterns to
    # search for in the repository.
    layer: str | None = None

    def discover_capabilities(self, ctx: RepoContext) -> Set[str]:
        """
        Determine which logical capabilities this installer will provide
        for this specific repository instance.

        This method delegates to the global capability matchers, which
        inspect build/configuration files (flake.nix, pyproject.toml,
        Makefile, etc.) and decide, via string matching, whether a given
        capability is actually provided by this layer.
        """
        caps: Set[str] = set()
        if not self.layer:
            return caps

        for matcher in CAPABILITY_MATCHERS:
            if matcher.applies_to_layer(self.layer) and matcher.is_provided(ctx, self.layer):
                caps.add(matcher.name)

        return caps

    @abstractmethod
    def supports(self, ctx: RepoContext) -> bool:
        """
        Return True if this installer should run for the given repository
        context. This is typically based on file existence or platform checks.

        Implementations must never swallow critical errors silently; if a
        configuration is broken, they should raise SystemExit.
        """
        raise NotImplementedError

    @abstractmethod
    def run(self, ctx: RepoContext) -> None:
        """
        Execute the installer logic for the given repository context.

        Implementations are allowed to raise SystemExit (for example via
        run_command()) on errors. Such failures are considered fatal for
        the installation pipeline.
        """
        raise NotImplementedError
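A minimal sketch of a third-party installer built on this interface; the "cmake" layer name, file name, and commands are assumptions, not part of this commit:

# Hypothetical CMake installer implementing the BaseInstaller contract.
import os

from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command


class CMakeInstaller(BaseInstaller):
    layer = "cmake"  # no capability matcher knows this layer yet

    def supports(self, ctx: RepoContext) -> bool:
        # Run only when the repository ships a CMake build definition.
        return os.path.exists(os.path.join(ctx.repo_dir, "CMakeLists.txt"))

    def run(self, ctx: RepoContext) -> None:
        # run_command may raise SystemExit on failure, which the
        # pipeline treats as fatal (see the run() docstring above).
        run_command("cmake -B build && cmake --build build",
                    cwd=ctx.repo_dir, preview=ctx.preview)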
97
src/pkgmgr/actions/install/installers/makefile.py
Normal file
@@ -0,0 +1,97 @@
from __future__ import annotations

import os
import re

from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command


class MakefileInstaller(BaseInstaller):
    """
    Generic installer that runs `make install` if a Makefile with an
    install target is present.

    Safety rules:
    - If PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 is set, this installer
      is globally disabled.
    - The higher-level InstallationPipeline ensures that Makefile
      installation does not run if a stronger CLI layer already owns
      the command (e.g. Nix or OS packages).
    """

    layer = "makefile"
    MAKEFILE_NAME = "Makefile"

    def supports(self, ctx: RepoContext) -> bool:
        """
        Return True if this repository has a Makefile and the installer
        is not globally disabled.
        """
        # Optional global kill switch.
        if os.environ.get("PKGMGR_DISABLE_MAKEFILE_INSTALLER") == "1":
            if not ctx.quiet:
                print(
                    "[INFO] MakefileInstaller is disabled via "
                    "PKGMGR_DISABLE_MAKEFILE_INSTALLER."
                )
            return False

        makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
        return os.path.exists(makefile_path)

    def _has_install_target(self, makefile_path: str) -> bool:
        """
        Heuristically check whether the Makefile defines an install target.

        We look for:

        - a plain 'install:' target, or
        - any 'install-*:' style target.
        """
        try:
            with open(makefile_path, "r", encoding="utf-8", errors="ignore") as f:
                content = f.read()
        except OSError:
            return False

        # Simple heuristics: look for "install:" or targets starting with "install-"
        if re.search(r"^install\s*:", content, flags=re.MULTILINE):
            return True

        if re.search(r"^install-[a-zA-Z0-9_-]*\s*:", content, flags=re.MULTILINE):
            return True

        return False

    def run(self, ctx: RepoContext) -> None:
        """
        Execute `make install` in the repository directory if an install
        target exists.
        """
        makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)

        if not os.path.exists(makefile_path):
            if not ctx.quiet:
                print(
                    f"[pkgmgr] Makefile '{makefile_path}' not found, "
                    "skipping MakefileInstaller."
                )
            return

        if not self._has_install_target(makefile_path):
            if not ctx.quiet:
                print(
                    f"[pkgmgr] No 'install' target found in {makefile_path}."
                )
            return

        if not ctx.quiet:
            print(
                f"[pkgmgr] Running 'make install' in {ctx.repo_dir} "
                f"(MakefileInstaller)"
            )

        cmd = "make install"
        run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
160
src/pkgmgr/actions/install/installers/nix_flake.py
Normal file
@@ -0,0 +1,160 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installer for Nix flakes.

If a repository contains flake.nix and the 'nix' command is available, this
installer will try to install profile outputs from the flake.

Behavior:
- If flake.nix is present and `nix` exists on PATH:
    * First remove any existing `package-manager` profile entry (best-effort).
    * Then install one or more flake outputs via `nix profile install`.
- For the package-manager repo:
    * `pkgmgr` is mandatory (CLI), `default` is optional.
- For all other repos:
    * `default` is mandatory.

Special handling:
- If PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 is set, the installer is
  globally disabled (useful for CI or debugging).

The higher-level InstallationPipeline and CLI-layer model decide when this
installer is allowed to run, based on where the current CLI comes from
(e.g. Nix, OS packages, Python, Makefile).
"""

import os
import shutil
from typing import TYPE_CHECKING, List, Tuple

from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command

if TYPE_CHECKING:
    from pkgmgr.actions.install.context import RepoContext


class NixFlakeInstaller(BaseInstaller):
    """Install Nix flake profiles for repositories that define flake.nix."""

    # Logical layer name, used by capability matchers.
    layer = "nix"

    FLAKE_FILE = "flake.nix"
    PROFILE_NAME = "package-manager"

    def supports(self, ctx: "RepoContext") -> bool:
        """
        Only support repositories that:
        - Are NOT explicitly disabled via PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1,
        - Have a flake.nix,
        - And have the `nix` command available.
        """
        # Optional global kill-switch for CI or debugging.
        if os.environ.get("PKGMGR_DISABLE_NIX_FLAKE_INSTALLER") == "1":
            print(
                "[INFO] PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 – "
                "NixFlakeInstaller is disabled."
            )
            return False

        # Nix must be available.
        if shutil.which("nix") is None:
            return False

        # flake.nix must exist in the repository.
        flake_path = os.path.join(ctx.repo_dir, self.FLAKE_FILE)
        return os.path.exists(flake_path)

    def _ensure_old_profile_removed(self, ctx: "RepoContext") -> None:
        """
        Best-effort removal of an existing profile entry.

        This handles the "already provides the following file" conflict by
        removing previous `package-manager` installations before we install
        the new one.

        Any error in `nix profile remove` is intentionally ignored, because
        a missing profile entry is not a fatal condition.
        """
        if shutil.which("nix") is None:
            return

        cmd = f"nix profile remove {self.PROFILE_NAME} || true"
        try:
            # NOTE: no allow_failure here → matches the existing unit tests
            run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
        except SystemExit:
            # Unit tests explicitly assert this is swallowed
            pass

    def _profile_outputs(self, ctx: "RepoContext") -> List[Tuple[str, bool]]:
        """
        Decide which flake outputs to install and whether failures are fatal.

        Returns a list of (output_name, allow_failure) tuples.

        Rules:
        - For the package-manager repo (identifier 'pkgmgr' or 'package-manager'):
              [("pkgmgr", False), ("default", True)]
        - For all other repos:
              [("default", False)]
        """
        ident = ctx.identifier

        if ident in {"pkgmgr", "package-manager"}:
            # pkgmgr: main CLI output is "pkgmgr" (mandatory),
            # "default" is nice-to-have (non-fatal).
            return [("pkgmgr", False), ("default", True)]

        # Generic repos: we expect a sensible "default" package/app.
        # Failure to install it is considered fatal.
        return [("default", False)]

    def run(self, ctx: "RepoContext") -> None:
        """
        Install Nix flake profile outputs.

        For the package-manager repo, failure installing 'pkgmgr' is fatal,
        failure installing 'default' is non-fatal.
        For other repos, failure installing 'default' is fatal.
        """
        # Reuse supports() to keep logic in one place.
        if not self.supports(ctx):
            return

        outputs = self._profile_outputs(ctx)  # list of (name, allow_failure)

        print(
            "Nix flake detected in "
            f"{ctx.identifier}, attempting to install profile outputs: "
            + ", ".join(name for name, _ in outputs)
        )

        # Handle the "already installed" case up-front for the shared profile.
        self._ensure_old_profile_removed(ctx)

        for output, allow_failure in outputs:
            cmd = f"nix profile install {ctx.repo_dir}#{output}"

            try:
                run_command(
                    cmd,
                    cwd=ctx.repo_dir,
                    preview=ctx.preview,
                    allow_failure=allow_failure,
                )
                print(f"Nix flake output '{output}' successfully installed.")
            except SystemExit as e:
                print(f"[Error] Failed to install Nix flake output '{output}': {e}")
                if not allow_failure:
                    # Mandatory output failed → fatal for the pipeline.
                    raise
                # Optional output failed → log and continue.
                print(
                    "[Warning] Continuing despite failure to install "
                    f"optional output '{output}'."
                )
9
src/pkgmgr/actions/install/installers/os_packages/__init__.py
Normal file
@@ -0,0 +1,9 @@
from .arch_pkgbuild import ArchPkgbuildInstaller
from .debian_control import DebianControlInstaller
from .rpm_spec import RpmSpecInstaller

__all__ = [
    "ArchPkgbuildInstaller",
    "DebianControlInstaller",
    "RpmSpecInstaller",
]
@@ -0,0 +1,59 @@
# pkgmgr/installers/os_packages/arch_pkgbuild.py

import os
import shutil

from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command


class ArchPkgbuildInstaller(BaseInstaller):
    """
    Build and install an Arch package from PKGBUILD via makepkg.

    This installer is responsible for the full build + install of the
    application on Arch-based systems. System dependencies are resolved
    by makepkg itself (--syncdeps).

    Note: makepkg must not be run as root, so this installer refuses
    to run when the current user is UID 0.
    """

    # Logical layer name, used by capability matchers.
    layer = "os-packages"

    PKGBUILD_NAME = "PKGBUILD"

    def supports(self, ctx: RepoContext) -> bool:
        """
        This installer is supported if:
        - pacman and makepkg are available,
        - a PKGBUILD file exists in the repository root,
        - the current user is NOT root (makepkg forbids root).
        """
        # Do not run makepkg as root – it is explicitly forbidden.
        try:
            if hasattr(os, "geteuid") and os.geteuid() == 0:
                return False
        except Exception:
            # On non-POSIX platforms just ignore this check.
            pass

        if shutil.which("pacman") is None or shutil.which("makepkg") is None:
            return False

        pkgbuild_path = os.path.join(ctx.repo_dir, self.PKGBUILD_NAME)
        return os.path.exists(pkgbuild_path)

    def run(self, ctx: RepoContext) -> None:
        """
        Build and install the package using makepkg.

        This uses:
            makepkg --syncdeps --cleanbuild --install --noconfirm

        Any failure is treated as fatal (SystemExit).
        """
        cmd = "makepkg --syncdeps --cleanbuild --install --noconfirm"
        run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
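
A minimal usage sketch under the conditions supports() checks (ctx is the RepoContext the pipeline passes in; this wiring is illustrative, not the canonical call site):

    installer = ArchPkgbuildInstaller()
    if installer.supports(ctx):   # pacman + makepkg on PATH, PKGBUILD present, not root
        installer.run(ctx)        # makepkg --syncdeps --cleanbuild --install --noconfirm
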
@@ -0,0 +1,177 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installer for Debian/Ubuntu packages defined via debian/control.

This installer:

1. Installs build dependencies via `apt-get build-dep ./`
2. Uses dpkg-buildpackage to build .deb packages from debian/*
3. Installs the resulting .deb files via `dpkg -i`

It is intended for Debian-based systems where dpkg-buildpackage and
apt/dpkg tooling are available.
"""

import glob
import os
import shutil
from typing import List

from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command


class DebianControlInstaller(BaseInstaller):
    """
    Build and install a Debian/Ubuntu package from debian/control.

    This installer is responsible for the full build + install of the
    application on Debian-like systems.
    """

    # Logical layer name, used by capability matchers.
    layer = "os-packages"

    CONTROL_DIR = "debian"
    CONTROL_FILE = "control"

    def _is_debian_like(self) -> bool:
        """Return True if this looks like a Debian-based system."""
        return shutil.which("dpkg-buildpackage") is not None

    def _control_path(self, ctx: RepoContext) -> str:
        return os.path.join(ctx.repo_dir, self.CONTROL_DIR, self.CONTROL_FILE)

    def supports(self, ctx: RepoContext) -> bool:
        """
        This installer is supported if:
        - we are on a Debian-like system (dpkg-buildpackage available), and
        - debian/control exists.
        """
        if not self._is_debian_like():
            return False

        return os.path.exists(self._control_path(ctx))

    def _find_built_debs(self, repo_dir: str) -> List[str]:
        """
        Find .deb files built by dpkg-buildpackage.

        By default, dpkg-buildpackage creates .deb files in the parent
        directory of the source tree.
        """
        parent = os.path.dirname(repo_dir)
        pattern = os.path.join(parent, "*.deb")
        return sorted(glob.glob(pattern))

    def _privileged_prefix(self) -> str | None:
        """
        Determine how to run privileged commands:

        - If 'sudo' is available, return 'sudo '.
        - If we are running as root (e.g. inside CI/container), return ''.
        - Otherwise, return None, meaning we cannot safely elevate.

        Callers are responsible for handling the None case (usually by
        warning and skipping automatic installation).
        """
        sudo_path = shutil.which("sudo")

        is_root = False
        try:
            is_root = os.geteuid() == 0
        except AttributeError:  # pragma: no cover - non-POSIX platforms
            # On non-POSIX systems, fall back to assuming "not root".
            is_root = False

        if sudo_path is not None:
            return "sudo "
        if is_root:
            return ""
        return None

    def _install_build_dependencies(self, ctx: RepoContext) -> None:
        """
        Install build dependencies using `apt-get build-dep ./`.

        This is a best-effort implementation that assumes:
        - deb-src entries are configured in /etc/apt/sources.list*,
        - apt-get is available on PATH.

        Any failure is treated as fatal (SystemExit), just like other
        installer steps.
        """
        if shutil.which("apt-get") is None:
            print(
                "[Warning] apt-get not found on PATH. "
                "Skipping automatic build-dep installation for Debian."
            )
            return

        prefix = self._privileged_prefix()
        if prefix is None:
            print(
                "[Warning] 'sudo' is not available and we are not running as root. "
                "Skipping automatic build-dep installation for Debian. "
                "Please install build dependencies from debian/control manually."
            )
            return

        # Update package lists first for reliable build-dep resolution.
        run_command(
            f"{prefix}apt-get update",
            cwd=ctx.repo_dir,
            preview=ctx.preview,
        )

        # Install build dependencies based on debian/control in the current tree.
        # `apt-get build-dep ./` uses the source in the current directory.
        builddep_cmd = f"{prefix}apt-get build-dep -y ./"
        run_command(builddep_cmd, cwd=ctx.repo_dir, preview=ctx.preview)

    def run(self, ctx: RepoContext) -> None:
        """
        Build and install Debian/Ubuntu packages from debian/*.

        Steps:
        1. apt-get build-dep ./ (automatic build dependency installation)
        2. dpkg-buildpackage -b -us -uc
        3. sudo dpkg -i ../*.deb (or plain dpkg -i when running as root)
        """
        control_path = self._control_path(ctx)
        if not os.path.exists(control_path):
            return

        # 1) Install build dependencies
        self._install_build_dependencies(ctx)

        # 2) Build the package
        build_cmd = "dpkg-buildpackage -b -us -uc"
        run_command(build_cmd, cwd=ctx.repo_dir, preview=ctx.preview)

        # 3) Locate built .deb files
        debs = self._find_built_debs(ctx.repo_dir)
        if not debs:
            print(
                "[Warning] No .deb files found after dpkg-buildpackage. "
                "Skipping Debian package installation."
            )
            return

        prefix = self._privileged_prefix()
        if prefix is None:
            print(
                "[Warning] 'sudo' is not available and we are not running as root. "
                "Skipping automatic .deb installation. "
                "You can manually install the following files with dpkg -i:\n "
                + "\n ".join(debs)
            )
            return

        # 4) Install .deb files
        install_cmd = prefix + "dpkg -i " + " ".join(os.path.basename(d) for d in debs)
        parent = os.path.dirname(ctx.repo_dir)
        run_command(install_cmd, cwd=parent, preview=ctx.preview)
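
Condensed, the happy path of run() above issues the following commands (assuming _privileged_prefix() resolved to "sudo "; the package filename is illustrative):

    # cwd = repo_dir:
    #   sudo apt-get update
    #   sudo apt-get build-dep -y ./
    #   dpkg-buildpackage -b -us -uc
    # cwd = parent of repo_dir (where dpkg-buildpackage drops the artifacts):
    #   sudo dpkg -i package-manager_0.7.7-1_all.deb
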
282
src/pkgmgr/actions/install/installers/os_packages/rpm_spec.py
Normal file
@@ -0,0 +1,282 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installer for RPM-based packages defined in *.spec files.

This installer:

1. Installs build dependencies via dnf/yum builddep (where available)
2. Prepares a source tarball in ~/rpmbuild/SOURCES based on the .spec
3. Uses rpmbuild to build RPMs from the provided .spec file
4. Installs the resulting RPMs via the system package manager (dnf/yum)
   or rpm as a fallback.

It targets RPM-based systems (Fedora / RHEL / CentOS / Rocky / Alma, etc.).
"""

import glob
import os
import shutil
import tarfile
from typing import List, Optional, Tuple

from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command


class RpmSpecInstaller(BaseInstaller):
    """
    Build and install RPM-based packages from *.spec files.

    This installer is responsible for the full build + install of the
    application on RPM-like systems.
    """

    # Logical layer name, used by capability matchers.
    layer = "os-packages"

    def _is_rpm_like(self) -> bool:
        """
        Basic RPM-like detection:

        - rpmbuild must be available
        - at least one of dnf / yum / yum-builddep must be present
        """
        if shutil.which("rpmbuild") is None:
            return False

        has_dnf = shutil.which("dnf") is not None
        has_yum = shutil.which("yum") is not None
        has_yum_builddep = shutil.which("yum-builddep") is not None

        return has_dnf or has_yum or has_yum_builddep

    def _spec_path(self, ctx: RepoContext) -> Optional[str]:
        """Return the first *.spec file in the repository root, if any."""
        pattern = os.path.join(ctx.repo_dir, "*.spec")
        matches = sorted(glob.glob(pattern))
        if not matches:
            return None
        return matches[0]

    # ------------------------------------------------------------------
    # Helpers for preparing rpmbuild topdir and source tarball
    # ------------------------------------------------------------------
    def _rpmbuild_topdir(self) -> str:
        """
        Return the rpmbuild topdir that rpmbuild will use by default.

        By default this is: ~/rpmbuild

        In the self-install tests, $HOME is set to /tmp/pkgmgr-self-install,
        so this becomes /tmp/pkgmgr-self-install/rpmbuild which matches the
        paths in the RPM build logs.
        """
        home = os.path.expanduser("~")
        return os.path.join(home, "rpmbuild")

    def _ensure_rpmbuild_tree(self, topdir: str) -> None:
        """
        Ensure the standard rpmbuild directory tree exists:

        <topdir>/
            BUILD/
            BUILDROOT/
            RPMS/
            SOURCES/
            SPECS/
            SRPMS/
        """
        for sub in ("BUILD", "BUILDROOT", "RPMS", "SOURCES", "SPECS", "SRPMS"):
            os.makedirs(os.path.join(topdir, sub), exist_ok=True)

    def _parse_name_version(self, spec_path: str) -> Optional[Tuple[str, str]]:
        """
        Parse Name and Version from the given .spec file.

        Returns (name, version) or None if either cannot be determined.
        """
        name = None
        version = None

        with open(spec_path, "r", encoding="utf-8") as f:
            for raw_line in f:
                line = raw_line.strip()
                # Ignore comments
                if not line or line.startswith("#"):
                    continue

                lower = line.lower()
                if lower.startswith("name:"):
                    # e.g. "Name: package-manager"
                    parts = line.split(":", 1)
                    if len(parts) == 2:
                        name = parts[1].strip()
                elif lower.startswith("version:"):
                    # e.g. "Version: 0.7.7"
                    parts = line.split(":", 1)
                    if len(parts) == 2:
                        version = parts[1].strip()

                if name and version:
                    break

        if not name or not version:
            print(
                "[Warning] Could not determine Name/Version from spec file "
                f"'{spec_path}'. Skipping RPM source tarball preparation."
            )
            return None

        return name, version

    def _prepare_source_tarball(self, ctx: RepoContext, spec_path: str) -> None:
        """
        Prepare a source tarball in <HOME>/rpmbuild/SOURCES that matches
        the Name/Version in the .spec file.
        """
        parsed = self._parse_name_version(spec_path)
        if parsed is None:
            return

        name, version = parsed
        topdir = self._rpmbuild_topdir()
        self._ensure_rpmbuild_tree(topdir)

        build_dir = os.path.join(topdir, "BUILD")
        sources_dir = os.path.join(topdir, "SOURCES")

        source_root = os.path.join(build_dir, f"{name}-{version}")
        tarball_path = os.path.join(sources_dir, f"{name}-{version}.tar.gz")

        # Clean any previous build directory for this name/version.
        if os.path.exists(source_root):
            shutil.rmtree(source_root)

        # Copy the repository tree into BUILD/<name>-<version>.
        shutil.copytree(ctx.repo_dir, source_root)

        # Create the tarball with the top-level directory <name>-<version>.
        if os.path.exists(tarball_path):
            os.remove(tarball_path)

        with tarfile.open(tarball_path, "w:gz") as tar:
            tar.add(source_root, arcname=f"{name}-{version}")

        print(
            f"[INFO] Prepared RPM source tarball at '{tarball_path}' "
            f"from '{ctx.repo_dir}'."
        )

    # ------------------------------------------------------------------

    def supports(self, ctx: RepoContext) -> bool:
        """
        This installer is supported if:
        - we are on an RPM-based system (rpmbuild + dnf/yum/yum-builddep available), and
        - a *.spec file exists in the repository root.
        """
        if not self._is_rpm_like():
            return False

        return self._spec_path(ctx) is not None

    def _find_built_rpms(self) -> List[str]:
        """
        Find RPMs built by rpmbuild.

        By default, rpmbuild outputs RPMs into:
            ~/rpmbuild/RPMS/*/*.rpm
        """
        topdir = self._rpmbuild_topdir()
        pattern = os.path.join(topdir, "RPMS", "**", "*.rpm")
        return sorted(glob.glob(pattern, recursive=True))

    def _install_build_dependencies(self, ctx: RepoContext, spec_path: str) -> None:
        """
        Install build dependencies for the given .spec file.
        """
        spec_basename = os.path.basename(spec_path)

        if shutil.which("dnf") is not None:
            cmd = f"sudo dnf builddep -y {spec_basename}"
        elif shutil.which("yum-builddep") is not None:
            cmd = f"sudo yum-builddep -y {spec_basename}"
        elif shutil.which("yum") is not None:
            cmd = f"sudo yum-builddep -y {spec_basename}"
        else:
            print(
                "[Warning] No suitable RPM builddep tool (dnf/yum-builddep/yum) found. "
                "Skipping automatic build dependency installation for RPM."
            )
            return

        run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)

    def _install_built_rpms(self, ctx: RepoContext, rpms: List[str]) -> None:
        """
        Install or upgrade the built RPMs.

        Strategy:
        - Prefer dnf install -y <rpms> (handles upgrades cleanly)
        - Else yum install -y <rpms>
        - Else fall back to rpm -Uvh <rpms> (upgrade/replace existing)
        """
        if not rpms:
            print(
                "[Warning] No RPM files found after rpmbuild. "
                "Skipping RPM package installation."
            )
            return

        dnf = shutil.which("dnf")
        yum = shutil.which("yum")
        rpm = shutil.which("rpm")

        if dnf is not None:
            install_cmd = "sudo dnf install -y " + " ".join(rpms)
        elif yum is not None:
            install_cmd = "sudo yum install -y " + " ".join(rpms)
        elif rpm is not None:
            # Fallback: use rpm in upgrade mode so an existing older
            # version is replaced instead of causing file conflicts.
            install_cmd = "sudo rpm -Uvh " + " ".join(rpms)
        else:
            print(
                "[Warning] No suitable RPM installer (dnf/yum/rpm) found. "
                "Cannot install built RPMs."
            )
            return

        run_command(install_cmd, cwd=ctx.repo_dir, preview=ctx.preview)

    def run(self, ctx: RepoContext) -> None:
        """
        Build and install RPM-based packages.

        Steps:
        1. Prepare source tarball in ~/rpmbuild/SOURCES matching Name/Version
        2. dnf/yum builddep <spec> (automatic build dependency installation)
        3. rpmbuild -ba path/to/spec
        4. Install built RPMs via dnf/yum (or rpm as fallback)
        """
        spec_path = self._spec_path(ctx)
        if not spec_path:
            return

        # 1) Prepare source tarball so rpmbuild finds Source0 in SOURCES.
        self._prepare_source_tarball(ctx, spec_path)

        # 2) Install build dependencies
        self._install_build_dependencies(ctx, spec_path)

        # 3) Build RPMs
        spec_basename = os.path.basename(spec_path)
        build_cmd = f"rpmbuild -ba {spec_basename}"
        run_command(build_cmd, cwd=ctx.repo_dir, preview=ctx.preview)

        # 4) Find and install built RPMs
        rpms = self._find_built_rpms()
        self._install_built_rpms(ctx, rpms)
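
To illustrate the Source0 contract _prepare_source_tarball() satisfies, a hypothetical spec and the paths derived from it:

    # Given a spec file containing (values illustrative):
    #   Name:    package-manager
    #   Version: 0.7.7
    # _parse_name_version() returns ("package-manager", "0.7.7"), and the
    # tarball lands at ~/rpmbuild/SOURCES/package-manager-0.7.7.tar.gz with
    # a top-level directory package-manager-0.7.7/, which is what a spec
    # declaring "Source0: %{name}-%{version}.tar.gz" expects.
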
139
src/pkgmgr/actions/install/installers/python.py
Normal file
@@ -0,0 +1,139 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
PythonInstaller — install Python projects defined via pyproject.toml.

Installation rules:

1. pip command resolution:
   a) If PKGMGR_PIP is set → use it exactly as provided.
   b) Else if running inside a virtualenv → use `sys.executable -m pip`.
   c) Else → create/use a per-repository virtualenv under ~/.venvs/<repo>/.

2. Installation target:
   - Always install into the resolved pip environment.
   - Never modify system Python, never rely on --user.
   - Nix-immutable systems (PEP 668) are automatically avoided because we
     never touch system Python.

3. The installer is skipped when:
   - PKGMGR_DISABLE_PYTHON_INSTALLER=1 is set.
   - The repository has no pyproject.toml.

All pip failures are treated as fatal.
"""

from __future__ import annotations

import os
import sys
import subprocess
from typing import TYPE_CHECKING

from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.core.command.run import run_command

if TYPE_CHECKING:
    from pkgmgr.actions.install.context import RepoContext
    from pkgmgr.actions.install import InstallContext


class PythonInstaller(BaseInstaller):
    """Install Python projects and dependencies via pip using isolated environments."""

    layer = "python"

    # ----------------------------------------------------------------------
    # Installer activation logic
    # ----------------------------------------------------------------------
    def supports(self, ctx: "RepoContext") -> bool:
        """
        Return True if this installer should handle this repository.

        The installer is active only when:
        - A pyproject.toml exists in the repo, and
        - PKGMGR_DISABLE_PYTHON_INSTALLER is not set.
        """
        if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
            print("[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER.")
            return False

        return os.path.exists(os.path.join(ctx.repo_dir, "pyproject.toml"))

    # ----------------------------------------------------------------------
    # Virtualenv handling
    # ----------------------------------------------------------------------
    def _in_virtualenv(self) -> bool:
        """Detect whether the current interpreter is inside a venv."""
        if os.environ.get("VIRTUAL_ENV"):
            return True

        base = getattr(sys, "base_prefix", sys.prefix)
        return sys.prefix != base

    def _ensure_repo_venv(self, ctx: "InstallContext") -> str:
        """
        Ensure that ~/.venvs/<identifier>/ exists and contains a minimal venv.

        Returns the venv directory path.
        """
        venv_dir = os.path.expanduser(f"~/.venvs/{ctx.identifier}")
        python = sys.executable

        if not os.path.isdir(venv_dir):
            print(f"[python-installer] Creating virtualenv: {venv_dir}")
            subprocess.check_call([python, "-m", "venv", venv_dir])

        return venv_dir

    # ----------------------------------------------------------------------
    # pip command resolution
    # ----------------------------------------------------------------------
    def _pip_cmd(self, ctx: "InstallContext") -> str:
        """
        Determine which pip command to use.

        Priority:
        1. PKGMGR_PIP override given by user or automation.
        2. Active virtualenv → use sys.executable -m pip.
        3. Per-repository venv → ~/.venvs/<repo>/bin/pip
        """
        explicit = os.environ.get("PKGMGR_PIP", "").strip()
        if explicit:
            return explicit

        if self._in_virtualenv():
            return f"{sys.executable} -m pip"

        venv_dir = self._ensure_repo_venv(ctx)
        pip_path = os.path.join(venv_dir, "bin", "pip")
        return pip_path

    # ----------------------------------------------------------------------
    # Execution
    # ----------------------------------------------------------------------
    def run(self, ctx: "InstallContext") -> None:
        """
        Install the project defined by pyproject.toml.

        Uses the resolved pip environment. Installation is isolated and never
        touches system Python.
        """
        if not self.supports(ctx):  # type: ignore[arg-type]
            return

        pyproject = os.path.join(ctx.repo_dir, "pyproject.toml")
        if not os.path.exists(pyproject):
            return

        print(f"[python-installer] Installing Python project for {ctx.identifier}...")

        pip_cmd = self._pip_cmd(ctx)

        # Final install command: ALWAYS isolated, never system-wide.
        install_cmd = f"{pip_cmd} install ."

        run_command(install_cmd, cwd=ctx.repo_dir, preview=ctx.preview)

        print(f"[python-installer] Installation finished for {ctx.identifier}.")
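
The pip resolution order above collapses to a few concrete outcomes (paths illustrative):

    # PKGMGR_PIP="/opt/ci/bin/pip"   -> "/opt/ci/bin/pip" (used verbatim)
    # active virtualenv              -> "<venv>/bin/python -m pip" via sys.executable
    # neither                        -> "~/.venvs/<identifier>/bin/pip",
    #                                   creating the venv on first use
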
91
src/pkgmgr/actions/install/layers.py
Normal file
@@ -0,0 +1,91 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
CLI layer model for the pkgmgr installation pipeline.

We treat CLI entry points as coming from one of four conceptual layers:

- os-packages : system package managers (pacman/apt/dnf/…)
- nix         : Nix flake / nix profile
- python      : pip / virtualenv / user-local scripts
- makefile    : repo-local Makefile / scripts inside the repo

The layer order defines precedence: higher layers "own" the CLI and
lower layers will not be executed once a higher-priority CLI exists.
"""

from __future__ import annotations

import os
from enum import Enum
from typing import Optional


class CliLayer(str, Enum):
    OS_PACKAGES = "os-packages"
    NIX = "nix"
    PYTHON = "python"
    MAKEFILE = "makefile"


# Highest priority first
CLI_LAYERS: list[CliLayer] = [
    CliLayer.OS_PACKAGES,
    CliLayer.NIX,
    CliLayer.PYTHON,
    CliLayer.MAKEFILE,
]


def layer_priority(layer: Optional[CliLayer]) -> int:
    """
    Return a numeric priority index for a given layer.

    Lower index → higher priority.
    Unknown / None → very low priority.
    """
    if layer is None:
        return len(CLI_LAYERS)
    try:
        return CLI_LAYERS.index(layer)
    except ValueError:
        return len(CLI_LAYERS)


def classify_command_layer(command: str, repo_dir: str) -> CliLayer:
    """
    Heuristically classify a resolved command path into a CLI layer.

    Rules (best effort):

    - /usr/... or /bin/...             → os-packages
    - /nix/store/... or ~/.nix-profile → nix
    - ~/.local/bin/...                 → python
    - inside repo_dir                  → makefile
    - everything else                  → python (user/venv scripts, etc.)
    """
    command_abs = os.path.abspath(os.path.expanduser(command))
    repo_abs = os.path.abspath(repo_dir)
    home = os.path.expanduser("~")

    # OS package managers
    if command_abs.startswith("/usr/") or command_abs.startswith("/bin/"):
        return CliLayer.OS_PACKAGES

    # Nix store / profile
    if command_abs.startswith("/nix/store/") or command_abs.startswith(
        os.path.join(home, ".nix-profile")
    ):
        return CliLayer.NIX

    # User-local bin
    if command_abs.startswith(os.path.join(home, ".local", "bin")):
        return CliLayer.PYTHON

    # Inside the repository → usually a Makefile/script
    if command_abs.startswith(repo_abs):
        return CliLayer.MAKEFILE

    # Fallback: treat as Python-style/user-level script
    return CliLayer.PYTHON
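
A few classifications the heuristic above produces, as a runnable sanity check (the repo path is hypothetical; the ~/.local/bin case is omitted because it depends on the caller's real $HOME):

    from pkgmgr.actions.install.layers import CliLayer, classify_command_layer

    repo = "/home/u/repos/demo"  # hypothetical checkout
    assert classify_command_layer("/usr/bin/demo", repo) is CliLayer.OS_PACKAGES
    assert classify_command_layer("/nix/store/abc-demo/bin/demo", repo) is CliLayer.NIX
    assert classify_command_layer("/home/u/repos/demo/main.py", repo) is CliLayer.MAKEFILE
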
257
src/pkgmgr/actions/install/pipeline.py
Normal file
@@ -0,0 +1,257 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installation pipeline orchestration for repositories.

This module implements the "Setup Controller" logic:

1. Detect current CLI command for the repo (if any).
2. Classify it into a layer (os-packages, nix, python, makefile).
3. Iterate over installers in layer order:
   - Skip installers whose layer is weaker than an already-loaded one.
   - Run only installers that support() the repo and add new capabilities.
   - After each installer, re-resolve the command and update the layer.
4. Maintain the repo["command"] field and create/update symlinks via create_ink().

The goal is to prevent conflicting installations and make the layering
behaviour explicit and testable.
"""

from __future__ import annotations

from dataclasses import dataclass
from typing import Optional, Sequence, Set

from pkgmgr.actions.install.context import RepoContext
from pkgmgr.actions.install.installers.base import BaseInstaller
from pkgmgr.actions.install.layers import (
    CliLayer,
    classify_command_layer,
    layer_priority,
)
from pkgmgr.core.command.ink import create_ink
from pkgmgr.core.command.resolve import resolve_command_for_repo


@dataclass
class CommandState:
    """
    Represents the current CLI state for a repository:

    - command: absolute or relative path to the CLI entry point
    - layer: which conceptual layer this command belongs to
    """

    command: Optional[str]
    layer: Optional[CliLayer]


class CommandResolver:
    """
    Small helper responsible for resolving the current command for a repo
    and mapping it into a CommandState.
    """

    def __init__(self, ctx: RepoContext) -> None:
        self._ctx = ctx

    def resolve(self) -> CommandState:
        """
        Resolve the current command for this repository.

        If resolve_command_for_repo raises SystemExit (e.g. Python package
        without installed entry point), we treat this as "no command yet"
        from the point of view of the installers.
        """
        repo = self._ctx.repo
        identifier = self._ctx.identifier
        repo_dir = self._ctx.repo_dir

        try:
            cmd = resolve_command_for_repo(
                repo=repo,
                repo_identifier=identifier,
                repo_dir=repo_dir,
            )
        except SystemExit:
            cmd = None

        if not cmd:
            return CommandState(command=None, layer=None)

        layer = classify_command_layer(cmd, repo_dir)
        return CommandState(command=cmd, layer=layer)


class InstallationPipeline:
    """
    High-level orchestrator that applies a sequence of installers
    to a repository based on CLI layer precedence.
    """

    def __init__(self, installers: Sequence[BaseInstaller]) -> None:
        self._installers = list(installers)

    # ------------------------------------------------------------------
    # Public API
    # ------------------------------------------------------------------
    def run(self, ctx: RepoContext) -> None:
        """
        Execute the installation pipeline for a single repository.

        - Detect initial command & layer.
        - Optionally create a symlink.
        - Run installers in order, skipping those whose layer is weaker
          than an already-loaded CLI.
        - After each installer, re-resolve the command and refresh the
          symlink if needed.
        """
        repo = ctx.repo
        repo_dir = ctx.repo_dir
        identifier = ctx.identifier
        repositories_base_dir = ctx.repositories_base_dir
        bin_dir = ctx.bin_dir
        all_repos = ctx.all_repos
        quiet = ctx.quiet
        preview = ctx.preview

        resolver = CommandResolver(ctx)
        state = resolver.resolve()

        # Persist initial command (if any) and create a symlink.
        if state.command:
            repo["command"] = state.command
            create_ink(
                repo,
                repositories_base_dir,
                bin_dir,
                all_repos,
                quiet=quiet,
                preview=preview,
            )
        else:
            repo.pop("command", None)

        provided_capabilities: Set[str] = set()

        # Main installer loop
        for installer in self._installers:
            layer_name = getattr(installer, "layer", None)

            # Installers without a layer participate without precedence logic.
            if layer_name is None:
                self._run_installer(installer, ctx, identifier, repo_dir, quiet)
                continue

            try:
                installer_layer = CliLayer(layer_name)
            except ValueError:
                # Unknown layer string → treat as lowest priority.
                installer_layer = None

            # "Previous/Current layer already loaded?"
            if state.layer is not None and installer_layer is not None:
                current_prio = layer_priority(state.layer)
                installer_prio = layer_priority(installer_layer)

                if current_prio < installer_prio:
                    # Current CLI comes from a higher-priority layer,
                    # so we skip this installer entirely.
                    if not quiet:
                        print(
                            f"[pkgmgr] Skipping installer "
                            f"{installer.__class__.__name__} for {identifier} – "
                            f"CLI already provided by layer {state.layer.value!r}."
                        )
                    continue

                if current_prio == installer_prio:
                    # Same layer already provides a CLI; usually there is no
                    # need to run another installer on top of it.
                    if not quiet:
                        print(
                            f"[pkgmgr] Skipping installer "
                            f"{installer.__class__.__name__} for {identifier} – "
                            f"layer {installer_layer.value!r} is already loaded."
                        )
                    continue

            # Check if this installer is applicable at all.
            if not installer.supports(ctx):
                continue

            # Capabilities: if everything this installer would provide is already
            # covered, we can safely skip it.
            caps = installer.discover_capabilities(ctx)
            if caps and caps.issubset(provided_capabilities):
                if not quiet:
                    print(
                        f"Skipping installer {installer.__class__.__name__} "
                        f"for {identifier} – capabilities {caps} already provided."
                    )
                continue

            if not quiet:
                print(
                    f"[pkgmgr] Running installer {installer.__class__.__name__} "
                    f"for {identifier} in '{repo_dir}' "
                    f"(new capabilities: {caps or set()})..."
                )

            # Run the installer with error reporting.
            self._run_installer(installer, ctx, identifier, repo_dir, quiet)

            provided_capabilities.update(caps)

            # After running an installer, re-resolve the command and layer.
            new_state = resolver.resolve()
            if new_state.command:
                repo["command"] = new_state.command
                create_ink(
                    repo,
                    repositories_base_dir,
                    bin_dir,
                    all_repos,
                    quiet=quiet,
                    preview=preview,
                )
            else:
                repo.pop("command", None)

            state = new_state

    # ------------------------------------------------------------------
    # Internal helpers
    # ------------------------------------------------------------------
    @staticmethod
    def _run_installer(
        installer: BaseInstaller,
        ctx: RepoContext,
        identifier: str,
        repo_dir: str,
        quiet: bool,
    ) -> None:
        """
        Execute a single installer with unified error handling.
        """
        try:
            installer.run(ctx)
        except SystemExit as exc:
            exit_code = exc.code if isinstance(exc.code, int) else str(exc.code)
            print(
                f"[ERROR] Installer {installer.__class__.__name__} failed "
                f"for repository {identifier} (dir: {repo_dir}) "
                f"with exit code {exit_code}."
            )
            print(
                "[ERROR] This usually means an underlying command failed "
                "(e.g. 'make install', 'nix build', 'pip install', ...)."
            )
            print(
                "[ERROR] Check the log above for the exact command output. "
                "You can also run this repository in isolation via:\n"
                f" pkgmgr install {identifier} "
                "--clone-mode shallow --no-verification"
            )
            raise
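
A minimal sketch of wiring this orchestrator to the installers introduced in this commit (the installer order and the construction of ctx are assumptions; the real registration site may differ):

    from pkgmgr.actions.install.pipeline import InstallationPipeline
    from pkgmgr.actions.install.installers.os_packages import (
        ArchPkgbuildInstaller,
        DebianControlInstaller,
        RpmSpecInstaller,
    )

    pipeline = InstallationPipeline(
        [ArchPkgbuildInstaller(), DebianControlInstaller(), RpmSpecInstaller()]
    )
    # pipeline.run(ctx)  # ctx: a RepoContext built by the install action
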
33
src/pkgmgr/actions/proxy.py
Normal file
@@ -0,0 +1,33 @@
import os
import sys

from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.command.run import run_command


def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir, all_repos, proxy_command: str, extra_args, preview: bool):
    """Execute a given proxy command with extra arguments for each repository."""
    error_repos = []
    max_exit_code = 0

    for repo in selected_repos:
        repo_identifier = get_repo_identifier(repo, all_repos)
        repo_dir = get_repo_dir(repositories_base_dir, repo)

        if not os.path.exists(repo_dir):
            print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")
            continue

        full_cmd = f"{proxy_prefix} {proxy_command} {' '.join(extra_args)}"

        try:
            run_command(full_cmd, cwd=repo_dir, preview=preview)
        except SystemExit as e:
            # SystemExit.code may be None or a string; normalize to an int
            # so the max() comparison below cannot raise a TypeError.
            exit_code = e.code if isinstance(e.code, int) else 1
            print(f"[ERROR] Command failed in {repo_identifier} with exit code {exit_code}.")
            error_repos.append((repo_identifier, exit_code))
            max_exit_code = max(max_exit_code, exit_code)

    if error_repos:
        print("\nSummary of failed commands:")
        for repo_identifier, exit_code in error_repos:
            print(f"- {repo_identifier} failed with exit code {exit_code}")
        sys.exit(max_exit_code)
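
An illustrative call, mirroring how the CLI layer might fan a command out to every selected repository (the repository dict shape and paths are hypothetical; preview=True only prints the commands):

    from pkgmgr.actions.proxy import exec_proxy_command

    exec_proxy_command(
        proxy_prefix="git",
        selected_repos=selected,            # repo config dicts from the loaded config
        repositories_base_dir="~/Repositories",
        all_repos=all_repos,
        proxy_command="status",
        extra_args=["--short"],
        preview=True,
    )
    # Would run "git status --short" in each existing repository directory.
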
310
src/pkgmgr/actions/release/__init__.py
Normal file
@@ -0,0 +1,310 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Release helper for pkgmgr (public entry point).

This package provides the high-level `release()` function used by the
pkgmgr CLI to perform versioned releases:

- Determine the next semantic version based on existing Git tags.
- Update pyproject.toml with the new version.
- Update additional packaging files (flake.nix, PKGBUILD,
  debian/changelog, RPM spec) where present.
- Prepend a basic entry to CHANGELOG.md.
- Move the floating 'latest' tag to the newly created release tag so
  the newest release is always marked as latest.

Additional behaviour:
- If `preview=True` (from --preview), no files are written and no
  Git commands are executed. Instead, a detailed summary of the
  planned changes and commands is printed.
- If `preview=False` and not forced, the release is executed in two
  phases:
  1) Preview-only run (dry-run).
  2) Interactive confirmation, then real release if confirmed.
  This confirmation can be skipped with the `force=True` flag.
- Before creating and pushing tags, main/master is updated from origin
  when the release is performed on one of these branches.
- If `close=True` is used and the current branch is not main/master,
  the branch will be closed via branch_commands.close_branch() after
  a successful release.
"""

from __future__ import annotations

import os
import sys
from typing import Optional

from pkgmgr.core.git import get_current_branch, GitError
from pkgmgr.actions.branch import close_branch

from .versioning import determine_current_version, bump_semver
from .git_ops import run_git_command, sync_branch_with_remote, update_latest_tag
from .files import (
    update_pyproject_version,
    update_flake_version,
    update_pkgbuild_version,
    update_spec_version,
    update_changelog,
    update_debian_changelog,
    update_spec_changelog,
)


# ---------------------------------------------------------------------------
# Internal implementation (single-phase, preview or real)
# ---------------------------------------------------------------------------


def _release_impl(
    pyproject_path: str = "pyproject.toml",
    changelog_path: str = "CHANGELOG.md",
    release_type: str = "patch",
    message: Optional[str] = None,
    preview: bool = False,
    close: bool = False,
) -> None:
    """
    Internal implementation that performs a single-phase release.
    """
    current_ver = determine_current_version()
    new_ver = bump_semver(current_ver, release_type)
    new_ver_str = str(new_ver)
    new_tag = new_ver.to_tag(with_prefix=True)

    mode = "PREVIEW" if preview else "REAL"
    print(f"Release mode: {mode}")
    print(f"Current version: {current_ver}")
    print(f"New version: {new_ver_str} ({release_type})")

    repo_root = os.path.dirname(os.path.abspath(pyproject_path))

    # Update core project metadata and packaging files
    update_pyproject_version(pyproject_path, new_ver_str, preview=preview)
    changelog_message = update_changelog(
        changelog_path,
        new_ver_str,
        message=message,
        preview=preview,
    )

    flake_path = os.path.join(repo_root, "flake.nix")
    update_flake_version(flake_path, new_ver_str, preview=preview)

    pkgbuild_path = os.path.join(repo_root, "PKGBUILD")
    update_pkgbuild_version(pkgbuild_path, new_ver_str, preview=preview)

    spec_path = os.path.join(repo_root, "package-manager.spec")
    update_spec_version(spec_path, new_ver_str, preview=preview)

    # Determine a single effective_message to be reused across all
    # changelog targets (project, Debian, Fedora).
    effective_message: Optional[str] = message
    if effective_message is None and isinstance(changelog_message, str):
        if changelog_message.strip():
            effective_message = changelog_message.strip()

    debian_changelog_path = os.path.join(repo_root, "debian", "changelog")
    package_name = os.path.basename(repo_root) or "package-manager"

    # Debian changelog
    update_debian_changelog(
        debian_changelog_path,
        package_name=package_name,
        new_version=new_ver_str,
        message=effective_message,
        preview=preview,
    )

    # Fedora / RPM %changelog
    update_spec_changelog(
        spec_path=spec_path,
        package_name=package_name,
        new_version=new_ver_str,
        message=effective_message,
        preview=preview,
    )

    commit_msg = f"Release version {new_ver_str}"
    tag_msg = effective_message or commit_msg

    # Determine branch and ensure it is up to date if main/master
    try:
        branch = get_current_branch() or "main"
    except GitError:
        branch = "main"
    print(f"Releasing on branch: {branch}")

    # Ensure main/master are up-to-date from origin before creating and
    # pushing tags. For other branches we only log the intent.
    sync_branch_with_remote(branch, preview=preview)

    files_to_add = [
        pyproject_path,
        changelog_path,
        flake_path,
        pkgbuild_path,
        spec_path,
        debian_changelog_path,
    ]
    existing_files = [p for p in files_to_add if p and os.path.exists(p)]

    if preview:
        for path in existing_files:
            print(f"[PREVIEW] Would run: git add {path}")
        print(f'[PREVIEW] Would run: git commit -am "{commit_msg}"')
        print(f'[PREVIEW] Would run: git tag -a {new_tag} -m "{tag_msg}"')
        print(f"[PREVIEW] Would run: git push origin {branch}")
        print("[PREVIEW] Would run: git push origin --tags")

        # Also update the floating 'latest' tag to the new highest SemVer.
        update_latest_tag(new_tag, preview=True)

        if close and branch not in ("main", "master"):
            print(
                f"[PREVIEW] Would also close branch {branch} after the release "
                "(close=True and branch is not main/master)."
            )
        elif close:
            print(
                f"[PREVIEW] close=True but current branch is {branch}; "
                "no branch would be closed."
            )

        print("Preview completed. No changes were made.")
        return

    for path in existing_files:
        run_git_command(f"git add {path}")

    run_git_command(f'git commit -am "{commit_msg}"')
    run_git_command(f'git tag -a {new_tag} -m "{tag_msg}"')
    run_git_command(f"git push origin {branch}")
    run_git_command("git push origin --tags")

    # Move 'latest' to the new release tag so the newest SemVer is always
    # marked as latest. This is best-effort and must not break the release.
    try:
        update_latest_tag(new_tag, preview=False)
    except GitError as exc:  # pragma: no cover
        print(
            f"[WARN] Failed to update floating 'latest' tag for {new_tag}: {exc}\n"
            "[WARN] The release itself completed successfully; only the "
            "'latest' tag was not updated."
        )

    print(f"Release {new_ver_str} completed.")

    if close:
        if branch in ("main", "master"):
            print(
                f"[INFO] close=True but current branch is {branch}; "
                "nothing to close."
            )
            return

        print(
            f"[INFO] Closing branch {branch} after successful release "
            "(close=True and branch is not main/master)..."
        )
        try:
            close_branch(name=branch, base_branch="main", cwd=".")
        except Exception as exc:  # pragma: no cover
            print(f"[WARN] Failed to close branch {branch} automatically: {exc}")


# ---------------------------------------------------------------------------
# Public release entry point
# ---------------------------------------------------------------------------


def release(
    pyproject_path: str = "pyproject.toml",
    changelog_path: str = "CHANGELOG.md",
    release_type: str = "patch",
    message: Optional[str] = None,
    preview: bool = False,
    force: bool = False,
    close: bool = False,
) -> None:
    """
    High-level release entry point.

    Modes:

    - preview=True:
      * Single-phase PREVIEW only.

    - preview=False, force=True:
      * Single-phase REAL release, no interactive preview.

    - preview=False, force=False:
      * Two-phase flow (intended default for interactive CLI use).
    """
    if preview:
        _release_impl(
            pyproject_path=pyproject_path,
            changelog_path=changelog_path,
            release_type=release_type,
            message=message,
            preview=True,
            close=close,
        )
        return

    if force:
        _release_impl(
            pyproject_path=pyproject_path,
            changelog_path=changelog_path,
            release_type=release_type,
            message=message,
            preview=False,
            close=close,
        )
        return

    if not sys.stdin.isatty():
        _release_impl(
            pyproject_path=pyproject_path,
            changelog_path=changelog_path,
            release_type=release_type,
            message=message,
            preview=False,
            close=close,
        )
        return

    print("[INFO] Running preview before actual release...\n")
    _release_impl(
        pyproject_path=pyproject_path,
        changelog_path=changelog_path,
        release_type=release_type,
        message=message,
        preview=True,
        close=close,
    )

    try:
        answer = input("Proceed with the actual release? [y/N]: ").strip().lower()
    except (EOFError, KeyboardInterrupt):
        print("\n[INFO] Release aborted (no confirmation).")
        return

    if answer not in ("y", "yes"):
        print("Release aborted by user. No changes were made.")
        return

    print("\n[INFO] Running REAL release...\n")
    _release_impl(
        pyproject_path=pyproject_path,
        changelog_path=changelog_path,
        release_type=release_type,
        message=message,
        preview=False,
        close=close,
    )


__all__ = ["release"]
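
Typical invocations of the entry point above, matching the modes the docstring defines (values illustrative):

    from pkgmgr.actions.release import release

    release(release_type="patch")                 # interactive: preview, then confirm
    release(release_type="minor", preview=True)   # dry-run only
    release(release_type="major", force=True)     # skip the confirmation phase
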
537
src/pkgmgr/actions/release/files.py
Normal file
@@ -0,0 +1,537 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
File and metadata update helpers for the release workflow.

Responsibilities:
- Update pyproject.toml with the new version.
- Update flake.nix, PKGBUILD, RPM spec files where present.
- Prepend release entries to CHANGELOG.md.
- Maintain distribution-specific changelog files:
  * debian/changelog
  * RPM spec %changelog section
  including maintainer metadata where applicable.
"""

from __future__ import annotations

import os
import re
import subprocess
import sys
import tempfile
from datetime import date, datetime
from typing import Optional, Tuple


# ---------------------------------------------------------------------------
# Editor helper for interactive changelog messages
# ---------------------------------------------------------------------------


def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
    """
    Open $EDITOR (fallback 'nano') so the user can enter a changelog message.

    The temporary file is pre-filled with commented instructions and an
    optional initial_message. Lines starting with '#' are ignored when the
    message is read back.

    Returns the final message (may be empty string if user leaves it blank).
    """
    editor = os.environ.get("EDITOR", "nano")

    with tempfile.NamedTemporaryFile(
        mode="w+",
        delete=False,
        encoding="utf-8",
    ) as tmp:
        tmp_path = tmp.name
        tmp.write(
            "# Write the changelog entry for this release.\n"
            "# Lines starting with '#' will be ignored.\n"
            "# Empty result will fall back to a generic message.\n\n"
        )
        if initial_message:
            tmp.write(initial_message.strip() + "\n")
        tmp.flush()

    try:
        subprocess.call([editor, tmp_path])
    except FileNotFoundError:
        print(
            f"[WARN] Editor {editor!r} not found; proceeding without "
            "interactive changelog message."
        )

    try:
        with open(tmp_path, "r", encoding="utf-8") as f:
            content = f.read()
    finally:
        try:
            os.remove(tmp_path)
        except OSError:
            pass

    lines = [
        line for line in content.splitlines()
        if not line.strip().startswith("#")
    ]
    return "\n".join(lines).strip()
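
A small illustration of the read-back behaviour (file contents hypothetical): if the user saves

    # Write the changelog entry for this release.
    # Lines starting with '#' will be ignored.
    Fix RPM Source0 handling

the function returns "Fix RPM Source0 handling"; the commented instructions are filtered out and surrounding whitespace is stripped.
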
# ---------------------------------------------------------------------------
# File update helpers (pyproject + extra packaging + changelog)
# ---------------------------------------------------------------------------

def update_pyproject_version(
    pyproject_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Update the version in pyproject.toml with the new version.

    The function looks for a line matching:

        version = "X.Y.Z"

    and replaces the version part with the given new_version string.

    If the file does not exist, it is skipped without failing the release.
    """
    if not os.path.exists(pyproject_path):
        print(
            f"[INFO] pyproject.toml not found at: {pyproject_path}, "
            "skipping version update."
        )
        return

    try:
        with open(pyproject_path, "r", encoding="utf-8") as f:
            content = f.read()
    except OSError as exc:
        print(
            f"[WARN] Could not read pyproject.toml at {pyproject_path}: {exc}. "
            "Skipping version update."
        )
        return

    pattern = r'^(version\s*=\s*")([^"]+)(")'
    new_content, count = re.subn(
        pattern,
        lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
        content,
        flags=re.MULTILINE,
    )

    if count == 0:
        print("[ERROR] Could not find version line in pyproject.toml")
        sys.exit(1)

    if preview:
        print(f"[PREVIEW] Would update pyproject.toml version to {new_version}")
        return

    with open(pyproject_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(f"Updated pyproject.toml version to {new_version}")
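
The substitution above is anchored to the start of a line, so only top-level assignments are rewritten; a quick, runnable check of the behaviour (values illustrative):

    import re

    content = 'name = "pkgmgr"\nversion = "0.7.7"\n'
    pattern = r'^(version\s*=\s*")([^"]+)(")'
    updated = re.sub(
        pattern,
        lambda m: f'{m.group(1)}0.7.8{m.group(3)}',
        content,
        flags=re.MULTILINE,
    )
    assert updated == 'name = "pkgmgr"\nversion = "0.7.8"\n'
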
def update_flake_version(
    flake_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Update the version in flake.nix, if present.
    """
    if not os.path.exists(flake_path):
        print("[INFO] flake.nix not found, skipping.")
        return

    try:
        with open(flake_path, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read flake.nix: {exc}")
        return

    pattern = r'(version\s*=\s*")([^"]+)(")'
    new_content, count = re.subn(
        pattern,
        lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
        content,
    )

    if count == 0:
        print("[WARN] No version assignment found in flake.nix, skipping.")
        return

    if preview:
        print(f"[PREVIEW] Would update flake.nix version to {new_version}")
        return

    with open(flake_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(f"Updated flake.nix version to {new_version}")


def update_pkgbuild_version(
    pkgbuild_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Update the version in PKGBUILD, if present.

    Expects:
        pkgver=1.2.3
        pkgrel=1
    """
    if not os.path.exists(pkgbuild_path):
        print("[INFO] PKGBUILD not found, skipping.")
        return

    try:
        with open(pkgbuild_path, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read PKGBUILD: {exc}")
        return

    ver_pattern = r"^(pkgver\s*=\s*)(.+)$"
    new_content, ver_count = re.subn(
        ver_pattern,
        lambda m: f"{m.group(1)}{new_version}",
        content,
        flags=re.MULTILINE,
    )

    if ver_count == 0:
        print("[WARN] No pkgver line found in PKGBUILD.")
        new_content = content

    rel_pattern = r"^(pkgrel\s*=\s*)(.+)$"
    new_content, rel_count = re.subn(
        rel_pattern,
        lambda m: f"{m.group(1)}1",
        new_content,
        flags=re.MULTILINE,
    )

    if rel_count == 0:
        print("[WARN] No pkgrel line found in PKGBUILD.")

    if preview:
        print(f"[PREVIEW] Would update PKGBUILD to pkgver={new_version}, pkgrel=1")
        return

    with open(pkgbuild_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(f"Updated PKGBUILD to pkgver={new_version}, pkgrel=1")


def update_spec_version(
    spec_path: str,
    new_version: str,
    preview: bool = False,
) -> None:
    """
    Update the version in an RPM spec file, if present.
    """
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping.")
        return

    try:
        with open(spec_path, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file: {exc}")
        return

    ver_pattern = r"^(Version:\s*)(.+)$"
    new_content, ver_count = re.subn(
        ver_pattern,
        lambda m: f"{m.group(1)}{new_version}",
        content,
        flags=re.MULTILINE,
    )

    if ver_count == 0:
        print("[WARN] No 'Version:' line found in spec file.")

    rel_pattern = r"^(Release:\s*)(.+)$"

    def _release_repl(m: re.Match[str]) -> str:  # type: ignore[name-defined]
        rest = m.group(2).strip()
        match = re.match(r"^(\d+)(.*)$", rest)
        if match:
            suffix = match.group(2)
        else:
            suffix = ""
        return f"{m.group(1)}1{suffix}"

    new_content, rel_count = re.subn(
        rel_pattern,
        _release_repl,
        new_content,
        flags=re.MULTILINE,
    )

    if rel_count == 0:
        print("[WARN] No 'Release:' line found in spec file.")

    if preview:
        print(
            f"[PREVIEW] Would update spec file "
            f"{os.path.basename(spec_path)} to Version: {new_version}, Release: 1..."
        )
        return

    with open(spec_path, "w", encoding="utf-8") as f:
        f.write(new_content)

    print(
        f"Updated spec file {os.path.basename(spec_path)} "
        f"to Version: {new_version}, Release: 1..."
    )


def update_changelog(
    changelog_path: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> str:
    """
    Prepend a new release section to CHANGELOG.md with the new version,
    current date, and a message.
    """
    today = date.today().isoformat()

    if message is None:
        if preview:
            message = "Automated release."
        else:
            print(
                "\n[INFO] No release message provided, opening editor for "
                "changelog entry...\n"
            )
            editor_message = _open_editor_for_changelog()
            if not editor_message:
                message = "Automated release."
            else:
                message = editor_message

    header = f"## [{new_version}] - {today}\n"
    header += f"\n* {message}\n\n"

    if os.path.exists(changelog_path):
        try:
            with open(changelog_path, "r", encoding="utf-8") as f:
                changelog = f.read()
        except Exception as exc:
            print(f"[WARN] Could not read existing CHANGELOG.md: {exc}")
            changelog = ""
    else:
        changelog = ""

    new_changelog = header + "\n" + changelog if changelog else header

    print("\n================ CHANGELOG ENTRY ================")
    print(header.rstrip())
    print("=================================================\n")

    if preview:
        print(f"[PREVIEW] Would prepend new entry for {new_version} to CHANGELOG.md")
        return message

    with open(changelog_path, "w", encoding="utf-8") as f:
        f.write(new_changelog)

    print(f"Updated CHANGELOG.md with version {new_version}")

    return message
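
For a hypothetical release 0.7.8 cut on 2025-12-09 with message "Fix RPM Source0 handling", the section prepended above would read:

    ## [0.7.8] - 2025-12-09

    * Fix RPM Source0 handling
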
# ---------------------------------------------------------------------------
# Debian changelog helpers (with Git config fallback for maintainer)
# ---------------------------------------------------------------------------


def _get_git_config_value(key: str) -> Optional[str]:
    """
    Try to read a value from `git config --get <key>`.
    """
    try:
        result = subprocess.run(
            ["git", "config", "--get", key],
            capture_output=True,
            text=True,
            check=False,
        )
    except Exception:
        return None

    value = result.stdout.strip()
    return value or None


def _get_debian_author() -> Tuple[str, str]:
    """
    Determine the maintainer name/email for debian/changelog entries.
    """
    name = os.environ.get("DEBFULLNAME")
    email = os.environ.get("DEBEMAIL")

    if not name:
        name = os.environ.get("GIT_AUTHOR_NAME")
    if not email:
        email = os.environ.get("GIT_AUTHOR_EMAIL")

    if not name:
        name = _get_git_config_value("user.name")
    if not email:
        email = _get_git_config_value("user.email")

    if not name:
        name = "Unknown Maintainer"
    if not email:
        email = "unknown@example.com"

    return name, email
def update_debian_changelog(
|
||||
debian_changelog_path: str,
|
||||
package_name: str,
|
||||
new_version: str,
|
||||
message: Optional[str] = None,
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Prepend a new entry to debian/changelog, if it exists.
|
||||
"""
|
||||
if not os.path.exists(debian_changelog_path):
|
||||
print("[INFO] debian/changelog not found, skipping.")
|
||||
return
|
||||
|
||||
debian_version = f"{new_version}-1"
|
||||
now = datetime.now().astimezone()
|
||||
date_str = now.strftime("%a, %d %b %Y %H:%M:%S %z")
|
||||
|
||||
author_name, author_email = _get_debian_author()
|
||||
|
||||
first_line = f"{package_name} ({debian_version}) unstable; urgency=medium"
|
||||
body_line = message.strip() if message else f"Automated release {new_version}."
|
||||
stanza = (
|
||||
f"{first_line}\n\n"
|
||||
f" * {body_line}\n\n"
|
||||
f" -- {author_name} <{author_email}> {date_str}\n\n"
|
||||
)
|
||||
|
||||
if preview:
|
||||
print(
|
||||
"[PREVIEW] Would prepend the following stanza to debian/changelog:\n"
|
||||
f"{stanza}"
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
with open(debian_changelog_path, "r", encoding="utf-8") as f:
|
||||
existing = f.read()
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Could not read debian/changelog: {exc}")
|
||||
existing = ""
|
||||
|
||||
new_content = stanza + existing
|
||||
|
||||
with open(debian_changelog_path, "w", encoding="utf-8") as f:
|
||||
f.write(new_content)
|
||||
|
||||
print(f"Updated debian/changelog with version {debian_version}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fedora / RPM spec %changelog helper
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def update_spec_changelog(
|
||||
spec_path: str,
|
||||
package_name: str,
|
||||
new_version: str,
|
||||
message: Optional[str] = None,
|
||||
preview: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Prepend a new entry to the %changelog section of an RPM spec file,
|
||||
if present.
|
||||
|
||||
Typical RPM-style entry:
|
||||
|
||||
* Tue Dec 09 2025 John Doe <john@example.com> - 0.5.1-1
|
||||
- Your changelog message
|
||||
"""
|
||||
if not os.path.exists(spec_path):
|
||||
print("[INFO] RPM spec file not found, skipping spec changelog update.")
|
||||
return
|
||||
|
||||
try:
|
||||
with open(spec_path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Could not read spec file for changelog update: {exc}")
|
||||
return
|
||||
|
||||
debian_version = f"{new_version}-1"
|
||||
now = datetime.now().astimezone()
|
||||
date_str = now.strftime("%a %b %d %Y")
|
||||
|
||||
# Reuse Debian maintainer discovery for author name/email.
|
||||
author_name, author_email = _get_debian_author()
|
||||
|
||||
body_line = message.strip() if message else f"Automated release {new_version}."
|
||||
|
||||
stanza = (
|
||||
f"* {date_str} {author_name} <{author_email}> - {debian_version}\n"
|
||||
f"- {body_line}\n\n"
|
||||
)
|
||||
|
||||
marker = "%changelog"
|
||||
idx = content.find(marker)
|
||||
|
||||
if idx == -1:
|
||||
# No %changelog section yet: append one at the end.
|
||||
new_content = content.rstrip() + "\n\n%changelog\n" + stanza
|
||||
else:
|
||||
# Insert stanza right after the %changelog line.
|
||||
before = content[: idx + len(marker)]
|
||||
after = content[idx + len(marker) :]
|
||||
new_content = before + "\n" + stanza + after.lstrip("\n")
|
||||
|
||||
if preview:
|
||||
print(
|
||||
"[PREVIEW] Would update RPM %changelog section with the following "
|
||||
"stanza:\n"
|
||||
f"{stanza}"
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
with open(spec_path, "w", encoding="utf-8") as f:
|
||||
f.write(new_content)
|
||||
except Exception as exc:
|
||||
print(f"[WARN] Failed to write updated spec changelog section: {exc}")
|
||||
return
|
||||
|
||||
print(
|
||||
f"Updated RPM %changelog section in {os.path.basename(spec_path)} "
|
||||
f"for {package_name} {debian_version}"
|
||||
)
|
||||
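Taken together, these helpers form the per-format update step of the release flow. A minimal driver sketch, assuming the packaging paths below (they follow the packaging/* layout from this commit but are illustrative) and preview mode so nothing is written:

    msg = update_changelog("CHANGELOG.md", "0.5.1", message="Automated release.", preview=True)
    update_debian_changelog("packaging/debian/changelog", "pkgmgr", "0.5.1", message=msg, preview=True)
    update_spec_changelog("packaging/fedora/pkgmgr.spec", "pkgmgr", "0.5.1", message=msg, preview=True)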
95
src/pkgmgr/actions/release/git_ops.py
Normal file
@@ -0,0 +1,95 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Git-related helpers for the release workflow.

Responsibilities:
- Run Git (or shell) commands with basic error reporting.
- Ensure main/master are synchronized with origin before tagging.
- Maintain the floating 'latest' tag that always points to the newest
  release tag.
"""

from __future__ import annotations

import subprocess

from pkgmgr.core.git import GitError


def run_git_command(cmd: str) -> None:
    """
    Run a Git (or shell) command with basic error reporting.

    The command is executed via the shell, primarily for readability
    when printed (as in 'git commit -am "msg"').

    Output is captured so the error report can echo it; without capturing,
    CalledProcessError.stdout/.stderr would always be None.
    """
    print(f"[GIT] {cmd}")
    try:
        result = subprocess.run(
            cmd, shell=True, check=True, capture_output=True, text=True
        )
        if result.stdout:
            print(result.stdout, end="")
    except subprocess.CalledProcessError as exc:
        print(f"[ERROR] Git command failed: {cmd}")
        print(f"  Exit code: {exc.returncode}")
        if exc.stdout:
            print("--- stdout ---")
            print(exc.stdout)
        if exc.stderr:
            print("--- stderr ---")
            print(exc.stderr)
        raise GitError(f"Git command failed: {cmd}") from exc


def sync_branch_with_remote(branch: str, preview: bool = False) -> None:
    """
    Ensure the local main/master branch is up-to-date before tagging.

    Behaviour:
    - For main/master: run 'git fetch origin' and 'git pull origin <branch>'.
    - For all other branches: only log that no automatic sync is performed.
    """
    if branch not in ("main", "master"):
        print(
            f"[INFO] Skipping automatic git pull for non-main/master branch "
            f"{branch}."
        )
        return

    print(
        f"[INFO] Updating branch {branch} from origin before creating tags..."
    )

    if preview:
        print("[PREVIEW] Would run: git fetch origin")
        print(f"[PREVIEW] Would run: git pull origin {branch}")
        return

    run_git_command("git fetch origin")
    run_git_command(f"git pull origin {branch}")


def update_latest_tag(new_tag: str, preview: bool = False) -> None:
    """
    Move the floating 'latest' tag to the newly created release tag.

    Implementation details:
    - We explicitly dereference the tag object via `<tag>^{}` so that
      'latest' always points at the underlying commit, not at another tag.
    - We create/update 'latest' as an annotated tag with a short message so
      Git configurations that enforce annotated/signed tags do not fail
      with "no tag message".
    """
    target_ref = f"{new_tag}^{{}}"
    print(f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})...")

    if preview:
        print(
            f"[PREVIEW] Would run: git tag -f -a latest {target_ref} "
            f'-m "Floating latest tag for {new_tag}"'
        )
        print("[PREVIEW] Would run: git push origin latest --force")
        return

    run_git_command(
        f'git tag -f -a latest {target_ref} '
        f'-m "Floating latest tag for {new_tag}"'
    )
    run_git_command("git push origin latest --force")
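A short usage sketch for the helpers above (the tag name is illustrative; with preview=True only the would-be commands are printed):

    sync_branch_with_remote("main", preview=True)
    update_latest_tag("v0.5.1", preview=True)  # would move 'latest' to the commit behind v0.5.1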
53
src/pkgmgr/actions/release/versioning.py
Normal file
@@ -0,0 +1,53 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Version discovery and bumping helpers for the release workflow.
"""

from __future__ import annotations

from pkgmgr.core.git import get_tags
from pkgmgr.core.version.semver import (
    SemVer,
    find_latest_version,
    bump_major,
    bump_minor,
    bump_patch,
)


def determine_current_version() -> SemVer:
    """
    Determine the current semantic version from Git tags.

    Behaviour:
    - If there are no tags or no SemVer-compatible tags, return 0.0.0.
    - Otherwise, use the latest SemVer tag as current version.
    """
    tags = get_tags()
    if not tags:
        return SemVer(0, 0, 0)

    latest = find_latest_version(tags)
    if latest is None:
        return SemVer(0, 0, 0)

    _tag, ver = latest
    return ver


def bump_semver(current: SemVer, release_type: str) -> SemVer:
    """
    Bump the given SemVer according to the release type.

    release_type must be one of: "major", "minor", "patch".
    """
    if release_type == "major":
        return bump_major(current)
    if release_type == "minor":
        return bump_minor(current)
    if release_type == "patch":
        return bump_patch(current)

    raise ValueError(f"Unknown release type: {release_type!r}")
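The two functions compose into the usual "current version to next tag" step. A minimal sketch, assuming SemVer instances render as "X.Y.Z" via str():

    current = determine_current_version()        # e.g. SemVer(0, 5, 0); 0.0.0 if no SemVer tags
    next_version = bump_semver(current, "patch")
    print(f"next tag: v{next_version}")          # e.g. next tag: v0.5.1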
0
src/pkgmgr/actions/repository/__init__.py
Normal file
103
src/pkgmgr/actions/repository/clone.py
Normal file
@@ -0,0 +1,103 @@
import subprocess
import os
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.verify import verify_repository


def clone_repos(
    selected_repos,
    repositories_base_dir: str,
    all_repos,
    preview: bool,
    no_verification: bool,
    clone_mode: str,
):
    for repo in selected_repos:
        repo_identifier = get_repo_identifier(repo, all_repos)
        repo_dir = get_repo_dir(repositories_base_dir, repo)
        if os.path.exists(repo_dir):
            print(f"[INFO] Repository '{repo_identifier}' already exists at '{repo_dir}'. Skipping clone.")
            continue

        parent_dir = os.path.dirname(repo_dir)
        os.makedirs(parent_dir, exist_ok=True)

        # Build clone URL based on the clone_mode
        if clone_mode == "ssh":
            clone_url = (
                f"git@{repo.get('provider')}:"
                f"{repo.get('account')}/"
                f"{repo.get('repository')}.git"
            )
        elif clone_mode in ("https", "shallow"):
            # Use replacement if defined, otherwise construct from provider/account/repository
            if repo.get("replacement"):
                clone_url = f"https://{repo.get('replacement')}.git"
            else:
                clone_url = (
                    f"https://{repo.get('provider')}/"
                    f"{repo.get('account')}/"
                    f"{repo.get('repository')}.git"
                )
        else:
            print(f"Unknown clone mode '{clone_mode}'. Aborting clone for {repo_identifier}.")
            continue

        # Build base clone command
        base_clone_cmd = "git clone"
        if clone_mode == "shallow":
            # Shallow clone: only latest state via HTTPS, no full history
            base_clone_cmd += " --depth 1 --single-branch"

        mode_label = "HTTPS (shallow)" if clone_mode == "shallow" else clone_mode.upper()
        print(
            f"[INFO] Attempting to clone '{repo_identifier}' using {mode_label} "
            f"from {clone_url} into '{repo_dir}'."
        )

        if preview:
            print(f"[Preview] Would run: {base_clone_cmd} {clone_url} {repo_dir} in {parent_dir}")
            result = subprocess.CompletedProcess(args=[], returncode=0)
        else:
            result = subprocess.run(
                f"{base_clone_cmd} {clone_url} {repo_dir}",
                cwd=parent_dir,
                shell=True,
            )

        if result.returncode != 0:
            # Only offer a fallback if the original mode was SSH.
            if clone_mode == "ssh":
                print(f"[WARNING] SSH clone failed for '{repo_identifier}' with return code {result.returncode}.")
                choice = input("Do you want to attempt HTTPS clone instead? (y/N): ").strip().lower()
                if choice == 'y':
                    # Attempt HTTPS clone
                    if repo.get("replacement"):
                        clone_url = f"https://{repo.get('replacement')}.git"
                    else:
                        clone_url = f"https://{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}.git"
                    print(f"[INFO] Attempting to clone '{repo_identifier}' using HTTPS from {clone_url} into '{repo_dir}'.")
                    if preview:
                        print(f"[Preview] Would run: git clone {clone_url} {repo_dir} in {parent_dir}")
                        result = subprocess.CompletedProcess(args=[], returncode=0)
                    else:
                        result = subprocess.run(f"git clone {clone_url} {repo_dir}", cwd=parent_dir, shell=True)
                else:
                    print(f"[INFO] HTTPS clone not attempted for '{repo_identifier}'.")
                    continue
            else:
                # For https/shallow modes, do not attempt a fallback.
                print(f"[WARNING] HTTPS clone failed for '{repo_identifier}' with return code {result.returncode}.")
                continue

        # After cloning, perform verification in local mode.
        verified_info = repo.get("verified")
        if verified_info:
            verified_ok, errors, commit_hash, signing_key = verify_repository(repo, repo_dir, mode="local", no_verification=no_verification)
            if not no_verification and not verified_ok:
                print(f"Warning: Verification failed for {repo_identifier} after cloning:")
                for err in errors:
                    print(f"  - {err}")
                choice = input("Proceed anyway? (y/N): ").strip().lower()
                if choice != "y":
                    print(f"Skipping repository {repo_identifier} due to failed verification.")
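A hedged invocation sketch (the repository dict carries the provider/account/repository fields the function reads; note that even with preview=True the parent directory is still created):

    import os

    repo = {"provider": "github.com", "account": "example", "repository": "demo"}
    clone_repos(
        [repo],
        repositories_base_dir=os.path.expanduser("~/Repositories"),
        all_repos=[repo],
        preview=True,
        no_verification=True,
        clone_mode="shallow",
    )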
144
src/pkgmgr/actions/repository/create.py
Normal file
@@ -0,0 +1,144 @@
import os
import subprocess
import yaml
from pkgmgr.core.command.alias import generate_alias
from pkgmgr.core.config.save import save_user_config


def create_repo(identifier, config_merged, user_config_path, bin_dir, remote=False, preview=False):
    """
    Creates a new repository by performing the following steps:

    1. Parses the identifier (provider:port/account/repository) and adds a new entry to the user config
       if it is not already present. The provider part is split into provider and port (if provided).
    2. Creates the local repository directory and initializes a Git repository.
    3. If --remote is set, checks for an existing "origin" remote (removing it if found),
       adds the remote using a URL built from provider, port, account, and repository,
       creates an initial commit (e.g. with a README.md), and pushes to the remote.
       The initial push targets the "master" branch.
    """
    parts = identifier.split("/")
    if len(parts) != 3:
        print("Identifier must be in the format 'provider:port/account/repository' (port is optional).")
        return

    provider_with_port, account, repository = parts
    # Split provider and port if a colon is present.
    if ":" in provider_with_port:
        provider_name, port = provider_with_port.split(":", 1)
    else:
        provider_name = provider_with_port
        port = None

    # Check if the repository is already present in the merged config (including port)
    exists = False
    for repo in config_merged.get("repositories", []):
        if (repo.get("provider") == provider_name and
                repo.get("account") == account and
                repo.get("repository") == repository):
            exists = True
            print(f"Repository {identifier} already exists in the configuration.")
            break

    if not exists:
        # Create a new entry with an automatically generated alias.
        new_entry = {
            "provider": provider_name,
            "port": port,
            "account": account,
            "repository": repository,
            "alias": generate_alias({"repository": repository, "provider": provider_name, "account": account}, bin_dir, existing_aliases=set()),
            "verified": {}  # No initial verification info
        }
        # Load or initialize the user configuration.
        if os.path.exists(user_config_path):
            with open(user_config_path, "r") as f:
                user_config = yaml.safe_load(f) or {}
        else:
            user_config = {"repositories": []}
        user_config.setdefault("repositories", [])
        user_config["repositories"].append(new_entry)
        save_user_config(user_config, user_config_path)
        print(f"Repository {identifier} added to the configuration.")
        # Also update the merged configuration object.
        config_merged.setdefault("repositories", []).append(new_entry)

    # Create the local repository directory based on the configured base directory.
    base_dir = os.path.expanduser(config_merged["directories"]["repositories"])
    repo_dir = os.path.join(base_dir, provider_name, account, repository)
    if not os.path.exists(repo_dir):
        os.makedirs(repo_dir, exist_ok=True)
        print(f"Local repository directory created: {repo_dir}")
    else:
        print(f"Local repository directory already exists: {repo_dir}")

    # Initialize a Git repository if not already initialized.
    if not os.path.exists(os.path.join(repo_dir, ".git")):
        cmd_init = "git init"
        if preview:
            print(f"[Preview] Would execute: '{cmd_init}' in {repo_dir}")
        else:
            subprocess.run(cmd_init, cwd=repo_dir, shell=True, check=True)
            print(f"Git repository initialized in {repo_dir}.")
    else:
        print("Git repository is already initialized.")

    if remote:
        # Create a README.md if it does not exist to have content for an initial commit.
        readme_path = os.path.join(repo_dir, "README.md")
        if not os.path.exists(readme_path):
            if preview:
                print(f"[Preview] Would create README.md in {repo_dir}.")
            else:
                with open(readme_path, "w") as f:
                    f.write(f"# {repository}\n")
                subprocess.run("git add README.md", cwd=repo_dir, shell=True, check=True)
                subprocess.run('git commit -m "Initial commit"', cwd=repo_dir, shell=True, check=True)
                print("README.md created and initial commit made.")

        # Build the remote URL.
        if provider_name.lower() == "github.com":
            remote_url = f"git@{provider_name}:{account}/{repository}.git"
        else:
            if port:
                remote_url = f"ssh://git@{provider_name}:{port}/{account}/{repository}.git"
            else:
                remote_url = f"ssh://git@{provider_name}/{account}/{repository}.git"

        # Check if the remote "origin" already exists.
        cmd_list = "git remote"
        if preview:
            print(f"[Preview] Would check for existing remotes in {repo_dir}")
            remote_exists = False  # Assume no remote in preview mode.
        else:
            result = subprocess.run(cmd_list, cwd=repo_dir, shell=True, capture_output=True, text=True, check=True)
            remote_list = result.stdout.strip().split()
            remote_exists = "origin" in remote_list

        if remote_exists:
            # Remove the existing remote "origin".
            cmd_remove = "git remote remove origin"
            if preview:
                print(f"[Preview] Would execute: '{cmd_remove}' in {repo_dir}")
            else:
                subprocess.run(cmd_remove, cwd=repo_dir, shell=True, check=True)
                print("Existing remote 'origin' removed.")

        # Now add the new remote.
        cmd_remote = f"git remote add origin {remote_url}"
        if preview:
            print(f"[Preview] Would execute: '{cmd_remote}' in {repo_dir}")
        else:
            try:
                subprocess.run(cmd_remote, cwd=repo_dir, shell=True, check=True)
                print(f"Remote 'origin' added: {remote_url}")
            except subprocess.CalledProcessError:
                print(f"Failed to add remote using URL: {remote_url}.")

        # Push the initial commit to the remote repository
        cmd_push = "git push -u origin master"
        if preview:
            print(f"[Preview] Would execute: '{cmd_push}' in {repo_dir}")
        else:
            subprocess.run(cmd_push, cwd=repo_dir, shell=True, check=True)
            print("Initial push to the remote repository completed.")
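A preview-mode sketch of create_repo (identifier and paths are illustrative; note that the user-config update and directory creation above are not gated by preview in this implementation):

    import os

    create_repo(
        "git.example.org:2222/alice/toolbox",
        config_merged={"directories": {"repositories": "~/Repositories"}, "repositories": []},
        user_config_path=os.path.expanduser("~/.config/pkgmgr/config.yaml"),
        bin_dir=os.path.expanduser("~/.local/bin"),
        remote=True,
        preview=True,
    )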
28
src/pkgmgr/actions/repository/deinstall.py
Normal file
@@ -0,0 +1,28 @@
import os
from pkgmgr.core.command.run import run_command
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir


def deinstall_repos(selected_repos, repositories_base_dir, bin_dir, all_repos, preview=False):
    for repo in selected_repos:
        repo_identifier = get_repo_identifier(repo, all_repos)
        # Resolve the repository directory; needed below for 'make deinstall'.
        repo_dir = get_repo_dir(repositories_base_dir, repo)
        alias_path = os.path.join(bin_dir, repo_identifier)

        if os.path.exists(alias_path):
            confirm = input(f"Are you sure you want to delete link '{alias_path}' for {repo_identifier}? [y/N]: ").strip().lower()
            if confirm == "y":
                if preview:
                    print(f"[Preview] Would remove link '{alias_path}'.")
                else:
                    os.remove(alias_path)
                    print(f"Removed link for {repo_identifier}.")
        else:
            print(f"No link found for {repo_identifier} in {bin_dir}.")

        makefile_path = os.path.join(repo_dir, "Makefile")
        if os.path.exists(makefile_path):
            print(f"Makefile found in {repo_identifier}, running 'make deinstall'...")
            try:
                run_command("make deinstall", cwd=repo_dir, preview=preview)
            except SystemExit as e:
                print(f"[Warning] Failed to run 'make deinstall' for {repo_identifier}: {e}")
24
src/pkgmgr/actions/repository/delete.py
Normal file
@@ -0,0 +1,24 @@
import shutil
import os
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir


def delete_repos(selected_repos, repositories_base_dir, all_repos, preview=False):
    for repo in selected_repos:
        repo_identifier = get_repo_identifier(repo, all_repos)
        repo_dir = get_repo_dir(repositories_base_dir, repo)
        if os.path.exists(repo_dir):
            confirm = input(f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: ").strip().lower()
            if confirm == "y":
                if preview:
                    print(f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}.")
                else:
                    try:
                        shutil.rmtree(repo_dir)
                        print(f"Deleted repository directory '{repo_dir}' for {repo_identifier}.")
                    except Exception as e:
                        print(f"Error deleting '{repo_dir}' for {repo_identifier}: {e}")
            else:
                print(f"Skipped deletion of '{repo_dir}' for {repo_identifier}.")
        else:
            print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")
352
src/pkgmgr/actions/repository/list.py
Normal file
@@ -0,0 +1,352 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Pretty-print repository list with status, categories, tags and path.

- Tags come exclusively from YAML: repo["tags"].
- Categories come from repo["category_files"] (YAML file names without
  .yml/.yaml) and optional repo["category"].
- Optional detail mode (--description) prints an extended section per
  repository with description, homepage, etc.
"""

from __future__ import annotations

import os
import re
from textwrap import wrap
from typing import Any, Dict, List, Optional

Repository = Dict[str, Any]

RESET = "\033[0m"
BOLD = "\033[1m"
DIM = "\033[2m"
GREEN = "\033[32m"
YELLOW = "\033[33m"
RED = "\033[31m"
MAGENTA = "\033[35m"
GREY = "\033[90m"


def _compile_maybe_regex(pattern: str) -> Optional[re.Pattern[str]]:
    """
    If pattern is of the form /.../, return a compiled regex (case-insensitive).
    Otherwise return None.
    """
    if not pattern:
        return None
    if len(pattern) >= 2 and pattern.startswith("/") and pattern.endswith("/"):
        try:
            return re.compile(pattern[1:-1], re.IGNORECASE)
        except re.error:
            return None
    return None


def _status_matches(status: str, status_filter: str) -> bool:
    """
    Match a status string against an optional filter (substring or /regex/).
    """
    if not status_filter:
        return True

    regex = _compile_maybe_regex(status_filter)
    if regex:
        return bool(regex.search(status))
    return status_filter.lower() in status.lower()


def _compute_repo_dir(repositories_base_dir: str, repo: Repository) -> str:
    """
    Compute the local directory for a repository.

    If the repository already has a 'directory' key, that is used;
    otherwise the path is constructed from provider/account/repository
    under repositories_base_dir.
    """
    if repo.get("directory"):
        return os.path.expanduser(str(repo["directory"]))

    provider = str(repo.get("provider", ""))
    account = str(repo.get("account", ""))
    repository = str(repo.get("repository", ""))

    return os.path.join(
        os.path.expanduser(repositories_base_dir),
        provider,
        account,
        repository,
    )


def _compute_status(
    repo: Repository,
    repo_dir: str,
    binaries_dir: str,
) -> str:
    """
    Compute a human-readable status string, e.g. 'present,alias,ignored'.
    """
    parts: List[str] = []

    exists = os.path.isdir(repo_dir)
    if exists:
        parts.append("present")
    else:
        parts.append("absent")

    alias = repo.get("alias")
    if alias:
        alias_path = os.path.join(os.path.expanduser(binaries_dir), str(alias))
        if os.path.exists(alias_path):
            parts.append("alias")
        else:
            parts.append("alias-missing")

    if repo.get("ignore"):
        parts.append("ignored")

    return ",".join(parts) if parts else "-"


def _color_status(status_padded: str) -> str:
    """
    Color individual status flags inside a padded status string.

    Input is expected to be right-padded to the column width.

    Color mapping:
    - present       -> green
    - absent        -> magenta
    - alias         -> yellow
    - alias-missing -> yellow
    - ignored       -> magenta
    - other         -> default
    """
    core = status_padded.rstrip()
    pad_spaces = len(status_padded) - len(core)

    plain_parts = core.split(",") if core else []
    colored_parts: List[str] = []

    for raw_part in plain_parts:
        name = raw_part.strip()
        if not name:
            continue

        if name == "present":
            color = GREEN
        elif name == "absent":
            color = MAGENTA
        elif name in ("alias", "alias-missing"):
            color = YELLOW
        elif name == "ignored":
            color = MAGENTA
        else:
            color = ""

        if color:
            colored_parts.append(f"{color}{name}{RESET}")
        else:
            colored_parts.append(name)

    colored_core = ",".join(colored_parts)
    return colored_core + (" " * pad_spaces)


def list_repositories(
    repositories: List[Repository],
    repositories_base_dir: str,
    binaries_dir: str,
    search_filter: str = "",
    status_filter: str = "",
    extra_tags: Optional[List[str]] = None,
    show_description: bool = False,
) -> None:
    """
    Print a table of repositories and (optionally) detailed descriptions.

    Parameters
    ----------
    repositories:
        Repositories to show (usually already filtered by get_selected_repos).
    repositories_base_dir:
        Base directory where repositories live.
    binaries_dir:
        Directory where alias symlinks live.
    search_filter:
        Optional substring/regex filter on identifier and metadata.
    status_filter:
        Optional filter on computed status.
    extra_tags:
        Additional tags to show for each repository (CLI overlay only).
    show_description:
        If True, print a detailed block for each repository after the table.
    """
    if extra_tags is None:
        extra_tags = []

    search_regex = _compile_maybe_regex(search_filter)
    rows: List[Dict[str, Any]] = []

    # ------------------------------------------------------------------
    # Build rows
    # ------------------------------------------------------------------
    for repo in repositories:
        identifier = str(repo.get("repository") or repo.get("alias") or "")
        alias = str(repo.get("alias") or "")
        provider = str(repo.get("provider") or "")
        account = str(repo.get("account") or "")
        description = str(repo.get("description") or "")
        homepage = str(repo.get("homepage") or "")

        repo_dir = _compute_repo_dir(repositories_base_dir, repo)
        status = _compute_status(repo, repo_dir, binaries_dir)

        if not _status_matches(status, status_filter):
            continue

        if search_filter:
            haystack = " ".join(
                [
                    identifier,
                    alias,
                    provider,
                    account,
                    description,
                    homepage,
                    repo_dir,
                ]
            )
            if search_regex:
                if not search_regex.search(haystack):
                    continue
            else:
                if search_filter.lower() not in haystack.lower():
                    continue

        categories: List[str] = []
        categories.extend(map(str, repo.get("category_files", [])))
        if repo.get("category"):
            categories.append(str(repo["category"]))

        yaml_tags: List[str] = list(map(str, repo.get("tags", [])))
        display_tags: List[str] = sorted(
            set(yaml_tags + list(map(str, extra_tags)))
        )

        rows.append(
            {
                "repo": repo,
                "identifier": identifier,
                "status": status,
                "categories": categories,
                "tags": display_tags,
                "dir": repo_dir,
            }
        )

    if not rows:
        print("No repositories matched the given filters.")
        return

    # ------------------------------------------------------------------
    # Table section (header grey, values white, per-flag colored status)
    # ------------------------------------------------------------------
    ident_width = max(len("IDENTIFIER"), max(len(r["identifier"]) for r in rows))
    status_width = max(len("STATUS"), max(len(r["status"]) for r in rows))
    cat_width = max(
        len("CATEGORIES"),
        max((len(",".join(r["categories"])) for r in rows), default=0),
    )
    tag_width = max(
        len("TAGS"),
        max((len(",".join(r["tags"])) for r in rows), default=0),
    )

    header = (
        f"{GREY}{BOLD}"
        f"{'IDENTIFIER'.ljust(ident_width)} "
        f"{'STATUS'.ljust(status_width)} "
        f"{'CATEGORIES'.ljust(cat_width)} "
        f"{'TAGS'.ljust(tag_width)} "
        f"DIR"
        f"{RESET}"
    )
    print(header)
    print("-" * (ident_width + status_width + cat_width + tag_width + 10 + 40))

    for r in rows:
        ident_col = r["identifier"].ljust(ident_width)
        cat_col = ",".join(r["categories"]).ljust(cat_width)
        tag_col = ",".join(r["tags"]).ljust(tag_width)
        dir_col = r["dir"]
        status = r["status"]

        status_padded = status.ljust(status_width)
        status_colored = _color_status(status_padded)

        print(
            f"{ident_col} "
            f"{status_colored} "
            f"{cat_col} "
            f"{tag_col} "
            f"{dir_col}"
        )

    # ------------------------------------------------------------------
    # Detailed section (alias value red, same status coloring)
    # ------------------------------------------------------------------
    if not show_description:
        return

    print()
    print(f"{BOLD}Detailed repository information:{RESET}")
    print()

    for r in rows:
        repo = r["repo"]
        identifier = r["identifier"]
        alias = str(repo.get("alias") or "")
        provider = str(repo.get("provider") or "")
        account = str(repo.get("account") or "")
        repository = str(repo.get("repository") or "")
        description = str(repo.get("description") or "")
        homepage = str(repo.get("homepage") or "")
        categories = r["categories"]
        tags = r["tags"]
        repo_dir = r["dir"]
        status = r["status"]

        print(f"{BOLD}{identifier}{RESET}")

        print(f"  Provider:    {provider}")
        print(f"  Account:     {account}")
        print(f"  Repository:  {repository}")

        # Alias value highlighted in red
        if alias:
            print(f"  Alias:       {RED}{alias}{RESET}")

        status_colored = _color_status(status)
        print(f"  Status:      {status_colored}")

        if categories:
            print(f"  Categories:  {', '.join(categories)}")

        if tags:
            print(f"  Tags:        {', '.join(tags)}")

        print(f"  Directory:   {repo_dir}")

        if homepage:
            print(f"  Homepage:    {homepage}")

        if description:
            print("  Description:")
            for line in wrap(description, width=78):
                print(f"    {line}")

        print()
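A hedged call sketch (one minimal entry; search_filter accepts a plain substring or a /regex/ wrapped in slashes):

    repos = [
        {"repository": "pkgmgr", "alias": "pkgmgr", "provider": "github.com",
         "account": "example", "tags": ["cli"], "category_files": ["tools"]},
    ]
    list_repositories(
        repos,
        repositories_base_dir="~/Repositories",
        binaries_dir="~/.local/bin",
        search_filter="/pkg.*/",
        show_description=True,
    )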
77
src/pkgmgr/actions/repository/pull.py
Normal file
@@ -0,0 +1,77 @@
import os
import subprocess
import sys

from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.verify import verify_repository


def pull_with_verification(
    selected_repos,
    repositories_base_dir,
    all_repos,
    extra_args,
    no_verification,
    preview: bool,
) -> None:
    """
    Execute `git pull` for each repository with verification.

    - Uses verify_repository() in "pull" mode.
    - If verification fails (and verification info is set) and
      --no-verification is not enabled, the user is prompted to confirm
      the pull.
    - In preview mode, no interactive prompts are performed and no
      Git commands are executed; only the would-be command is printed.
    """
    for repo in selected_repos:
        repo_identifier = get_repo_identifier(repo, all_repos)
        repo_dir = get_repo_dir(repositories_base_dir, repo)

        if not os.path.exists(repo_dir):
            print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")
            continue

        verified_info = repo.get("verified")
        verified_ok, errors, commit_hash, signing_key = verify_repository(
            repo,
            repo_dir,
            mode="pull",
            no_verification=no_verification,
        )

        # Only prompt the user if:
        # - we are NOT in preview mode
        # - verification is enabled
        # - the repo has verification info configured
        # - verification failed
        if (
            not preview
            and not no_verification
            and verified_info
            and not verified_ok
        ):
            print(f"Warning: Verification failed for {repo_identifier}:")
            for err in errors:
                print(f"  - {err}")
            choice = input("Proceed with 'git pull'? (y/N): ").strip().lower()
            if choice != "y":
                continue

        # Build the git pull command (include extra args if present)
        args_part = " ".join(extra_args) if extra_args else ""
        full_cmd = f"git pull{(' ' + args_part) if args_part else ''}"

        if preview:
            # Preview mode: only show the command, do not execute or prompt.
            print(f"[Preview] In '{repo_dir}': {full_cmd}")
        else:
            print(f"Running in '{repo_dir}': {full_cmd}")
            result = subprocess.run(full_cmd, cwd=repo_dir, shell=True)
            if result.returncode != 0:
                print(
                    f"'git pull' for {repo_identifier} failed "
                    f"with exit code {result.returncode}."
                )
                sys.exit(result.returncode)
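A preview sketch (repos as configured elsewhere; note that verify_repository() is still invoked even in preview mode, only the prompt and the pull itself are skipped):

    pull_with_verification(
        selected_repos=repos,
        repositories_base_dir="~/Repositories",
        all_repos=repos,
        extra_args=["--rebase"],
        no_verification=True,
        preview=True,
    )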
44
src/pkgmgr/actions/repository/status.py
Normal file
@@ -0,0 +1,44 @@
import shutil

from pkgmgr.actions.proxy import exec_proxy_command
from pkgmgr.core.command.run import run_command
from pkgmgr.core.repository.identifier import get_repo_identifier


def status_repos(
    selected_repos,
    repositories_base_dir,
    all_repos,
    extra_args,
    list_only: bool = False,
    system_status: bool = False,
    preview: bool = False,
):
    if system_status:
        print("System status:")

        # Arch / AUR updates (only if yay is actually available)
        if shutil.which("yay") is not None:
            run_command("sudo -u aur_builder yay -Qu --noconfirm", preview=preview)

        # Nix profile status (if Nix is available)
        if shutil.which("nix") is not None:
            print("\nNix profile status:")
            try:
                run_command("nix profile list", preview=preview)
            except SystemExit as e:
                print(f"[Warning] Failed to query Nix profiles: {e}")

    if list_only:
        for repo in selected_repos:
            print(get_repo_identifier(repo, all_repos))
    else:
        exec_proxy_command(
            "git",
            selected_repos,
            repositories_base_dir,
            all_repos,
            "status",
            extra_args,
            preview,
        )
68
src/pkgmgr/actions/repository/update.py
Normal file
@@ -0,0 +1,68 @@
import shutil

from pkgmgr.actions.repository.pull import pull_with_verification
from pkgmgr.actions.install import install_repos


def update_repos(
    selected_repos,
    repositories_base_dir,
    bin_dir,
    all_repos,
    no_verification,
    system_update,
    preview: bool,
    quiet: bool,
    update_dependencies: bool,
    clone_mode: str,
):
    """
    Update repositories by pulling the latest changes and installing them.

    Parameters:
    - selected_repos: List of selected repositories.
    - repositories_base_dir: Base directory for repositories.
    - bin_dir: Directory for symbolic links.
    - all_repos: All repository configurations.
    - no_verification: Whether to skip verification.
    - system_update: Whether to run a system update.
    - preview: If True, only show commands without executing.
    - quiet: If True, suppress messages.
    - update_dependencies: Whether to update dependent repositories.
    - clone_mode: Method used to clone repositories (ssh or https).
    """
    pull_with_verification(
        selected_repos,
        repositories_base_dir,
        all_repos,
        [],
        no_verification,
        preview,
    )

    install_repos(
        selected_repos,
        repositories_base_dir,
        bin_dir,
        all_repos,
        no_verification,
        preview,
        quiet,
        clone_mode,
        update_dependencies,
    )

    if system_update:
        from pkgmgr.core.command.run import run_command

        # Nix: upgrade all profile entries (if Nix is available)
        if shutil.which("nix") is not None:
            try:
                run_command("nix profile upgrade '.*'", preview=preview)
            except SystemExit as e:
                print(f"[Warning] 'nix profile upgrade' failed: {e}")

        # Arch / AUR system update (only if the tools are available)
        if shutil.which("yay") is not None:
            run_command("sudo -u aur_builder yay -Syu --noconfirm", preview=preview)
        if shutil.which("pacman") is not None:
            run_command("sudo pacman -Syyu --noconfirm", preview=preview)
109
src/pkgmgr/cli/__init__.py
Normal file
@@ -0,0 +1,109 @@
# -*- coding: utf-8 -*-
from __future__ import annotations

import os

from pkgmgr.core.config.load import load_config

from .context import CLIContext
from .parser import create_parser
from .dispatch import dispatch_command

__all__ = ["CLIContext", "create_parser", "dispatch_command", "main"]


# User config lives in the home directory:
# ~/.config/pkgmgr/config.yaml
USER_CONFIG_PATH = os.path.expanduser("~/.config/pkgmgr/config.yaml")

DESCRIPTION_TEXT = """\
\033[1;32mPackage Manager 🤖📦\033[0m
\033[3mKevin's Package Manager is a multi-repository, multi-package, and multi-format
development tool crafted by and designed for:\033[0m
\033[1;34mKevin Veen-Birkenbach\033[0m
\033[4mhttps://www.veen.world/\033[0m

\033[1mOverview:\033[0m
A powerful toolchain that unifies and automates workflows across heterogeneous
project ecosystems. pkgmgr is not only a package manager — it is a full
developer-oriented orchestration tool.

It automatically detects, merges, and processes metadata from multiple
dependency formats, including:
  • \033[1;33mPython:\033[0m pyproject.toml, requirements.txt
  • \033[1;33mNix:\033[0m flake.nix
  • \033[1;33mArch Linux:\033[0m PKGBUILD
  • \033[1;33mAnsible:\033[0m requirements.yml

This allows pkgmgr to perform installation, updates, verification, dependency
resolution, and synchronization across complex multi-repo environments — with a
single unified command-line interface.

\033[1mDeveloper Tools:\033[0m
pkgmgr includes an integrated toolbox to enhance daily development workflows:

  • \033[1;33mVS Code integration:\033[0m Auto-generate and open multi-repo workspaces
  • \033[1;33mTerminal integration:\033[0m Open repositories in new GNOME Terminal tabs
  • \033[1;33mExplorer integration:\033[0m Open repositories in your file manager
  • \033[1;33mRelease automation:\033[0m Version bumping, changelog updates, and tagging
  • \033[1;33mBatch operations:\033[0m Execute shell commands across multiple repositories
  • \033[1;33mGit/Docker/Make wrappers:\033[0m Unified command proxying for many tools

\033[1mCapabilities:\033[0m
  • Clone, pull, verify, update, and manage many repositories at once
  • Resolve dependencies across languages and ecosystems
  • Standardize install/update workflows
  • Create symbolic executable wrappers for any project
  • Merge configuration from default + user config layers

Use pkgmgr as both a robust package management framework and a versatile
development orchestration tool.

For detailed help on each command, use:
  \033[1mpkgmgr <command> --help\033[0m
"""


def main() -> None:
    """
    Entry point for the pkgmgr CLI.
    """

    config_merged = load_config(USER_CONFIG_PATH)

    # Directories: be robust and provide sane defaults if missing
    directories = config_merged.get("directories") or {}
    repositories_dir = os.path.expanduser(
        directories.get("repositories", "~/Repositories")
    )
    binaries_dir = os.path.expanduser(
        directories.get("binaries", "~/.local/bin")
    )

    # Ensure the merged config actually contains the resolved directories
    config_merged.setdefault("directories", {})
    config_merged["directories"]["repositories"] = repositories_dir
    config_merged["directories"]["binaries"] = binaries_dir

    all_repositories = config_merged.get("repositories", [])

    ctx = CLIContext(
        config_merged=config_merged,
        repositories_base_dir=repositories_dir,
        all_repositories=all_repositories,
        binaries_dir=binaries_dir,
        user_config_path=USER_CONFIG_PATH,
    )

    parser = create_parser(DESCRIPTION_TEXT)
    args = parser.parse_args()

    if not getattr(args, "command", None):
        parser.print_help()
        return

    dispatch_command(args, ctx)


if __name__ == "__main__":
    main()
19
src/pkgmgr/cli/commands/__init__.py
Normal file
@@ -0,0 +1,19 @@
from .repos import handle_repos_command
from .config import handle_config
from .tools import handle_tools_command
from .release import handle_release
from .version import handle_version
from .make import handle_make
from .changelog import handle_changelog
from .branch import handle_branch

__all__ = [
    "handle_repos_command",
    "handle_config",
    "handle_tools_command",
    "handle_release",
    "handle_version",
    "handle_make",
    "handle_changelog",
    "handle_branch",
]
34
src/pkgmgr/cli/commands/branch.py
Normal file
@@ -0,0 +1,34 @@
from __future__ import annotations

import sys

from pkgmgr.cli.context import CLIContext
from pkgmgr.actions.branch import open_branch, close_branch


def handle_branch(args, ctx: CLIContext) -> None:
    """
    Handle `pkgmgr branch` subcommands.

    Currently supported:
    - pkgmgr branch open [<name>] [--base <branch>]
    - pkgmgr branch close [<name>] [--base <branch>]
    """
    if args.subcommand == "open":
        open_branch(
            name=getattr(args, "name", None),
            base_branch=getattr(args, "base", "main"),
            cwd=".",
        )
        return

    if args.subcommand == "close":
        close_branch(
            name=getattr(args, "name", None),
            base_branch=getattr(args, "base", "main"),
            cwd=".",
        )
        return

    print(f"Unknown branch subcommand: {args.subcommand}")
    sys.exit(2)
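A sketch of the calls this handler dispatches to (the branch name is illustrative; close_branch is assumed to mirror open_branch's signature, as the calls above suggest):

    open_branch(name="feature/packaging", base_branch="main", cwd=".")
    close_branch(name="feature/packaging", base_branch="main", cwd=".")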
168
src/pkgmgr/cli/commands/changelog.py
Normal file
@@ -0,0 +1,168 @@
from __future__ import annotations

import os
import sys
from typing import Any, Dict, List, Optional, Tuple

from pkgmgr.cli.context import CLIContext
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.git import get_tags
from pkgmgr.core.version.semver import extract_semver_from_tags
from pkgmgr.actions.changelog import generate_changelog


Repository = Dict[str, Any]


def _find_previous_and_current_tag(
    tags: List[str],
    target_tag: Optional[str] = None,
) -> Tuple[Optional[str], Optional[str]]:
    """
    Given a list of tags and an optional target tag, determine
    (previous_tag, current_tag) on the SemVer axis.

    If target_tag is None:
    - If there are at least two SemVer tags, return (prev, latest).
    - If there is only one SemVer tag, return (None, latest).
    - If there are no SemVer tags, return (None, None).

    If target_tag is given:
    - If target_tag is not a SemVer tag or is unknown, return (None, None).
    - Otherwise, return (previous_semver_tag, target_tag).
      If there is no previous SemVer tag, previous_semver_tag is None.
    """
    semver_pairs = extract_semver_from_tags(tags)
    if not semver_pairs:
        return None, None

    # Sort ascending by SemVer
    semver_pairs.sort(key=lambda item: item[1])

    tag_to_index = {tag: idx for idx, (tag, _ver) in enumerate(semver_pairs)}

    if target_tag is None:
        if len(semver_pairs) == 1:
            return None, semver_pairs[0][0]
        prev_tag = semver_pairs[-2][0]
        latest_tag = semver_pairs[-1][0]
        return prev_tag, latest_tag

    # target_tag is specified
    if target_tag not in tag_to_index:
        return None, None

    idx = tag_to_index[target_tag]
    current_tag = semver_pairs[idx][0]
    if idx == 0:
        return None, current_tag

    previous_tag = semver_pairs[idx - 1][0]
    return previous_tag, current_tag


def handle_changelog(
    args,
    ctx: CLIContext,
    selected: List[Repository],
) -> None:
    """
    Handle the 'changelog' command.

    Behaviour:
    - Without range: show the changelog between the last two SemVer tags,
      from the start of history up to a single SemVer tag, or the full
      history if no SemVer tags exist.
    - With RANGE of the form 'A..B': show the changelog between A and B.
    - With RANGE of the form 'vX.Y.Z': show the changelog between the
      previous SemVer tag and vX.Y.Z (or from the start if there is none).
    """

    if not selected:
        print("No repositories selected for changelog.")
        sys.exit(1)

    range_arg: str = getattr(args, "range", "") or ""

    print("pkgmgr changelog")
    print("=================")

    for repo in selected:
        # Resolve repository directory
        repo_dir = repo.get("directory")
        if not repo_dir:
            try:
                repo_dir = get_repo_dir(ctx.repositories_base_dir, repo)
            except Exception:
                repo_dir = None

        identifier = get_repo_identifier(repo, ctx.all_repositories)

        if not repo_dir or not os.path.isdir(repo_dir):
            print(f"\nRepository: {identifier}")
            print("----------------------------------------")
            print(
                "[INFO] Skipped: repository directory does not exist "
                "locally, changelog generation is not possible."
            )
            continue

        print(f"\nRepository: {identifier}")
        print(f"Path: {repo_dir}")
        print("----------------------------------------")

        try:
            tags = get_tags(cwd=repo_dir)
        except Exception as exc:
            print(f"[ERROR] Could not read git tags: {exc}")
            tags = []

        from_ref: Optional[str] = None
        to_ref: Optional[str] = None

        if range_arg:
            # Explicit range provided
            if ".." in range_arg:
                # Format: A..B
                parts = range_arg.split("..", 1)
                from_ref = parts[0] or None
                to_ref = parts[1] or None
            else:
                # Single tag, compute previous + current
                prev_tag, cur_tag = _find_previous_and_current_tag(
                    tags,
                    target_tag=range_arg,
                )
                if cur_tag is None:
                    print(
                        f"[WARN] Tag {range_arg!r} not found or not a SemVer tag."
                    )
                    print("[INFO] Falling back to full history.")
                    from_ref = None
                    to_ref = None
                else:
                    from_ref = prev_tag
                    to_ref = cur_tag
        else:
            # No explicit range: last two SemVer tags (or fallback)
            prev_tag, cur_tag = _find_previous_and_current_tag(tags)
            from_ref = prev_tag
            to_ref = cur_tag  # may be None if no tags

        changelog_text = generate_changelog(
            cwd=repo_dir,
            from_ref=from_ref,
            to_ref=to_ref,
            include_merges=False,
        )

        if from_ref or to_ref:
            ref_desc = f"{from_ref or '<root>'}..{to_ref or 'HEAD'}"
        else:
            ref_desc = "<full history>"

        print(f"Range: {ref_desc}")
        print()
        print(changelog_text)
        print()
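To illustrate the tag-pair selection in _find_previous_and_current_tag (assuming extract_semver_from_tags accepts a leading 'v', as the 'vX.Y.Z' range form implies):

    tags = ["v1.0.0", "v1.1.0", "v2.0.0"]
    _find_previous_and_current_tag(tags)                       # ("v1.1.0", "v2.0.0")
    _find_previous_and_current_tag(tags, target_tag="v1.1.0")  # ("v1.0.0", "v1.1.0")
    _find_previous_and_current_tag(tags, target_tag="v9.9.9")  # (None, None)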
240
src/pkgmgr/cli/commands/config.py
Normal file
@@ -0,0 +1,240 @@
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict
|
||||
|
||||
import yaml
|
||||
|
||||
from pkgmgr.cli.context import CLIContext
|
||||
from pkgmgr.actions.config.init import config_init
|
||||
from pkgmgr.actions.config.add import interactive_add
|
||||
from pkgmgr.core.repository.resolve import resolve_repos
|
||||
from pkgmgr.core.config.save import save_user_config
|
||||
from pkgmgr.actions.config.show import show_config
|
||||
from pkgmgr.core.command.run import run_command
|
||||
|
||||
|
||||
def _load_user_config(user_config_path: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Load the user config from ~/.config/pkgmgr/config.yaml
|
||||
(or whatever ctx.user_config_path is), creating the directory if needed.
|
||||
"""
|
||||
user_config_path_expanded = os.path.expanduser(user_config_path)
|
||||
cfg_dir = os.path.dirname(user_config_path_expanded)
|
||||
if cfg_dir and not os.path.isdir(cfg_dir):
|
||||
os.makedirs(cfg_dir, exist_ok=True)
|
||||
|
||||
if os.path.exists(user_config_path_expanded):
|
||||
with open(user_config_path_expanded, "r", encoding="utf-8") as f:
|
||||
return yaml.safe_load(f) or {"repositories": []}
|
||||
return {"repositories": []}
|
||||
|
||||
|
||||
def _find_defaults_source_dir() -> str | None:
|
||||
"""
|
||||
Find the directory inside the installed pkgmgr package OR the
|
||||
project root that contains default config files.
|
||||
|
||||
Preferred locations (in dieser Reihenfolge):
|
||||
- <pkg_root>/config_defaults
|
||||
- <pkg_root>/config
|
||||
- <project_root>/config_defaults
|
||||
- <project_root>/config
|
||||
"""
|
||||
import pkgmgr # local import to avoid circular deps
|
||||
|
||||
pkg_root = Path(pkgmgr.__file__).resolve().parent
|
||||
project_root = pkg_root.parent
|
||||
|
||||
candidates = [
|
||||
pkg_root / "config_defaults",
|
||||
pkg_root / "config",
|
||||
project_root / "config_defaults",
|
||||
project_root / "config",
|
||||
]
|
||||
for cand in candidates:
|
||||
if cand.is_dir():
|
||||
return str(cand)
|
||||
return None
|
||||
|
||||
|
||||
def _update_default_configs(user_config_path: str) -> None:
|
||||
"""
|
||||
Copy all default *.yml/*.yaml files from the installed pkgmgr package
|
||||
into ~/.config/pkgmgr/, overwriting existing ones – except the user
|
||||
config file itself (config.yaml), which is never touched.
|
||||
"""
|
||||
source_dir = _find_defaults_source_dir()
|
||||
if not source_dir:
|
||||
print(
|
||||
"[WARN] No config_defaults or config directory found in "
|
||||
"pkgmgr installation. Nothing to update."
|
||||
)
|
||||
return
|
||||
|
||||
dest_dir = os.path.dirname(os.path.expanduser(user_config_path))
|
||||
if not dest_dir:
|
||||
dest_dir = os.path.expanduser("~/.config/pkgmgr")
|
||||
os.makedirs(dest_dir, exist_ok=True)
|
||||
|
||||
for name in os.listdir(source_dir):
|
||||
lower = name.lower()
|
||||
if not (lower.endswith(".yml") or lower.endswith(".yaml")):
|
||||
continue
|
||||
if name == "config.yaml":
|
||||
# Never overwrite the user config template / live config
|
||||
continue
|
||||
|
||||
src = os.path.join(source_dir, name)
|
||||
dst = os.path.join(dest_dir, name)
|
||||
|
||||
shutil.copy2(src, dst)
|
||||
print(f"[INFO] Updated default config file: {dst}")


def handle_config(args, ctx: CLIContext) -> None:
    """
    Handle 'pkgmgr config' subcommands.
    """

    user_config_path = ctx.user_config_path

    # ------------------------------------------------------------
    # config show
    # ------------------------------------------------------------
    if args.subcommand == "show":
        if args.all or (not args.identifiers):
            # Full merged config view
            show_config([], user_config_path, full_config=True)
        else:
            # Show only matching entries from user config
            user_config = _load_user_config(user_config_path)
            selected = resolve_repos(
                args.identifiers,
                user_config.get("repositories", []),
            )
            if selected:
                show_config(
                    selected,
                    user_config_path,
                    full_config=False,
                )
        return

    # ------------------------------------------------------------
    # config add
    # ------------------------------------------------------------
    if args.subcommand == "add":
        interactive_add(ctx.config_merged, user_config_path)
        return

    # ------------------------------------------------------------
    # config edit
    # ------------------------------------------------------------
    if args.subcommand == "edit":
        run_command(f"nano {user_config_path}")
        return

    # ------------------------------------------------------------
    # config init
    # ------------------------------------------------------------
    if args.subcommand == "init":
        user_config = _load_user_config(user_config_path)
        config_init(
            user_config,
            ctx.config_merged,
            ctx.binaries_dir,
            user_config_path,
        )
        return

    # ------------------------------------------------------------
    # config delete
    # ------------------------------------------------------------
    if args.subcommand == "delete":
        user_config = _load_user_config(user_config_path)

        if args.all or not args.identifiers:
            print(
                "[ERROR] 'config delete' requires explicit identifiers. "
                "Use 'config show' to inspect entries."
            )
            return

        to_delete = resolve_repos(
            args.identifiers,
            user_config.get("repositories", []),
        )
        new_repos = [
            entry
            for entry in user_config.get("repositories", [])
            if entry not in to_delete
        ]
        user_config["repositories"] = new_repos
        save_user_config(user_config, user_config_path)
        print(f"Deleted {len(to_delete)} entries from user config.")
        return

    # ------------------------------------------------------------
    # config ignore
    # ------------------------------------------------------------
    if args.subcommand == "ignore":
        user_config = _load_user_config(user_config_path)

        if args.all or not args.identifiers:
            print(
                "[ERROR] 'config ignore' requires explicit identifiers. "
                "Use 'config show' to inspect entries."
            )
            return

        to_modify = resolve_repos(
            args.identifiers,
            user_config.get("repositories", []),
        )

        for entry in user_config["repositories"]:
            key = (
                entry.get("provider"),
                entry.get("account"),
                entry.get("repository"),
            )
            for mod in to_modify:
                mod_key = (
                    mod.get("provider"),
                    mod.get("account"),
                    mod.get("repository"),
                )
                if key == mod_key:
                    entry["ignore"] = args.set == "true"
                    print(
                        f"Set ignore for {key} to {entry['ignore']}"
                    )

        save_user_config(user_config, user_config_path)
        return

    # ------------------------------------------------------------
    # config update
    # ------------------------------------------------------------
    if args.subcommand == "update":
        # Copy default YAML configs from the installed package into the
        # user's ~/.config/pkgmgr directory. This will overwrite files
        # with the same name (except config.yaml).
        _update_default_configs(user_config_path)
        return

    # ------------------------------------------------------------
    # Unknown subcommand
    # ------------------------------------------------------------
    print(f"Unknown config subcommand: {args.subcommand}")
    sys.exit(2)
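# Illustrative usage (a sketch, not part of the module) — how these
# subcommands are expected to be invoked from the shell:
#
#   pkgmgr config show                      # full merged config
#   pkgmgr config show github.com/acme/app  # only matching user-config entries
#   pkgmgr config ignore github.com/acme/app --set true
#   pkgmgr config update                    # refresh shipped defaults
#
# The identifier format above is an assumption based on the
# provider/account/repository keys used throughout this handler.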
33
src/pkgmgr/cli/commands/make.py
Normal file
@@ -0,0 +1,33 @@
from __future__ import annotations

import sys
from typing import Any, Dict, List

from pkgmgr.cli.context import CLIContext
from pkgmgr.actions.proxy import exec_proxy_command


Repository = Dict[str, Any]


def handle_make(
    args,
    ctx: CLIContext,
    selected: List[Repository],
) -> None:
    """
    Handle the 'make' command by delegating to exec_proxy_command.

    This mirrors the old behaviour where `make` was treated as a
    special proxy command.
    """
    exec_proxy_command(
        "make",
        selected,
        ctx.repositories_base_dir,
        ctx.all_repositories,
        args.subcommand,
        getattr(args, "extra_args", []),
        getattr(args, "preview", False),
    )
    sys.exit(0)
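# Illustrative call chain (a sketch, not part of the module):
# `pkgmgr make install my-repo` arrives here with args.subcommand == "install",
# so exec_proxy_command above effectively runs `make install` inside each
# selected repository directory. "my-repo" is a placeholder identifier.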
98
src/pkgmgr/cli/commands/release.py
Normal file
@@ -0,0 +1,98 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Release command wiring for the pkgmgr CLI.

This module implements the `pkgmgr release` subcommand on top of the
generic selection logic from cli.dispatch. It does not define its
own subparser; the CLI surface is configured in cli.parser.

Responsibilities:
- Take the parsed argparse.Namespace for the `release` command.
- Use the list of selected repositories provided by dispatch_command().
- Optionally list affected repositories when --list is set.
- For each selected repository, run pkgmgr.actions.release.release(...) in
  the context of that repository directory.
"""

from __future__ import annotations

import os
from typing import Any, Dict, List

from pkgmgr.cli.context import CLIContext
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.actions.release import release as run_release


Repository = Dict[str, Any]


def handle_release(
    args,
    ctx: CLIContext,
    selected: List[Repository],
) -> None:
    """
    Handle the `pkgmgr release` subcommand.

    Flow:
    1) Use the `selected` repositories as computed by dispatch_command().
    2) If --list is given, print the identifiers of the selected repos
       and return without running any release.
    3) For each selected repository:
       - Resolve its identifier and local directory.
       - Change into that directory.
       - Call pkgmgr.actions.release.release(...) with the parsed options.
    """
    if not selected:
        print("[pkgmgr] No repositories selected for release.")
        return

    # List-only mode: show which repositories would be affected.
    if getattr(args, "list", False):
        print("[pkgmgr] Repositories that would be affected by this release:")
        for repo in selected:
            identifier = get_repo_identifier(repo, ctx.all_repositories)
            print(f" - {identifier}")
        return

    for repo in selected:
        identifier = get_repo_identifier(repo, ctx.all_repositories)

        repo_dir = repo.get("directory")
        if not repo_dir:
            try:
                repo_dir = get_repo_dir(ctx.repositories_base_dir, repo)
            except Exception:
                repo_dir = None

        if not repo_dir or not os.path.isdir(repo_dir):
            print(
                f"[WARN] Skipping repository {identifier}: "
                "local directory does not exist."
            )
            continue

        print(
            f"[pkgmgr] Running release for repository {identifier} "
            f"in '{repo_dir}'..."
        )

        # Change to repo directory and invoke the helper.
        cwd_before = os.getcwd()
        try:
            os.chdir(repo_dir)
            run_release(
                pyproject_path="pyproject.toml",
                changelog_path="CHANGELOG.md",
                release_type=args.release_type,
                message=args.message or None,
                preview=getattr(args, "preview", False),
                force=getattr(args, "force", False),
                close=getattr(args, "close", False),
            )
        finally:
            os.chdir(cwd_before)
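# Illustrative invocation (a sketch, not part of the module):
#
#   pkgmgr release patch -m "Fix packaging paths" --close
#
# would bump the patch version, update CHANGELOG.md, and (because of --close)
# merge and delete the current feature branch afterwards, as long as it is
# not main/master. The message text is a placeholder.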
214
src/pkgmgr/cli/commands/repos.py
Normal file
@@ -0,0 +1,214 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from __future__ import annotations

import sys
from typing import Any, Dict, List

from pkgmgr.cli.context import CLIContext
from pkgmgr.actions.install import install_repos
from pkgmgr.actions.repository.deinstall import deinstall_repos
from pkgmgr.actions.repository.delete import delete_repos
from pkgmgr.actions.repository.update import update_repos
from pkgmgr.actions.repository.status import status_repos
from pkgmgr.actions.repository.list import list_repositories
from pkgmgr.core.command.run import run_command
from pkgmgr.actions.repository.create import create_repo
from pkgmgr.core.repository.selected import get_selected_repos
from pkgmgr.core.repository.dir import get_repo_dir

Repository = Dict[str, Any]


def _resolve_repository_directory(repository: Repository, ctx: CLIContext) -> str:
    """
    Resolve the local filesystem directory for a repository.

    Priority:
    1. Use repository["directory"] if present.
    2. Fallback to get_repo_dir(...) using the repositories base directory
       from the CLI context.
    """
    repo_dir = repository.get("directory")
    if repo_dir:
        return repo_dir

    base_dir = (
        getattr(ctx, "repositories_base_dir", None)
        or getattr(ctx, "repositories_dir", None)
    )
    if not base_dir:
        raise RuntimeError(
            "Cannot resolve repositories base directory from context; "
            "expected ctx.repositories_base_dir or ctx.repositories_dir."
        )
    return get_repo_dir(base_dir, repository)


def handle_repos_command(
    args,
    ctx: CLIContext,
    selected: List[Repository],
) -> None:
    """
    Handle core repository commands (install/update/deinstall/delete/.../list).
    """

    # ------------------------------------------------------------
    # install
    # ------------------------------------------------------------
    if args.command == "install":
        install_repos(
            selected,
            ctx.repositories_base_dir,
            ctx.binaries_dir,
            ctx.all_repositories,
            args.no_verification,
            args.preview,
            args.quiet,
            args.clone_mode,
            args.dependencies,
        )
        return

    # ------------------------------------------------------------
    # update
    # ------------------------------------------------------------
    if args.command == "update":
        update_repos(
            selected,
            ctx.repositories_base_dir,
            ctx.binaries_dir,
            ctx.all_repositories,
            args.no_verification,
            args.system,
            args.preview,
            args.quiet,
            args.dependencies,
            args.clone_mode,
        )
        return

    # ------------------------------------------------------------
    # deinstall
    # ------------------------------------------------------------
    if args.command == "deinstall":
        deinstall_repos(
            selected,
            ctx.repositories_base_dir,
            ctx.binaries_dir,
            ctx.all_repositories,
            preview=args.preview,
        )
        return

    # ------------------------------------------------------------
    # delete
    # ------------------------------------------------------------
    if args.command == "delete":
        delete_repos(
            selected,
            ctx.repositories_base_dir,
            ctx.all_repositories,
            preview=args.preview,
        )
        return

    # ------------------------------------------------------------
    # status
    # ------------------------------------------------------------
    if args.command == "status":
        status_repos(
            selected,
            ctx.repositories_base_dir,
            ctx.all_repositories,
            args.extra_args,
            list_only=args.list,
            system_status=args.system,
            preview=args.preview,
        )
        return

    # ------------------------------------------------------------
    # path
    # ------------------------------------------------------------
    if args.command == "path":
        if not selected:
            print("[pkgmgr] No repositories selected for path.")
            return

        for repository in selected:
            try:
                repo_dir = _resolve_repository_directory(repository, ctx)
            except Exception as exc:
                ident = (
                    f"{repository.get('provider', '?')}/"
                    f"{repository.get('account', '?')}/"
                    f"{repository.get('repository', '?')}"
                )
                print(
                    f"[WARN] Could not resolve directory for {ident}: {exc}"
                )
                continue

            print(repo_dir)
        return

    # ------------------------------------------------------------
    # shell
    # ------------------------------------------------------------
    if args.command == "shell":
        if not args.shell_command:
            print("[ERROR] 'shell' requires a command via -c/--command.")
            sys.exit(2)

        command_to_run = " ".join(args.shell_command)
        for repository in selected:
            repo_dir = _resolve_repository_directory(repository, ctx)
            print(f"Executing in '{repo_dir}': {command_to_run}")
            run_command(
                command_to_run,
                cwd=repo_dir,
                preview=args.preview,
            )
        return

    # ------------------------------------------------------------
    # create
    # ------------------------------------------------------------
    if args.command == "create":
        if not args.identifiers:
            print(
                "[ERROR] 'create' requires at least one identifier "
                "in the format provider/account/repository."
            )
            sys.exit(1)

        for identifier in args.identifiers:
            create_repo(
                identifier,
                ctx.config_merged,
                ctx.user_config_path,
                ctx.binaries_dir,
                remote=args.remote,
                preview=args.preview,
            )
        return

    # ------------------------------------------------------------
    # list
    # ------------------------------------------------------------
    if args.command == "list":
        list_repositories(
            selected,
            ctx.repositories_base_dir,
            ctx.binaries_dir,
            status_filter=getattr(args, "status", "") or "",
            extra_tags=getattr(args, "tag", []) or [],
            show_description=getattr(args, "description", False),
        )
        return

    print(f"[ERROR] Unknown repos command: {args.command}")
    sys.exit(2)
115
src/pkgmgr/cli/commands/tools.py
Normal file
@@ -0,0 +1,115 @@
from __future__ import annotations

import json
import os

from typing import Any, Dict, List

from pkgmgr.cli.context import CLIContext
from pkgmgr.core.command.run import run_command
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir


Repository = Dict[str, Any]


def _resolve_repository_path(repository: Repository, ctx: CLIContext) -> str:
    """
    Resolve the filesystem path for a repository.

    Priority:
    1. Use explicit keys if present (directory / path / workspace / workspace_dir).
    2. Fallback to get_repo_dir(...) using the repositories base directory
       from the CLI context.
    """

    # 1) Explicit path-like keys on the repository object
    for key in ("directory", "path", "workspace", "workspace_dir"):
        value = repository.get(key)
        if value:
            return value

    # 2) Fallback: compute from base dir + repository metadata
    base_dir = (
        getattr(ctx, "repositories_base_dir", None)
        or getattr(ctx, "repositories_dir", None)
    )
    if not base_dir:
        raise RuntimeError(
            "Cannot resolve repositories base directory from context; "
            "expected ctx.repositories_base_dir or ctx.repositories_dir."
        )

    return get_repo_dir(base_dir, repository)


def handle_tools_command(
    args,
    ctx: CLIContext,
    selected: List[Repository],
) -> None:
    """
    Handle desktop tool commands (explore / terminal / code).
    """

    # ------------------------------------------------------------------
    # nautilus "explore" command
    # ------------------------------------------------------------------
    if args.command == "explore":
        for repository in selected:
            repo_path = _resolve_repository_path(repository, ctx)
            run_command(
                f'nautilus "{repo_path}" & disown'
            )
        return

    # ------------------------------------------------------------------
    # GNOME terminal command
    # ------------------------------------------------------------------
    if args.command == "terminal":
        for repository in selected:
            repo_path = _resolve_repository_path(repository, ctx)
            run_command(
                f'gnome-terminal --tab --working-directory="{repo_path}"'
            )
        return

    # ------------------------------------------------------------------
    # VS Code workspace command
    # ------------------------------------------------------------------
    if args.command == "code":
        if not selected:
            print("No repositories selected.")
            return

        identifiers = [
            get_repo_identifier(repo, ctx.all_repositories)
            for repo in selected
        ]
        sorted_identifiers = sorted(identifiers)
        workspace_name = "_".join(sorted_identifiers) + ".code-workspace"

        directories_cfg = ctx.config_merged.get("directories") or {}
        workspaces_dir = os.path.expanduser(
            directories_cfg.get("workspaces", "~/Workspaces")
        )
        os.makedirs(workspaces_dir, exist_ok=True)
        workspace_file = os.path.join(workspaces_dir, workspace_name)

        folders = [
            {"path": _resolve_repository_path(repository, ctx)}
            for repository in selected
        ]

        workspace_data = {
            "folders": folders,
            "settings": {},
        }

        if not os.path.exists(workspace_file):
            with open(workspace_file, "w", encoding="utf-8") as f:
                json.dump(workspace_data, f, indent=4)
            print(f"Created workspace file: {workspace_file}")
        else:
            print(f"Using existing workspace file: {workspace_file}")

        run_command(f'code "{workspace_file}"')
        return
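# Illustrative result (a sketch, not part of the module; identifiers and
# paths are placeholders): selecting two repositories "app" and "lib" writes
# ~/Workspaces/app_lib.code-workspace containing roughly
#
#   {"folders": [{"path": ".../Repositories/github.com/acme/app"},
#                {"path": ".../Repositories/github.com/acme/lib"}],
#    "settings": {}}
#
# and then opens it with `code`.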
118
src/pkgmgr/cli/commands/version.py
Normal file
@@ -0,0 +1,118 @@
from __future__ import annotations

import os
import sys
from typing import Any, Dict, List, Optional, Tuple

from pkgmgr.cli.context import CLIContext
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.git import get_tags
from pkgmgr.core.version.semver import SemVer, find_latest_version
from pkgmgr.core.version.source import (
    read_pyproject_version,
    read_flake_version,
    read_pkgbuild_version,
    read_debian_changelog_version,
    read_spec_version,
    read_ansible_galaxy_version,
)


Repository = Dict[str, Any]


def handle_version(
    args,
    ctx: CLIContext,
    selected: List[Repository],
) -> None:
    """
    Handle the 'version' command.

    Shows version information from various sources (git tags, pyproject,
    flake.nix, PKGBUILD, debian, spec, Ansible Galaxy).
    """

    repo_list = selected
    if not repo_list:
        print("No repositories selected for version.")
        sys.exit(1)

    print("pkgmgr version info")
    print("====================")

    for repo in repo_list:
        # Resolve repository directory
        repo_dir = repo.get("directory")
        if not repo_dir:
            try:
                repo_dir = get_repo_dir(ctx.repositories_base_dir, repo)
            except Exception:
                repo_dir = None

        # If no local clone exists, skip gracefully with info message
        if not repo_dir or not os.path.isdir(repo_dir):
            identifier = get_repo_identifier(repo, ctx.all_repositories)
            print(f"\nRepository: {identifier}")
            print("----------------------------------------")
            print(
                "[INFO] Skipped: repository directory does not exist "
                "locally, version detection is not possible."
            )
            continue

        print(f"\nRepository: {repo_dir}")
        print("----------------------------------------")

        # 1) Git tags (SemVer)
        try:
            tags = get_tags(cwd=repo_dir)
        except Exception as exc:
            print(f"[ERROR] Could not read git tags: {exc}")
            tags = []

        latest_tag_info: Optional[Tuple[str, SemVer]]
        latest_tag_info = find_latest_version(tags) if tags else None

        if latest_tag_info is None:
            latest_tag_str = None
            latest_ver = None
        else:
            latest_tag_str, latest_ver = latest_tag_info

        # 2) Packaging / metadata sources
        pyproject_version = read_pyproject_version(repo_dir)
        flake_version = read_flake_version(repo_dir)
        pkgbuild_version = read_pkgbuild_version(repo_dir)
        debian_version = read_debian_changelog_version(repo_dir)
        spec_version = read_spec_version(repo_dir)
        ansible_version = read_ansible_galaxy_version(repo_dir)

        # 3) Print version summary
        if latest_ver is not None:
            print(
                f"Git (latest SemVer tag): {latest_tag_str} (parsed: {latest_ver})"
            )
        else:
            print("Git (latest SemVer tag): <none found>")

        print(f"pyproject.toml: {pyproject_version or '<not found>'}")
        print(f"flake.nix: {flake_version or '<not found>'}")
        print(f"PKGBUILD: {pkgbuild_version or '<not found>'}")
        print(f"debian/changelog: {debian_version or '<not found>'}")
        print(f"package-manager.spec: {spec_version or '<not found>'}")
        print(f"Ansible Galaxy meta: {ansible_version or '<not found>'}")

        # 4) Consistency hint (Git tag vs. pyproject)
        if latest_ver is not None and pyproject_version is not None:
            try:
                file_ver = SemVer.parse(pyproject_version)
                if file_ver != latest_ver:
                    print(
                        f"[WARN] Version mismatch: Git={latest_ver}, pyproject={file_ver}"
                    )
            except ValueError:
                print(
                    f"[WARN] pyproject version {pyproject_version!r} is not valid SemVer."
                )
20
src/pkgmgr/cli/context.py
Normal file
@@ -0,0 +1,20 @@
from __future__ import annotations

from dataclasses import dataclass
from typing import Any, Dict, List


@dataclass
class CLIContext:
    """
    Shared runtime context for CLI commands.

    This avoids passing many individual parameters around and
    keeps the CLI layer thin and structured.
    """

    config_merged: Dict[str, Any]
    repositories_base_dir: str
    all_repositories: List[Dict[str, Any]]
    binaries_dir: str
    user_config_path: str
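# Illustrative construction (a sketch, not part of the module); the values
# are placeholders for what the entry point would pass in:
#
#   ctx = CLIContext(
#       config_merged={"directories": {"workspaces": "~/Workspaces"}},
#       repositories_base_dir="~/Repositories",
#       all_repositories=[{"provider": "github.com",
#                          "account": "acme",
#                          "repository": "app"}],
#       binaries_dir="~/.local/bin",
#       user_config_path="~/.config/pkgmgr/config.yaml",
#   )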
178
src/pkgmgr/cli/dispatch.py
Normal file
@@ -0,0 +1,178 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from __future__ import annotations

import os
import sys
from typing import List, Dict, Any

from pkgmgr.cli.context import CLIContext
from pkgmgr.cli.proxy import maybe_handle_proxy
from pkgmgr.core.repository.selected import get_selected_repos
from pkgmgr.core.repository.dir import get_repo_dir

from pkgmgr.cli.commands import (
    handle_repos_command,
    handle_tools_command,
    handle_release,
    handle_version,
    handle_config,
    handle_make,
    handle_changelog,
    handle_branch,
)


def _has_explicit_selection(args) -> bool:
    """
    Return True if the user explicitly selected repositories via
    identifiers / --all / --category / --tag / --string.
    """
    identifiers = getattr(args, "identifiers", []) or []
    use_all = getattr(args, "all", False)
    categories = getattr(args, "category", []) or []
    tags = getattr(args, "tag", []) or []
    string_filter = getattr(args, "string", "") or ""

    return bool(
        use_all
        or identifiers
        or categories
        or tags
        or string_filter
    )


def _select_repo_for_current_directory(
    ctx: CLIContext,
) -> List[Dict[str, Any]]:
    """
    Heuristic: find the repository whose local directory matches the
    current working directory or is the closest parent.

    Example:
    - Repo directory: /home/kevin/Repositories/foo
    - CWD: /home/kevin/Repositories/foo/subdir
    → 'foo' is selected.
    """
    cwd = os.path.abspath(os.getcwd())
    candidates: List[tuple[str, Dict[str, Any]]] = []

    for repo in ctx.all_repositories:
        repo_dir = repo.get("directory")
        if not repo_dir:
            try:
                repo_dir = get_repo_dir(ctx.repositories_base_dir, repo)
            except Exception:
                repo_dir = None
        if not repo_dir:
            continue

        repo_dir_abs = os.path.abspath(os.path.expanduser(repo_dir))
        if cwd == repo_dir_abs or cwd.startswith(repo_dir_abs + os.sep):
            candidates.append((repo_dir_abs, repo))

    if not candidates:
        return []

    # Pick the repo with the longest (most specific) path.
    candidates.sort(key=lambda item: len(item[0]), reverse=True)
    return [candidates[0][1]]
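# Illustrative sketch (not part of the module) of the longest-path rule:
# if both ~/Repositories and ~/Repositories/foo are configured repository
# directories and the CWD is ~/Repositories/foo/subdir, both match as
# parents, and sorting by path length picks the more specific 'foo'.
# The paths are placeholders.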


def dispatch_command(args, ctx: CLIContext) -> None:
    """
    Dispatch the parsed arguments to the appropriate command handler.
    """

    # First: proxy commands (git / docker / docker compose / make wrapper etc.)
    if maybe_handle_proxy(args, ctx):
        return

    # Commands that operate on repository selections
    commands_with_selection: List[str] = [
        "install",
        "update",
        "deinstall",
        "delete",
        "status",
        "path",
        "shell",
        "create",
        "list",
        "make",
        "release",
        "version",
        "changelog",
        "explore",
        "terminal",
        "code",
    ]

    if getattr(args, "command", None) in commands_with_selection:
        if _has_explicit_selection(args):
            # Classic selection logic (identifiers / --all / filters)
            selected = get_selected_repos(args, ctx.all_repositories)
        else:
            # Default per help text: repository of current folder.
            selected = _select_repo_for_current_directory(ctx)
            # If none is found, leave 'selected' empty.
            # Individual handlers will then emit a clear message instead
            # of silently picking an unrelated repository.
    else:
        selected = []

    # ------------------------------------------------------------------ #
    # Repos-related commands
    # ------------------------------------------------------------------ #
    if args.command in (
        "install",
        "update",
        "deinstall",
        "delete",
        "status",
        "path",
        "shell",
        "create",
        "list",
    ):
        handle_repos_command(args, ctx, selected)
        return

    # ------------------------------------------------------------------ #
    # Tools (explore / terminal / code)
    # ------------------------------------------------------------------ #
    if args.command in ("explore", "terminal", "code"):
        handle_tools_command(args, ctx, selected)
        return

    # ------------------------------------------------------------------ #
    # Release / Version / Changelog / Config / Make / Branch
    # ------------------------------------------------------------------ #
    if args.command == "release":
        handle_release(args, ctx, selected)
        return

    if args.command == "version":
        handle_version(args, ctx, selected)
        return

    if args.command == "changelog":
        handle_changelog(args, ctx, selected)
        return

    if args.command == "config":
        handle_config(args, ctx)
        return

    if args.command == "make":
        handle_make(args, ctx, selected)
        return

    if args.command == "branch":
        handle_branch(args, ctx)
        return

    print(f"Unknown command: {args.command}")
    sys.exit(2)
505
src/pkgmgr/cli/parser.py
Normal file
@@ -0,0 +1,505 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from __future__ import annotations

import argparse

from pkgmgr.cli.proxy import register_proxy_commands


class SortedSubParsersAction(argparse._SubParsersAction):
    """
    Subparsers action that keeps choices sorted alphabetically.
    """

    def add_parser(self, name, **kwargs):
        parser = super().add_parser(name, **kwargs)
        # Sort choices alphabetically by dest (subcommand name)
        self._choices_actions.sort(key=lambda a: a.dest)
        return parser


def add_identifier_arguments(subparser: argparse.ArgumentParser) -> None:
    """
    Common identifier / selection arguments for many subcommands.

    Selection modes (mutual intent, not hard-enforced):
    - identifiers (positional): select by alias / provider/account/repo
    - --all: select all repositories
    - --category / --string / --tag: filter-based selection on top
      of the full repository set
    """
    subparser.add_argument(
        "identifiers",
        nargs="*",
        help=(
            "Identifier(s) for repositories. "
            "Default: Repository of current folder."
        ),
    )
    subparser.add_argument(
        "--all",
        action="store_true",
        default=False,
        help=(
            "Apply the subcommand to all repositories in the config. "
            "Some subcommands ask for confirmation. If you want to give this "
            "confirmation for all repositories, pipe 'yes'. E.g: "
            "yes | pkgmgr {subcommand} --all"
        ),
    )
    subparser.add_argument(
        "--category",
        nargs="+",
        default=[],
        help=(
            "Filter repositories by category patterns derived from config "
            "filenames or repo metadata (use filename without .yml/.yaml, "
            "or /regex/ to use a regular expression)."
        ),
    )
    subparser.add_argument(
        "--string",
        default="",
        help=(
            "Filter repositories whose identifier / name / path contains this "
            "substring (case-insensitive). Use /regex/ for regular expressions."
        ),
    )
    subparser.add_argument(
        "--tag",
        action="append",
        default=[],
        help=(
            "Filter repositories by tag. Matches tags from the repository "
            "collector and category tags. Use /regex/ for regular expressions."
        ),
    )
    subparser.add_argument(
        "--preview",
        action="store_true",
        help="Preview changes without executing commands",
    )
    subparser.add_argument(
        "--list",
        action="store_true",
        help="List affected repositories (with preview or status)",
    )
    subparser.add_argument(
        "-a",
        "--args",
        nargs=argparse.REMAINDER,
        dest="extra_args",
        help="Additional parameters to be attached.",
        default=[],
    )
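# Illustrative selections (a sketch, not part of the module); the repo
# names are placeholders:
#
#   pkgmgr status                       # repo of the current folder
#   pkgmgr update --all                 # every configured repository
#   pkgmgr install github.com/acme/app  # explicit identifier
#   pkgmgr list --category tools --tag "/cli.*/" --string acme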


def add_install_update_arguments(subparser: argparse.ArgumentParser) -> None:
    """
    Common arguments for install/update commands.
    """
    add_identifier_arguments(subparser)
    subparser.add_argument(
        "-q",
        "--quiet",
        action="store_true",
        help="Suppress warnings and info messages",
    )
    subparser.add_argument(
        "--no-verification",
        action="store_true",
        default=False,
        help="Disable verification via commit/gpg",
    )
    subparser.add_argument(
        "--dependencies",
        action="store_true",
        help="Also pull and update dependencies",
    )
    subparser.add_argument(
        "--clone-mode",
        choices=["ssh", "https", "shallow"],
        default="ssh",
        help=(
            "Specify the clone mode: ssh, https, or shallow "
            "(HTTPS shallow clone; default: ssh)"
        ),
    )


def create_parser(description_text: str) -> argparse.ArgumentParser:
    """
    Create the top-level argument parser for pkgmgr.
    """
    parser = argparse.ArgumentParser(
        description=description_text,
        formatter_class=argparse.RawTextHelpFormatter,
    )
    subparsers = parser.add_subparsers(
        dest="command",
        help="Subcommands",
        action=SortedSubParsersAction,
    )

    # ------------------------------------------------------------
    # install / update / deinstall / delete
    # ------------------------------------------------------------
    install_parser = subparsers.add_parser(
        "install",
        help="Setup repository/repositories alias links to executables",
    )
    add_install_update_arguments(install_parser)

    update_parser = subparsers.add_parser(
        "update",
        help="Update (pull + install) repository/repositories",
    )
    add_install_update_arguments(update_parser)
    update_parser.add_argument(
        "--system",
        action="store_true",
        help="Include system update commands",
    )

    deinstall_parser = subparsers.add_parser(
        "deinstall",
        help="Remove alias links to repository/repositories",
    )
    add_identifier_arguments(deinstall_parser)

    delete_parser = subparsers.add_parser(
        "delete",
        help="Delete repository/repositories alias links to executables",
    )
    add_identifier_arguments(delete_parser)

    # ------------------------------------------------------------
    # create
    # ------------------------------------------------------------
    create_cmd_parser = subparsers.add_parser(
        "create",
        help=(
            "Create new repository entries: add them to the config if not "
            "already present, initialize the local repository, and push "
            "remotely if --remote is set."
        ),
    )
    add_identifier_arguments(create_cmd_parser)
    create_cmd_parser.add_argument(
        "--remote",
        action="store_true",
        help="If set, add the remote and push the initial commit.",
    )

    # ------------------------------------------------------------
    # status
    # ------------------------------------------------------------
    status_parser = subparsers.add_parser(
        "status",
        help="Show status for repository/repositories or system",
    )
    add_identifier_arguments(status_parser)
    status_parser.add_argument(
        "--system",
        action="store_true",
        help="Show system status",
    )

    # ------------------------------------------------------------
    # config
    # ------------------------------------------------------------
    config_parser = subparsers.add_parser(
        "config",
        help="Manage configuration",
    )
    config_subparsers = config_parser.add_subparsers(
        dest="subcommand",
        help="Config subcommands",
        required=True,
    )

    config_show = config_subparsers.add_parser(
        "show",
        help="Show configuration",
    )
    add_identifier_arguments(config_show)

    config_subparsers.add_parser(
        "add",
        help="Interactively add a new repository entry",
    )

    config_subparsers.add_parser(
        "edit",
        help="Edit configuration file with nano",
    )

    config_subparsers.add_parser(
        "init",
        help="Initialize user configuration by scanning the base directory",
    )

    config_delete = config_subparsers.add_parser(
        "delete",
        help="Delete repository entry from user config",
    )
    add_identifier_arguments(config_delete)

    config_ignore = config_subparsers.add_parser(
        "ignore",
        help="Set ignore flag for repository entries in user config",
    )
    add_identifier_arguments(config_ignore)
    config_ignore.add_argument(
        "--set",
        choices=["true", "false"],
        required=True,
        help="Set ignore to true or false",
    )

    config_subparsers.add_parser(
        "update",
        help=(
            "Update default config files in ~/.config/pkgmgr/ from the "
            "installed pkgmgr package (does not touch config.yaml)."
        ),
    )

    # ------------------------------------------------------------
    # path / explore / terminal / code / shell
    # ------------------------------------------------------------
    path_parser = subparsers.add_parser(
        "path",
        help="Print the path(s) of repository/repositories",
    )
    add_identifier_arguments(path_parser)

    explore_parser = subparsers.add_parser(
        "explore",
        help="Open repository in Nautilus file manager",
    )
    add_identifier_arguments(explore_parser)

    terminal_parser = subparsers.add_parser(
        "terminal",
        help="Open repository in a new GNOME Terminal tab",
    )
    add_identifier_arguments(terminal_parser)

    code_parser = subparsers.add_parser(
        "code",
        help="Open repository workspace with VS Code",
    )
    add_identifier_arguments(code_parser)

    shell_parser = subparsers.add_parser(
        "shell",
        help="Execute a shell command in each repository",
    )
    add_identifier_arguments(shell_parser)
    shell_parser.add_argument(
        "-c",
        "--command",
        nargs=argparse.REMAINDER,
        dest="shell_command",
        help=(
            "The shell command (and its arguments) to execute in each "
            "repository"
        ),
        default=[],
    )

    # ------------------------------------------------------------
    # branch
    # ------------------------------------------------------------
    branch_parser = subparsers.add_parser(
        "branch",
        help="Branch-related utilities (e.g. open/close feature branches)",
    )
    branch_subparsers = branch_parser.add_subparsers(
        dest="subcommand",
        help="Branch subcommands",
        required=True,
    )

    branch_open = branch_subparsers.add_parser(
        "open",
        help="Create and push a new branch on top of a base branch",
    )
    branch_open.add_argument(
        "name",
        nargs="?",
        help=(
            "Name of the new branch (optional; will be asked interactively "
            "if omitted)"
        ),
    )
    branch_open.add_argument(
        "--base",
        default="main",
        help="Base branch to create the new branch from (default: main)",
    )

    branch_close = branch_subparsers.add_parser(
        "close",
        help="Merge a feature branch into base and delete it",
    )
    branch_close.add_argument(
        "name",
        nargs="?",
        help=(
            "Name of the branch to close (optional; current branch is used "
            "if omitted)"
        ),
    )
    branch_close.add_argument(
        "--base",
        default="main",
        help=(
            "Base branch to merge into (default: main; falls back to master "
            "internally if main does not exist)"
        ),
    )

    # ------------------------------------------------------------
    # release
    # ------------------------------------------------------------
    release_parser = subparsers.add_parser(
        "release",
        help=(
            "Create a release for repository/ies by incrementing version "
            "and updating the changelog."
        ),
    )
    release_parser.add_argument(
        "release_type",
        choices=["major", "minor", "patch"],
        help="Type of version increment for the release (major, minor, patch).",
    )
    release_parser.add_argument(
        "-m",
        "--message",
        default=None,
        help=(
            "Optional release message to add to the changelog and tag."
        ),
    )
    # Generic selection / preview / list / extra_args
    add_identifier_arguments(release_parser)
    # Close current branch after successful release
    release_parser.add_argument(
        "--close",
        action="store_true",
        help=(
            "Close the current branch after a successful release in each "
            "repository, if it is not main/master."
        ),
    )
    # Force: skip preview+confirmation and run release directly
    release_parser.add_argument(
        "-f",
        "--force",
        action="store_true",
        help=(
            "Skip the interactive preview+confirmation step and run the "
            "release directly."
        ),
    )

    # ------------------------------------------------------------
    # version
    # ------------------------------------------------------------
    version_parser = subparsers.add_parser(
        "version",
        help=(
            "Show version information for repository/ies "
            "(git tags, pyproject.toml, flake.nix, PKGBUILD, debian, spec, "
            "Ansible Galaxy)."
        ),
    )
    add_identifier_arguments(version_parser)

    # ------------------------------------------------------------
    # changelog
    # ------------------------------------------------------------
    changelog_parser = subparsers.add_parser(
        "changelog",
        help=(
            "Show changelog derived from Git history. "
            "By default, shows the changes between the last two SemVer tags."
        ),
    )
    changelog_parser.add_argument(
        "range",
        nargs="?",
        default="",
        help=(
            "Optional tag or range (e.g. v1.2.3 or v1.2.0..v1.2.3). "
            "If omitted, the changelog between the last two SemVer "
            "tags is shown."
        ),
    )
    add_identifier_arguments(changelog_parser)

    # ------------------------------------------------------------
    # list
    # ------------------------------------------------------------
    list_parser = subparsers.add_parser(
        "list",
        help="List all repositories with details and status",
    )
    # Same selection logic as for install/update/etc.:
    add_identifier_arguments(list_parser)
    list_parser.add_argument(
        "--status",
        type=str,
        default="",
        help=(
            "Filter repositories by status (case insensitive). "
            "Use /regex/ for regular expressions."
        ),
    )
    list_parser.add_argument(
        "--description",
        action="store_true",
        help=(
            "Show an additional detailed section per repository "
            "(description, homepage, tags, categories, paths)."
        ),
    )


    # ------------------------------------------------------------
    # make
    # ------------------------------------------------------------
    make_parser = subparsers.add_parser(
        "make",
        help="Executes make commands",
    )
    add_identifier_arguments(make_parser)
    make_subparsers = make_parser.add_subparsers(
        dest="subcommand",
        help="Make subcommands",
        required=True,
    )

    make_install = make_subparsers.add_parser(
        "install",
        help="Executes the make install command",
    )
    add_identifier_arguments(make_install)

    make_deinstall = make_subparsers.add_parser(
        "deinstall",
        help="Executes the make deinstall command",
    )
    add_identifier_arguments(make_deinstall)

    # ------------------------------------------------------------
    # Proxy commands (git, docker, docker compose, ...)
    # ------------------------------------------------------------
    register_proxy_commands(subparsers)

    return parser
260
src/pkgmgr/cli/proxy.py
Normal file
@@ -0,0 +1,260 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from __future__ import annotations

import argparse
import os
import sys
from typing import Dict, List, Any

from pkgmgr.cli.context import CLIContext
from pkgmgr.actions.repository.clone import clone_repos
from pkgmgr.actions.proxy import exec_proxy_command
from pkgmgr.actions.repository.pull import pull_with_verification
from pkgmgr.core.repository.selected import get_selected_repos
from pkgmgr.core.repository.dir import get_repo_dir


PROXY_COMMANDS: Dict[str, List[str]] = {
    "git": [
        "pull",
        "push",
        "diff",
        "add",
        "show",
        "checkout",
        "clone",
        "reset",
        "revert",
        "rebase",
        "commit",
    ],
    "docker": [
        "start",
        "stop",
        "build",
    ],
    "docker compose": [
        "up",
        "down",
        "exec",
        "ps",
        "restart",
    ],
}
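# Illustrative proxy behaviour (a sketch, not part of the module): because
# "pull" is registered under "git" above, `pkgmgr pull --all` runs `git pull`
# in every configured repository, and `pkgmgr up my-stack` runs
# `docker compose up` in the matching repository ("my-stack" is a
# placeholder identifier).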


def _add_proxy_identifier_arguments(parser: argparse.ArgumentParser) -> None:
    """
    Selection arguments for proxy subcommands.
    """
    parser.add_argument(
        "identifiers",
        nargs="*",
        help=(
            "Identifier(s) for repositories. "
            "Default: Repository of current folder."
        ),
    )
    parser.add_argument(
        "--all",
        action="store_true",
        default=False,
        help=(
            "Apply the subcommand to all repositories in the config. "
            "Some subcommands ask for confirmation. If you want to give this "
            "confirmation for all repositories, pipe 'yes'. E.g: "
            "yes | pkgmgr {subcommand} --all"
        ),
    )
    parser.add_argument(
        "--category",
        nargs="+",
        default=[],
        help=(
            "Filter repositories by category patterns derived from config "
            "filenames or repo metadata (use filename without .yml/.yaml, "
            "or /regex/ to use a regular expression)."
        ),
    )
    parser.add_argument(
        "--string",
        default="",
        help=(
            "Filter repositories whose identifier / name / path contains this "
            "substring (case-insensitive). Use /regex/ for regular expressions."
        ),
    )
    parser.add_argument(
        "--preview",
        action="store_true",
        help="Preview changes without executing commands",
    )
    parser.add_argument(
        "--list",
        action="store_true",
        help="List affected repositories (with preview or status)",
    )
    parser.add_argument(
        "-a",
        "--args",
        nargs=argparse.REMAINDER,
        dest="extra_args",
        help="Additional parameters to be attached.",
        default=[],
    )


def _proxy_has_explicit_selection(args: argparse.Namespace) -> bool:
    """
    Same semantics as in the main dispatch:
    True if the user explicitly selected repositories.
    """
    identifiers = getattr(args, "identifiers", []) or []
    use_all = getattr(args, "all", False)
    categories = getattr(args, "category", []) or []
    string_filter = getattr(args, "string", "") or ""

    # Proxy commands currently do not support --tag, so it is not checked here.
    return bool(
        use_all
        or identifiers
        or categories
        or string_filter
    )


def _select_repo_for_current_directory(
    ctx: CLIContext,
) -> List[Dict[str, Any]]:
    """
    Heuristic: find the repository whose local directory matches the
    current working directory or is the closest parent.
    """
    cwd = os.path.abspath(os.getcwd())
    candidates: List[tuple[str, Dict[str, Any]]] = []

    for repo in ctx.all_repositories:
        repo_dir = repo.get("directory")
        if not repo_dir:
            try:
                repo_dir = get_repo_dir(ctx.repositories_base_dir, repo)
            except Exception:
                repo_dir = None
        if not repo_dir:
            continue

        repo_dir_abs = os.path.abspath(os.path.expanduser(repo_dir))
        if cwd == repo_dir_abs or cwd.startswith(repo_dir_abs + os.sep):
            candidates.append((repo_dir_abs, repo))

    if not candidates:
        return []

    # Pick the repo with the longest (most specific) path.
    candidates.sort(key=lambda item: len(item[0]), reverse=True)
    return [candidates[0][1]]


def register_proxy_commands(
    subparsers: argparse._SubParsersAction,
) -> None:
    """
    Register proxy subcommands for git, docker, docker compose, ...
    """
    for command, subcommands in PROXY_COMMANDS.items():
        for subcommand in subcommands:
            parser = subparsers.add_parser(
                subcommand,
                help=f"Proxies '{command} {subcommand}' to repository/ies",
                description=(
                    f"Executes '{command} {subcommand}' for the "
                    "selected repositories. "
                    "For more details see the underlying tool's help: "
                    f"'{command} {subcommand} --help'"
                ),
                formatter_class=argparse.RawTextHelpFormatter,
            )

            if subcommand in ["pull", "clone"]:
                parser.add_argument(
                    "--no-verification",
                    action="store_true",
                    default=False,
                    help="Disable verification via commit/gpg",
                )
            if subcommand == "clone":
                parser.add_argument(
                    "--clone-mode",
                    choices=["ssh", "https", "shallow"],
                    default="ssh",
                    help=(
                        "Specify the clone mode: ssh, https, or shallow "
                        "(HTTPS shallow clone; default: ssh)"
                    ),
                )

            _add_proxy_identifier_arguments(parser)


def maybe_handle_proxy(args: argparse.Namespace, ctx: CLIContext) -> bool:
    """
    If the top-level command is one of the proxy subcommands
    (git / docker / docker compose), handle it here and return True.
    """
    all_proxy_subcommands = {
        sub for subs in PROXY_COMMANDS.values() for sub in subs
    }

    if args.command not in all_proxy_subcommands:
        return False

    # Default semantics: without explicit selection → repo of current folder.
    if _proxy_has_explicit_selection(args):
        selected = get_selected_repos(args, ctx.all_repositories)
    else:
        selected = _select_repo_for_current_directory(ctx)
        if not selected:
            print(
                "[ERROR] No repository matches the current directory. "
                "Specify identifiers or use --all/--category/--string."
            )
            sys.exit(1)

    for command, subcommands in PROXY_COMMANDS.items():
        if args.command not in subcommands:
            continue

        if args.command == "clone":
            clone_repos(
                selected,
                ctx.repositories_base_dir,
                ctx.all_repositories,
                args.preview,
                args.no_verification,
                args.clone_mode,
            )
        elif args.command == "pull":
            pull_with_verification(
                selected,
                ctx.repositories_base_dir,
                ctx.all_repositories,
                args.extra_args,
                args.no_verification,
                args.preview,
            )
        else:
            exec_proxy_command(
                command,
                selected,
                ctx.repositories_base_dir,
                ctx.all_repositories,
                args.command,
                args.extra_args,
                args.preview,
            )

        sys.exit(0)

    return True
0
src/pkgmgr/core/command/__init__.py
Normal file
42
src/pkgmgr/core/command/alias.py
Normal file
@@ -0,0 +1,42 @@
import os
import hashlib
import re

def generate_alias(repo, bin_dir, existing_aliases):
    """
    Generate an alias for a repository based on its repository name.

    Steps:
    1. Keep only consonants from the repository name (letters from BCDFGHJKLMNPQRSTVWXYZ).
    2. Collapse consecutive identical consonants.
    3. Truncate to at most 12 characters.
    4. If that alias conflicts (already in existing_aliases or a file exists in bin_dir),
       then prefix with the first letter of provider and account.
    5. If still conflicting, append a three-character hash until the alias is unique.
    """
    repo_name = repo.get("repository")
    # Keep only consonants.
    consonants = re.sub(r"[^bcdfghjklmnpqrstvwxyzBCDFGHJKLMNPQRSTVWXYZ]", "", repo_name)
    # Collapse consecutive identical consonants.
    collapsed = re.sub(r"(.)\1+", r"\1", consonants)
    base_alias = collapsed[:12] if len(collapsed) > 12 else collapsed
    candidate = base_alias.lower()

    def conflict(alias):
        alias_path = os.path.join(bin_dir, alias)
        return alias in existing_aliases or os.path.exists(alias_path)

    if not conflict(candidate):
        return candidate

    prefix = (repo.get("provider", "")[0] + repo.get("account", "")[0]).lower()
    candidate2 = (prefix + candidate)[:12]
    if not conflict(candidate2):
        return candidate2

    h = hashlib.md5(repo_name.encode("utf-8")).hexdigest()[:3]
    candidate3 = (candidate2 + h)[:12]
    while conflict(candidate3):
        # NOTE: appending and then truncating back to 12 characters (as the
        # first draft did) can loop forever once the alias is already 12
        # characters long; letting the alias grow here guarantees termination.
        candidate3 += "x"
    return candidate3
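# Illustrative walk-through (a sketch, not part of the module): for a repo
# named "package-manager", the consonants are "pckgmngr", nothing collapses,
# and the alias becomes "pckgmngr". If that name is already taken and the
# repo lives at github.com/acme, step 4 yields "gapckgmngr" (prefix "ga"),
# and step 5 would append an md5-derived three-character suffix.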
114
src/pkgmgr/core/command/ink.py
Normal file
@@ -0,0 +1,114 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir


def create_ink(
    repo,
    repositories_base_dir,
    bin_dir,
    all_repos,
    quiet: bool = False,
    preview: bool = False,
) -> None:
    """
    Create a symlink for the repository's command.

    IMPORTANT:
    This function is intentionally kept *simple*. All decision logic for
    choosing the command lives inside resolve_command_for_repo().

    Behavior:
    - If repo["command"] is defined → create a symlink to it.
    - If repo["command"] is missing or None → do NOT create a link.

    Safety:
    - If the resolved command path is identical to the final link target,
      we skip symlink creation to avoid self-referential symlinks that
      would break shell resolution ("too many levels of symbolic links").
    """

    repo_identifier = get_repo_identifier(repo, all_repos)
    repo_dir = get_repo_dir(repositories_base_dir, repo)

    command = repo.get("command")
    if not command:
        if not quiet:
            print(f"No command resolved for '{repo_identifier}'. Skipping link.")
        return

    link_path = os.path.join(bin_dir, repo_identifier)

    # ------------------------------------------------------------------
    # Safety guard: avoid self-referential symlinks
    #
    # Example of a broken situation we must avoid:
    #   - command   = ~/.local/bin/package-manager
    #   - link_path = ~/.local/bin/package-manager
    #   - create_ink() removes the real binary and creates a symlink
    #     pointing to itself → zsh: too many levels of symbolic links
    #
    # If the resolved command already lives exactly at the target path,
    # we treat it as "already installed" and skip any modification.
    # ------------------------------------------------------------------
    if os.path.abspath(command) == os.path.abspath(link_path):
        if not quiet:
            print(
                f"[pkgmgr] Command for '{repo_identifier}' already lives at "
                f"'{link_path}'. Skipping symlink creation to avoid a "
                "self-referential link."
            )
        return

    if preview:
        print(f"[Preview] Would link {link_path} → {command}")
        return

    # Mark local repo scripts as executable if needed
    try:
        if os.path.realpath(command).startswith(os.path.realpath(repo_dir)):
            os.chmod(command, 0o755)
    except Exception as e:
        if not quiet:
            print(f"Failed to set permissions on '{command}': {e}")

    # Create bin directory
    os.makedirs(bin_dir, exist_ok=True)

    # Remove existing
    if os.path.exists(link_path) or os.path.islink(link_path):
        os.remove(link_path)

    # Create the link
    os.symlink(command, link_path)

    if not quiet:
        print(f"Symlink created: {link_path} → {command}")

    # ------------------------------------------------------------
    # Optional alias support (same as before)
    # ------------------------------------------------------------
    alias_name = repo.get("alias")
    if alias_name:
        alias_link_path = os.path.join(bin_dir, alias_name)

        if alias_name == repo_identifier:
            if not quiet:
                print(
                    f"Alias '{alias_name}' equals identifier. "
                    "Skipping alias creation."
                )
            return

        try:
            if os.path.exists(alias_link_path) or os.path.islink(alias_link_path):
                os.remove(alias_link_path)
            os.symlink(link_path, alias_link_path)
            if not quiet:
                print(f"Alias '{alias_name}' created → {repo_identifier}")
        except Exception as e:
            if not quiet:
                print(f"Error creating alias '{alias_name}': {e}")
|
||||
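A preview-mode sketch (illustrative, not from the commit; the paths are invented, and preview=True guarantees nothing touches the filesystem):

from pkgmgr.core.command.ink import create_ink

repo = {
    "provider": "github.com",
    "account": "alice",
    "repository": "tool",
    "command": "/opt/tool/main.sh",
}
create_ink(repo, repositories_base_dir="/tmp/repos", bin_dir="/tmp/bin",
           all_repos=[repo], preview=True)
# -> [Preview] Would link /tmp/bin/tool → /opt/tool/main.sh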
207
src/pkgmgr/core/command/resolve.py
Normal file
@@ -0,0 +1,207 @@
import os
import shutil
from typing import Optional, List, Dict, Any


Repository = Dict[str, Any]


def _is_executable(path: str) -> bool:
    return os.path.exists(path) and os.access(path, os.X_OK)


def _find_python_package_root(repo_dir: str) -> Optional[str]:
    """
    Detect a Python src-layout package:

        repo_dir/src/<package>/__main__.py

    Returns the directory containing __main__.py (e.g. ".../src/arc")
    or None if no such structure exists.
    """
    src_dir = os.path.join(repo_dir, "src")
    if not os.path.isdir(src_dir):
        return None

    for root, _dirs, files in os.walk(src_dir):
        if "__main__.py" in files:
            return root

    return None


def _nix_binary_candidates(home: str, names: List[str]) -> List[str]:
    """
    Build possible Nix profile binary paths for a list of candidate names.
    """
    return [
        os.path.join(home, ".nix-profile", "bin", name)
        for name in names
        if name
    ]


def _path_binary_candidates(names: List[str]) -> List[str]:
    """
    Resolve candidate names via PATH using shutil.which.
    Returns only existing, executable paths.
    """
    binaries: List[str] = []
    for name in names:
        if not name:
            continue
        candidate = shutil.which(name)
        if candidate and _is_executable(candidate):
            binaries.append(candidate)
    return binaries


def resolve_command_for_repo(
    repo: Repository,
    repo_identifier: str,
    repo_dir: str,
) -> Optional[str]:
    """
    Resolve the executable command for a repository.

    Semantics:
    ----------
    - If the repository explicitly defines the key "command" (even if None),
      that is treated as authoritative and returned immediately.
      This allows e.g.:

          command: null

      for pure library repositories with no CLI.

    - If "command" is not defined, we try to discover a suitable CLI command:
        1. Prefer already installed binaries (PATH, Nix profile).
        2. For Python src-layout packages (src/*/__main__.py), try to infer
           a sensible command name (alias, repo identifier, repository name,
           package directory name) and resolve those via PATH / Nix.
        3. For script-style repos, fall back to main.sh / main.py.
        4. If nothing matches, return None (no CLI) instead of raising.

    The caller can interpret:
    - str  → path to the command (symlink target)
    - None → no CLI command for this repository
    """

    # ------------------------------------------------------------------
    # 1) Explicit command declaration (including explicit "no command")
    # ------------------------------------------------------------------
    if "command" in repo:
        # May be a string path or None. None means: this repo intentionally
        # has no CLI command and should not be resolved.
        return repo.get("command")

    home = os.path.expanduser("~")

    # ------------------------------------------------------------------
    # 2) Collect candidate names for CLI binaries
    #
    # Order of preference:
    #   - repo_identifier (usually alias or configured id)
    #   - alias (if defined)
    #   - repository name (e.g. "analysis-ready-code")
    #   - python package name (e.g. "arc" from src/arc/__main__.py)
    # ------------------------------------------------------------------
    alias = repo.get("alias")
    repository_name = repo.get("repository")

    python_package_root = _find_python_package_root(repo_dir)
    if python_package_root:
        python_package_name = os.path.basename(python_package_root)
    else:
        python_package_name = None

    candidate_names: List[str] = []
    seen: set[str] = set()

    for name in (
        repo_identifier,
        alias,
        repository_name,
        python_package_name,
    ):
        if name and name not in seen:
            seen.add(name)
            candidate_names.append(name)

    # ------------------------------------------------------------------
    # 3) Try to resolve via PATH (non-system and system) and Nix profile
    # ------------------------------------------------------------------
    # a) PATH binaries
    path_binaries = _path_binary_candidates(candidate_names)

    # b) Classify system (/usr/...) vs non-system
    system_binary: Optional[str] = None
    non_system_binary: Optional[str] = None

    for bin_path in path_binaries:
        if bin_path.startswith("/usr"):
            # Last system binary wins, but usually there is only one anyway
            system_binary = bin_path
        else:
            non_system_binary = bin_path
            break  # prefer the first non-system binary

    # c) Nix profile binaries
    nix_binaries = [
        path for path in _nix_binary_candidates(home, candidate_names)
        if _is_executable(path)
    ]
    nix_binary = nix_binaries[0] if nix_binaries else None

    # Decide priority:
    #   1) non-system PATH binary (user/venv)
    #   2) Nix profile binary
    #   3) system binary (/usr/...) → only if we want to expose it
    if non_system_binary:
        return non_system_binary

    if nix_binary:
        return nix_binary

    if system_binary:
        # Respect system packages. Depending on your policy you can decide
        # to return None (no symlink, the OS owns the command) or to expose it.
        # Here we choose: no symlink for pure system binaries.
        if repo.get("ignore_system_binary", False):
            print(
                f"[pkgmgr] System binary for '{repo_identifier}' found at "
                f"{system_binary}; no symlink will be created."
            )
        return None

    # ------------------------------------------------------------------
    # 4) Script-style repository: fall back to main.sh / main.py
    # ------------------------------------------------------------------
    main_sh = os.path.join(repo_dir, "main.sh")
    main_py = os.path.join(repo_dir, "main.py")

    if _is_executable(main_sh):
        return main_sh

    if os.path.exists(main_py):
        return main_py

    # ------------------------------------------------------------------
    # 5) No CLI discovered
    #
    # At this point we may still have a Python package structure, but
    # without any installed CLI entry point and without main.sh/main.py.
    #
    # This is perfectly valid for library-only repositories, so we do
    # NOT treat this as an error. The caller can then decide to simply
    # skip symlink creation.
    # ------------------------------------------------------------------
    if python_package_root:
        print(
            f"[INFO] Repository '{repo_identifier}' appears to be a Python "
            f"package at '{python_package_root}' but no CLI entry point was "
            f"found (PATH, Nix, main.sh/main.py). Treating it as a "
            f"library-only repository with no command."
        )

    return None
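Two illustrative calls showing the precedence rules (hypothetical repo dicts; the second result depends on what is installed on the machine):

from pkgmgr.core.command.resolve import resolve_command_for_repo

# Explicit "command: null" is authoritative: a library-only repo gets no symlink.
lib_repo = {"command": None, "repository": "somelib"}
print(resolve_command_for_repo(lib_repo, "somelib", "/tmp/repos/somelib"))  # None

# Without a "command" key, discovery runs (PATH, Nix profile, main.sh/main.py).
cli_repo = {"repository": "git"}
print(resolve_command_for_repo(cli_repo, "git", "/tmp/repos/git"))
# On most systems git lives under /usr/bin -> classified as a system binary -> None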
45
src/pkgmgr/core/command/run.py
Normal file
@@ -0,0 +1,45 @@
# pkgmgr/run_command.py
import subprocess
import sys
from typing import List, Optional, Union


CommandType = Union[str, List[str]]


def run_command(
    cmd: CommandType,
    cwd: Optional[str] = None,
    preview: bool = False,
    allow_failure: bool = False,
) -> subprocess.CompletedProcess:
    """
    Run a command and optionally exit on error.

    - If `cmd` is a string, it is executed with `shell=True`.
    - If `cmd` is a list of strings, it is executed without a shell.
    """
    if isinstance(cmd, str):
        display = cmd
    else:
        display = " ".join(cmd)

    where = cwd or "."

    if preview:
        print(f"[Preview] In '{where}': {display}")
        # Fake a successful result; most callers ignore the return value anyway
        return subprocess.CompletedProcess(cmd, 0)  # type: ignore[arg-type]

    print(f"Running in '{where}': {display}")

    if isinstance(cmd, str):
        result = subprocess.run(cmd, cwd=cwd, shell=True)
    else:
        result = subprocess.run(cmd, cwd=cwd)

    if result.returncode != 0 and not allow_failure:
        print(f"Command failed with exit code {result.returncode}. Exiting.")
        sys.exit(result.returncode)

    return result
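Usage sketch (illustrative):

from pkgmgr.core.command.run import run_command

run_command("echo hello", preview=True)            # prints the command, runs nothing
run_command(["echo", "hello"])                     # list form: executed without a shell
result = run_command("false", allow_failure=True)  # non-zero exit tolerated
print(result.returncode)                           # 1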
0
src/pkgmgr/core/config/__init__.py
Normal file
310
src/pkgmgr/core/config/load.py
Normal file
@@ -0,0 +1,310 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Load and merge pkgmgr configuration.

Layering rules:

1. Defaults / category files:
   - First, all *.yml/*.yaml files (except config.yaml) in the user
     directory are loaded:
         ~/.config/pkgmgr/

   - If no matching files exist there, the config directories shipped
     with the package / project are used as a fallback:

         <pkg_root>/config_defaults
         <pkg_root>/config
         <project_root>/config_defaults
         <project_root>/config

     There, too, all *.yml/*.yaml files are loaded as layers.

   - The file name without extension (stem) is used as the category
     name and recorded in repo["category_files"].

2. User config:
   - ~/.config/pkgmgr/config.yaml (or the given path) is loaded and
     layered ON TOP of the defaults via list merge:
       - directories: dict deep-merge
       - repositories: via _merge_repo_lists (nothing is deleted!)

3. Result:
   - A dict with at least:
       config["directories"]   (dict)
       config["repositories"]  (list[dict])
"""

from __future__ import annotations

import os
from pathlib import Path
from typing import Any, Dict, List, Tuple

import yaml

Repo = Dict[str, Any]


# ---------------------------------------------------------------------------
# Helper functions
# ---------------------------------------------------------------------------

def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
    """
    Recursively merge two dictionaries.

    Values from `override` win over values in `base`.
    """
    for key, value in override.items():
        if (
            key in base
            and isinstance(base[key], dict)
            and isinstance(value, dict)
        ):
            _deep_merge(base[key], value)
        else:
            base[key] = value
    return base


def _repo_key(repo: Repo) -> Tuple[str, str, str]:
    """
    Normalised key for identifying a repository across config files.
    """
    return (
        str(repo.get("provider", "")),
        str(repo.get("account", "")),
        str(repo.get("repository", "")),
    )


def _merge_repo_lists(
    base_list: List[Repo],
    new_list: List[Repo],
    category_name: str | None = None,
) -> List[Repo]:
    """
    Merge two repository lists, matching by (provider, account, repository).

    - If a repo from new_list does not exist yet, it is appended.
    - If it exists, its fields are overridden via deep merge.
    - If category_name is set, it is recorded in repo["category_files"].
    """
    index: Dict[Tuple[str, str, str], Repo] = {
        _repo_key(r): r for r in base_list
    }

    for src in new_list:
        key = _repo_key(src)
        if key == ("", "", ""):
            # Incomplete key -> simply append
            dst = dict(src)
            if category_name:
                dst.setdefault("category_files", [])
                if category_name not in dst["category_files"]:
                    dst["category_files"].append(category_name)
            base_list.append(dst)
            continue

        existing = index.get(key)
        if existing is None:
            dst = dict(src)
            if category_name:
                dst.setdefault("category_files", [])
                if category_name not in dst["category_files"]:
                    dst["category_files"].append(category_name)
            base_list.append(dst)
            index[key] = dst
        else:
            _deep_merge(existing, src)
            if category_name:
                existing.setdefault("category_files", [])
                if category_name not in existing["category_files"]:
                    existing["category_files"].append(category_name)

    return base_list


def _load_yaml_file(path: Path) -> Dict[str, Any]:
    """
    Load a single YAML file as dict. Non-dicts yield {}.
    """
    if not path.is_file():
        return {}
    with path.open("r", encoding="utf-8") as f:
        data = yaml.safe_load(f) or {}
    if not isinstance(data, dict):
        return {}
    return data


def _load_layer_dir(
    config_dir: Path,
    skip_filename: str | None = None,
) -> Dict[str, Any]:
    """
    Load all *.yml/*.yaml from a directory as layered defaults.

    - skip_filename: a file name (e.g. "config.yaml") that should be
      ignored (e.g. the user config).

    Returns:
        {
            "directories": {...},
            "repositories": [...],
        }
    """
    defaults: Dict[str, Any] = {"directories": {}, "repositories": []}

    if not config_dir.is_dir():
        return defaults

    yaml_files = [
        p
        for p in config_dir.iterdir()
        if p.is_file()
        and p.suffix.lower() in (".yml", ".yaml")
        and (skip_filename is None or p.name != skip_filename)
    ]
    if not yaml_files:
        return defaults

    yaml_files.sort(key=lambda p: p.name)

    for path in yaml_files:
        data = _load_yaml_file(path)
        category_name = path.stem  # file name without .yml/.yaml

        dirs = data.get("directories")
        if isinstance(dirs, dict):
            defaults.setdefault("directories", {})
            _deep_merge(defaults["directories"], dirs)

        repos = data.get("repositories")
        if isinstance(repos, list):
            defaults.setdefault("repositories", [])
            _merge_repo_lists(
                defaults["repositories"],
                repos,
                category_name=category_name,
            )

    return defaults


def _load_defaults_from_package_or_project() -> Dict[str, Any]:
    """
    Fallback: load default configs from various possible install or development
    layouts (pip-installed, editable install, source repo with src/ layout).
    """
    try:
        import pkgmgr  # type: ignore
    except Exception:
        return {"directories": {}, "repositories": []}

    pkg_root = Path(pkgmgr.__file__).resolve().parent
    roots = set()

    # Case 1: installed package (site-packages/pkgmgr)
    roots.add(pkg_root)

    # Case 2: parent directory (site-packages/, src/)
    roots.add(pkg_root.parent)

    # Case 3: src-layout during development:
    #   repo_root/src/pkgmgr -> repo_root
    parent = pkg_root.parent
    if parent.name == "src":
        roots.add(parent.parent)

    # Candidate config dirs
    candidates = []
    for root in roots:
        candidates.append(root / "config_defaults")
        candidates.append(root / "config")

    for cand in candidates:
        defaults = _load_layer_dir(cand, skip_filename=None)
        if defaults["directories"] or defaults["repositories"]:
            return defaults

    return {"directories": {}, "repositories": []}


# ---------------------------------------------------------------------------
# Main function
# ---------------------------------------------------------------------------

def load_config(user_config_path: str) -> Dict[str, Any]:
    """
    Load and merge configuration for pkgmgr.

    Steps:
    1. Determine ~/.config/pkgmgr/ (or the directory of user_config_path).
    2. Load all *.yml/*.yaml there (except the user config itself) as
       defaults / category layers.
    3. If nothing was found there, fall back to the package / project.
    4. Load the user config file itself (if present).
    5. Merge:
       - directories: deep-merge (defaults <- user)
       - repositories: _merge_repo_lists (defaults <- user)
    """
    user_config_path_expanded = os.path.expanduser(user_config_path)
    user_cfg_path = Path(user_config_path_expanded)

    config_dir = user_cfg_path.parent
    if not str(config_dir):
        # Fallback in case someone passes just "config.yaml"
        config_dir = Path(os.path.expanduser("~/.config/pkgmgr"))
    config_dir.mkdir(parents=True, exist_ok=True)

    user_cfg_name = user_cfg_path.name

    # 1+2) Defaults / category layers from the user directory
    defaults = _load_layer_dir(config_dir, skip_filename=user_cfg_name)

    # 3) If nothing was found there, fall back to the package / project
    if not defaults["directories"] and not defaults["repositories"]:
        defaults = _load_defaults_from_package_or_project()

    defaults.setdefault("directories", {})
    defaults.setdefault("repositories", [])

    # 4) User config
    user_cfg: Dict[str, Any] = {}
    if user_cfg_path.is_file():
        user_cfg = _load_yaml_file(user_cfg_path)
    user_cfg.setdefault("directories", {})
    user_cfg.setdefault("repositories", [])

    # 5) Merge: directories deep-merge, repositories list-merge
    merged: Dict[str, Any] = {}

    # directories
    merged["directories"] = {}
    _deep_merge(merged["directories"], defaults["directories"])
    _deep_merge(merged["directories"], user_cfg["directories"])

    # repositories
    merged["repositories"] = []
    _merge_repo_lists(merged["repositories"], defaults["repositories"], category_name=None)
    _merge_repo_lists(merged["repositories"], user_cfg["repositories"], category_name=None)

    # Other top-level keys (if any)
    other_keys = (set(defaults.keys()) | set(user_cfg.keys())) - {
        "directories",
        "repositories",
    }
    for key in other_keys:
        base_val = defaults.get(key)
        override_val = user_cfg.get(key)
        if isinstance(base_val, dict) and isinstance(override_val, dict):
            merged[key] = _deep_merge(dict(base_val), override_val)
        elif override_val is not None:
            merged[key] = override_val
        else:
            merged[key] = base_val

    return merged
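A layering sketch (illustrative; the category file name and repository entry are made up). A file tools.yaml next to config.yaml becomes the category "tools" for every repository it contributes:

from pkgmgr.core.config.load import load_config

# ~/.config/pkgmgr/tools.yaml (category layer):
#   repositories:
#     - provider: github.com
#       account: alice
#       repository: tool
#
# ~/.config/pkgmgr/config.yaml (user config) may then override single fields
# of the same repo without removing it from the "tools" category.
config = load_config("~/.config/pkgmgr/config.yaml")
for repo in config["repositories"]:
    print(repo.get("repository"), repo.get("category_files", []))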
9
src/pkgmgr/core/config/save.py
Normal file
@@ -0,0 +1,9 @@
import yaml
import os

def save_user_config(user_config, USER_CONFIG_PATH: str):
    """Save the user configuration to USER_CONFIG_PATH."""
    os.makedirs(os.path.dirname(USER_CONFIG_PATH), exist_ok=True)
    with open(USER_CONFIG_PATH, 'w') as f:
        yaml.dump(user_config, f)
    print(f"User configuration updated in {USER_CONFIG_PATH}.")
92
src/pkgmgr/core/git/__init__.py
Normal file
@@ -0,0 +1,92 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Lightweight helper functions around Git commands.

These helpers are intentionally small wrappers so that higher-level
logic (release, version, changelog) does not have to deal with the
details of subprocess handling.
"""

from __future__ import annotations

import subprocess
from typing import List, Optional


class GitError(RuntimeError):
    """Raised when a Git command fails in an unexpected way."""


def run_git(args: List[str], cwd: str = ".") -> str:
    """
    Run a Git command and return its stdout as a stripped string.

    Raises GitError if the command fails.
    """
    cmd = ["git"] + args
    try:
        result = subprocess.run(
            cmd,
            cwd=cwd,
            check=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
        )
    except subprocess.CalledProcessError as exc:
        raise GitError(
            f"Git command failed in {cwd!r}: {' '.join(cmd)}\n"
            f"Exit code: {exc.returncode}\n"
            f"STDOUT:\n{exc.stdout}\n"
            f"STDERR:\n{exc.stderr}"
        ) from exc

    return result.stdout.strip()


def get_tags(cwd: str = ".") -> List[str]:
    """
    Return a list of all tags in the repository in `cwd`.

    If there are no tags, an empty list is returned.
    """
    try:
        output = run_git(["tag"], cwd=cwd)
    except GitError as exc:
        # If `cwd` is not a git repository, surface a clear error.
        # You can decide later if you want to treat this differently.
        if "not a git repository" in str(exc):
            raise
        # Otherwise treat the failure as "no tags": an empty list.
        return []

    if not output:
        return []

    return [line.strip() for line in output.splitlines() if line.strip()]


def get_head_commit(cwd: str = ".") -> Optional[str]:
    """
    Return the current HEAD commit hash, or None if it cannot be determined.
    """
    try:
        output = run_git(["rev-parse", "HEAD"], cwd=cwd)
    except GitError:
        return None
    return output or None


def get_current_branch(cwd: str = ".") -> Optional[str]:
    """
    Return the current branch name, or None if it cannot be determined.

    Note: In detached HEAD state this will return 'HEAD'.
    """
    try:
        output = run_git(["rev-parse", "--abbrev-ref", "HEAD"], cwd=cwd)
    except GitError:
        return None
    return output or None
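Usage sketch (illustrative; run inside any Git checkout):

from pkgmgr.core.git import GitError, get_current_branch, get_tags, run_git

try:
    print(get_current_branch("."))            # e.g. "main", or "HEAD" when detached
    print(get_tags("."))                      # [] for a repository without tags
    print(run_git(["status", "--short"], cwd="."))
except GitError as exc:
    print(f"Not a usable Git checkout: {exc}")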
0
src/pkgmgr/core/repository/__init__.py
Normal file
15
src/pkgmgr/core/repository/dir.py
Normal file
@@ -0,0 +1,15 @@
import sys
import os

def get_repo_dir(repositories_base_dir: str, repo: dict) -> str:
    try:
        return os.path.join(repositories_base_dir, repo.get("provider"), repo.get("account"), repo.get("repository"))
    except TypeError as e:
        if repositories_base_dir:
            print(f"Error: {e}\nThe repository {repo} seems to be configured incorrectly.\nPlease fix its configuration.")
            for key in ["provider", "account", "repository"]:
                if not repo.get(key, False):
                    print(f"Key '{key}' is missing.")
        else:
            print(f"Error: {e}\nThe base directory {repositories_base_dir!r} seems to be configured incorrectly.\nPlease fix its configuration.")
        sys.exit(3)
12
src/pkgmgr/core/repository/identifier.py
Normal file
@@ -0,0 +1,12 @@
def get_repo_identifier(repo, all_repos):
    """
    Return a unique identifier for the repository.
    If the repository name is unique among all_repos, return the repository name;
    otherwise, return 'provider/account/repository'.
    """
    repo_name = repo.get("repository")
    count = sum(1 for r in all_repos if r.get("repository") == repo_name)
    if count == 1:
        return repo_name
    else:
        return f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
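The uniqueness rule in practice (made-up repos):

from pkgmgr.core.repository.identifier import get_repo_identifier

a = {"provider": "github.com", "account": "alice", "repository": "tool"}
b = {"provider": "gitlab.com", "account": "bob", "repository": "tool"}
print(get_repo_identifier(a, [a]))     # "tool" (name is unique)
print(get_repo_identifier(a, [a, b]))  # "github.com/alice/tool" (name collides)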
3
src/pkgmgr/core/repository/ignored.py
Normal file
@@ -0,0 +1,3 @@
def filter_ignored(repos):
    """Filter out repositories that have 'ignore' set to True."""
    return [r for r in repos if not r.get("ignore", False)]
28
src/pkgmgr/core/repository/resolve.py
Normal file
@@ -0,0 +1,28 @@
import os

def resolve_repos(identifiers: list, all_repos: list):
    """
    Given a list of identifier strings, return a list of repository configs.
    An identifier can be:
      - the full identifier "provider/account/repository"
      - the repository name (if unique among all_repos)
      - the alias (if defined)
    """
    selected = []
    for ident in identifiers:
        matches = []
        for repo in all_repos:
            full_id = f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
            if ident == full_id:
                matches.append(repo)
            elif ident == repo.get("alias"):
                matches.append(repo)
            elif ident == repo.get("repository"):
                # Only match if the repository name is unique among all_repos.
                if sum(1 for r in all_repos if r.get("repository") == ident) == 1:
                    matches.append(repo)
        if not matches:
            print(f"Identifier '{ident}' did not match any repository in config.")
        else:
            selected.extend(matches)
    return selected
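A resolution sketch (made-up repos) showing all three identifier forms:

from pkgmgr.core.repository.resolve import resolve_repos

repos = [
    {"provider": "github.com", "account": "alice", "repository": "tool", "alias": "t"},
    {"provider": "gitlab.com", "account": "bob", "repository": "tool"},
]
print(len(resolve_repos(["github.com/alice/tool"], repos)))  # 1 (full identifier)
print(len(resolve_repos(["t"], repos)))                      # 1 (alias)
print(len(resolve_repos(["tool"], repos)))                   # 0 (name not unique)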
200
src/pkgmgr/core/repository/selected.py
Normal file
@@ -0,0 +1,200 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from __future__ import annotations

import os
import re
from typing import Any, Dict, List, Sequence

from pkgmgr.core.repository.resolve import resolve_repos
from pkgmgr.core.repository.ignored import filter_ignored

Repository = Dict[str, Any]


def _compile_maybe_regex(pattern: str):
    """
    If pattern is of the form /.../, return a compiled regex (case-insensitive).
    Otherwise return None.
    """
    if len(pattern) >= 2 and pattern.startswith("/") and pattern.endswith("/"):
        try:
            return re.compile(pattern[1:-1], re.IGNORECASE)
        except re.error:
            return None
    return None


def _match_pattern(value: str, pattern: str) -> bool:
    """
    Match a value against a pattern that may be a substring or /regex/.
    """
    if not pattern:
        return True
    regex = _compile_maybe_regex(pattern)
    if regex:
        return bool(regex.search(value))
    return pattern.lower() in value.lower()


def _match_any(values: Sequence[str], pattern: str) -> bool:
    """
    Return True if any of the values matches the pattern.
    """
    for v in values:
        if _match_pattern(v, pattern):
            return True
    return False


def _build_identifier_string(repo: Repository) -> str:
    """
    Build a combined identifier string for string-based filtering.
    """
    provider = str(repo.get("provider", ""))
    account = str(repo.get("account", ""))
    repository = str(repo.get("repository", ""))
    alias = str(repo.get("alias", ""))
    description = str(repo.get("description", ""))
    directory = str(repo.get("directory", ""))

    parts = [
        provider,
        account,
        repository,
        alias,
        f"{provider}/{account}/{repository}",
        description,
        directory,
    ]
    return " ".join(p for p in parts if p)


def _apply_filters(
    repos: List[Repository],
    string_pattern: str,
    category_patterns: List[str],
    tag_patterns: List[str],
) -> List[Repository]:
    if not string_pattern and not category_patterns and not tag_patterns:
        return repos

    filtered: List[Repository] = []

    for repo in repos:
        # String filter
        if string_pattern:
            ident_str = _build_identifier_string(repo)
            if not _match_pattern(ident_str, string_pattern):
                continue

        # Category filter: only real categories, NOT tags
        if category_patterns:
            cats: List[str] = []
            cats.extend(map(str, repo.get("category_files", [])))
            if "category" in repo:
                cats.append(str(repo["category"]))

            if not cats:
                continue

            ok = True
            for pat in category_patterns:
                if not _match_any(cats, pat):
                    ok = False
                    break
            if not ok:
                continue

        # Tag filter: YAML tags only
        if tag_patterns:
            tags: List[str] = list(map(str, repo.get("tags", [])))
            if not tags:
                continue

            ok = True
            for pat in tag_patterns:
                if not _match_any(tags, pat):
                    ok = False
                    break
            if not ok:
                continue

        filtered.append(repo)

    return filtered


def _maybe_filter_ignored(args, repos: List[Repository]) -> List[Repository]:
    """
    Apply ignore filtering unless the caller explicitly opted to include ignored
    repositories (via args.include_ignored).

    Note: this helper is used only for *implicit* selections (all / filters /
    by-directory). For *explicit* identifiers we do NOT filter ignored repos,
    so the user can still target them directly if desired.
    """
    include_ignored: bool = bool(getattr(args, "include_ignored", False))
    if include_ignored:
        return repos
    return filter_ignored(repos)


def get_selected_repos(args, all_repositories: List[Repository]) -> List[Repository]:
    """
    Compute the list of repositories selected by CLI arguments.

    Modes:
    - If identifiers are given: select via resolve_repos() from all_repositories.
      Ignored repositories are *not* filtered here, so explicit identifiers
      always win.
    - Else if any of --category/--string/--tag is used: start from
      all_repositories, apply filters and then drop ignored repos.
    - Else if --all is set: select all_repositories and then drop ignored repos.
    - Else: try to select the repository of the current working directory
      and then drop it if it is ignored.

    The ignore filter can be bypassed by setting args.include_ignored = True
    (e.g. via a CLI flag --include-ignored).
    """
    identifiers: List[str] = getattr(args, "identifiers", []) or []
    use_all: bool = bool(getattr(args, "all", False))
    category_patterns: List[str] = getattr(args, "category", []) or []
    string_pattern: str = getattr(args, "string", "") or ""
    tag_patterns: List[str] = getattr(args, "tag", []) or []

    has_filters = bool(category_patterns or string_pattern or tag_patterns)

    # 1) Explicit identifiers win and bypass ignore filtering
    if identifiers:
        base = resolve_repos(identifiers, all_repositories)
        return _apply_filters(base, string_pattern, category_patterns, tag_patterns)

    # 2) Filter-only mode: start from all repositories
    if has_filters:
        base = _apply_filters(
            list(all_repositories),
            string_pattern,
            category_patterns,
            tag_patterns,
        )
        return _maybe_filter_ignored(args, base)

    # 3) --all (no filters): all repos
    if use_all:
        base = list(all_repositories)
        return _maybe_filter_ignored(args, base)

    # 4) Fallback: try to select the repository of the current working directory
    cwd = os.path.abspath(os.getcwd())
    by_dir = [
        repo
        for repo in all_repositories
        if os.path.abspath(str(repo.get("directory", ""))) == cwd
    ]
    if by_dir:
        return _maybe_filter_ignored(args, by_dir)

    # No specific match -> empty list
    return []
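A selection sketch (made-up repos) contrasting implicit and explicit selection:

from types import SimpleNamespace

from pkgmgr.core.repository.selected import get_selected_repos

repos = [
    {"provider": "github.com", "account": "alice", "repository": "tool", "tags": ["cli"]},
    {"provider": "github.com", "account": "alice", "repository": "lib", "ignore": True},
]

# --all drops ignored repos ...
args = SimpleNamespace(identifiers=[], all=True, category=[], string="", tag=[])
print([r["repository"] for r in get_selected_repos(args, repos)])  # ['tool']

# ... while explicit identifiers still reach them.
args = SimpleNamespace(identifiers=["lib"], all=False, category=[], string="", tag=[])
print([r["repository"] for r in get_selected_repos(args, repos)])  # ['lib']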
107
src/pkgmgr/core/repository/verify.py
Normal file
@@ -0,0 +1,107 @@
import subprocess

def verify_repository(repo, repo_dir, mode="local", no_verification=False):
    """
    Verify the repository based on its 'verified' field.

    The 'verified' field can be a dictionary with the following keys:
      commit:   The expected commit hash.
      gpg_keys: A list of valid GPG key IDs (at least one must match the signing key).

    If mode == "pull", the remote HEAD commit is checked via "git ls-remote origin HEAD".
    Otherwise (mode "local", used for install and clone), the local HEAD commit is checked via "git rev-parse HEAD".

    Returns a tuple:
      (verified_ok, error_details, commit_hash, signing_key)
      - verified_ok:   True if the verification passed (or no verification info is set), False otherwise.
      - error_details: A list of error messages for any failed checks.
      - commit_hash:   The obtained commit hash.
      - signing_key:   The GPG key ID that signed the latest commit (obtained via "git log -1 --format=%GK").
    """
    verified_info = repo.get("verified")
    if not verified_info:
        # Nothing to verify.
        commit_hash = ""
        signing_key = ""
        if mode == "pull":
            try:
                result = subprocess.run("git ls-remote origin HEAD", cwd=repo_dir, shell=True, check=True,
                                        stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
                commit_hash = result.stdout.split()[0].strip()
            except Exception:
                commit_hash = ""
        else:
            try:
                result = subprocess.run("git rev-parse HEAD", cwd=repo_dir, shell=True, check=True,
                                        stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
                commit_hash = result.stdout.strip()
            except Exception:
                commit_hash = ""
        try:
            result = subprocess.run(["git", "log", "-1", "--format=%GK"], cwd=repo_dir, shell=False, check=True,
                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
            signing_key = result.stdout.strip()
        except Exception:
            signing_key = ""
        return True, [], commit_hash, signing_key

    expected_commit = None
    expected_gpg_keys = None
    if isinstance(verified_info, dict):
        expected_commit = verified_info.get("commit")
        expected_gpg_keys = verified_info.get("gpg_keys")
    else:
        # If verified is a plain string, treat it as the expected commit.
        expected_commit = verified_info

    error_details = []

    # Get the commit hash according to the mode.
    commit_hash = ""
    if mode == "pull":
        try:
            result = subprocess.run("git ls-remote origin HEAD", cwd=repo_dir, shell=True, check=True,
                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
            commit_hash = result.stdout.split()[0].strip()
        except Exception as e:
            error_details.append(f"Error retrieving remote commit: {e}")
    else:
        try:
            result = subprocess.run("git rev-parse HEAD", cwd=repo_dir, shell=True, check=True,
                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
            commit_hash = result.stdout.strip()
        except Exception as e:
            error_details.append(f"Error retrieving local commit: {e}")

    # Get the signing key using "git log -1 --format=%GK"
    signing_key = ""
    try:
        result = subprocess.run(["git", "log", "-1", "--format=%GK"], cwd=repo_dir, shell=False, check=True,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        signing_key = result.stdout.strip()
    except Exception as e:
        error_details.append(f"Error retrieving signing key: {e}")

    commit_check_passed = True
    gpg_check_passed = True

    if expected_commit:
        if commit_hash != expected_commit:
            commit_check_passed = False
            error_details.append(f"Expected commit: {expected_commit}, found: {commit_hash}")

    if expected_gpg_keys:
        if signing_key not in expected_gpg_keys:
            gpg_check_passed = False
            error_details.append(f"Expected one of GPG keys: {expected_gpg_keys}, found: {signing_key}")

    if expected_commit and expected_gpg_keys:
        verified_ok = commit_check_passed and gpg_check_passed
    elif expected_commit:
        verified_ok = commit_check_passed
    elif expected_gpg_keys:
        verified_ok = gpg_check_passed
    else:
        verified_ok = True

    return verified_ok, error_details, commit_hash, signing_key
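Calling sketch (illustrative; the commit hash and key ID are placeholders, and the call must run inside a Git checkout):

from pkgmgr.core.repository.verify import verify_repository

repo = {"verified": {"commit": "<expected-sha>", "gpg_keys": ["<KEYID>"]}}
ok, errors, commit_hash, signing_key = verify_repository(repo, repo_dir=".", mode="local")
if not ok:
    for line in errors:
        print(line)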
0
src/pkgmgr/core/version/__init__.py
Normal file
146
src/pkgmgr/core/version/semver.py
Normal file
@@ -0,0 +1,146 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Utilities for working with semantic versions (SemVer).

This module is intentionally small and self-contained so it can be
used by release/version/changelog commands without pulling in any
heavy dependencies.
"""

from __future__ import annotations

from dataclasses import dataclass
from typing import Iterable, List, Optional, Tuple


@dataclass(frozen=True, order=True)
class SemVer:
    """Simple semantic version representation (MAJOR.MINOR.PATCH)."""

    major: int
    minor: int
    patch: int

    @classmethod
    def parse(cls, value: str) -> "SemVer":
        """
        Parse a version string like '1.2.3' or 'v1.2.3' into a SemVer.

        Raises ValueError if the format is invalid.
        """
        text = value.strip()
        if text.startswith("v"):
            text = text[1:]

        parts = text.split(".")
        if len(parts) != 3:
            raise ValueError(f"Not a valid semantic version: {value!r}")

        try:
            major = int(parts[0])
            minor = int(parts[1])
            patch = int(parts[2])
        except ValueError as exc:
            raise ValueError(f"Semantic version components must be integers: {value!r}") from exc

        if major < 0 or minor < 0 or patch < 0:
            raise ValueError(f"Semantic version components must be non-negative: {value!r}")

        return cls(major=major, minor=minor, patch=patch)

    def to_tag(self, with_prefix: bool = True) -> str:
        """
        Convert the version into a tag string: 'v1.2.3' (default) or '1.2.3'.
        """
        core = f"{self.major}.{self.minor}.{self.patch}"
        return f"v{core}" if with_prefix else core

    def __str__(self) -> str:
        return self.to_tag(with_prefix=False)


def is_semver_tag(tag: str) -> bool:
    """
    Return True if the given tag string looks like a SemVer tag.

    Accepts both '1.2.3' and 'v1.2.3' formats.
    """
    try:
        SemVer.parse(tag)
        return True
    except ValueError:
        return False


def extract_semver_from_tags(
    tags: Iterable[str],
    major: Optional[int] = None,
    minor: Optional[int] = None,
) -> List[Tuple[str, SemVer]]:
    """
    Filter and parse tags that match SemVer, optionally restricted
    to a specific MAJOR or MAJOR.MINOR line.

    Returns a list of (tag_string, SemVer) pairs.
    """
    result: List[Tuple[str, SemVer]] = []
    for tag in tags:
        try:
            ver = SemVer.parse(tag)
        except ValueError:
            # Ignore non-SemVer tags
            continue

        if major is not None and ver.major != major:
            continue
        if minor is not None and ver.minor != minor:
            continue

        result.append((tag, ver))

    return result


def find_latest_version(
    tags: Iterable[str],
    major: Optional[int] = None,
    minor: Optional[int] = None,
) -> Optional[Tuple[str, SemVer]]:
    """
    Find the latest SemVer tag from the given tags.

    If `major` is given, only consider that MAJOR line.
    If `minor` is given as well, only consider that MAJOR.MINOR line.

    Returns a tuple (tag_string, SemVer) or None if no SemVer tag matches.
    """
    candidates = extract_semver_from_tags(tags, major=major, minor=minor)
    if not candidates:
        return None

    # SemVer is orderable thanks to dataclass(order=True)
    tag, ver = max(candidates, key=lambda item: item[1])
    return tag, ver


def bump_major(version: SemVer) -> SemVer:
    """
    Bump MAJOR: MAJOR+1.0.0
    """
    return SemVer(major=version.major + 1, minor=0, patch=0)


def bump_minor(version: SemVer) -> SemVer:
    """
    Bump MINOR: MAJOR.MINOR+1.0
    """
    return SemVer(major=version.major, minor=version.minor + 1, patch=0)


def bump_patch(version: SemVer) -> SemVer:
    """
    Bump PATCH: MAJOR.MINOR.PATCH+1
    """
    return SemVer(major=version.major, minor=version.minor, patch=version.patch + 1)
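Usage sketch (illustrative tag list):

from pkgmgr.core.version.semver import SemVer, bump_minor, find_latest_version

tags = ["v1.0.0", "v1.2.3", "v2.0.0", "not-a-version"]
print(find_latest_version(tags))           # ('v2.0.0', SemVer(major=2, minor=0, patch=0))
print(find_latest_version(tags, major=1))  # ('v1.2.3', SemVer(major=1, minor=2, patch=3))
print(bump_minor(SemVer.parse("v1.2.3")).to_tag())  # 'v1.3.0'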
235
src/pkgmgr/core/version/source.py
Normal file
@@ -0,0 +1,235 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Helpers to extract version information from various packaging files.

All functions take a repository directory and return either a version
string or None if the corresponding file or version field is missing.

Supported sources:
- pyproject.toml    (PEP 621, [project].version)
- flake.nix         (version = "X.Y.Z";)
- PKGBUILD          (pkgver / pkgrel)
- debian/changelog  (first entry line: package (version) ...)
- RPM spec file     (package-manager.spec: Version / Release)
- Ansible Galaxy    (galaxy.yml or meta/main.yml)
"""

from __future__ import annotations

import os
import re
from typing import Optional

import yaml


def read_pyproject_version(repo_dir: str) -> Optional[str]:
    """
    Read the version from pyproject.toml in repo_dir, if present.

    Expects a PEP 621-style [project] table with a 'version' field.
    Returns the version string or None.
    """
    path = os.path.join(repo_dir, "pyproject.toml")
    if not os.path.exists(path):
        return None

    try:
        try:
            import tomllib  # Python 3.11+
        except ModuleNotFoundError:  # pragma: no cover
            tomllib = None

        if tomllib is None:
            return None

        with open(path, "rb") as f:
            data = tomllib.load(f)

        project = data.get("project", {})
        if isinstance(project, dict):
            version = project.get("version")
            if isinstance(version, str):
                return version.strip() or None
    except Exception:
        # Intentionally swallow errors and fall back to None.
        return None

    return None


def read_flake_version(repo_dir: str) -> Optional[str]:
    """
    Read the version from flake.nix in repo_dir, if present.

    Looks for a line like:
        version = "1.2.3";
    and returns the string inside the quotes.
    """
    path = os.path.join(repo_dir, "flake.nix")
    if not os.path.exists(path):
        return None

    try:
        with open(path, "r", encoding="utf-8") as f:
            text = f.read()
    except Exception:
        return None

    match = re.search(r'version\s*=\s*"([^"]+)"', text)
    if not match:
        return None
    version = match.group(1).strip()
    return version or None


def read_pkgbuild_version(repo_dir: str) -> Optional[str]:
    """
    Read the version from PKGBUILD in repo_dir, if present.

    Expects:
        pkgver=1.2.3
        pkgrel=1

    Returns either "1.2.3-1" (if both are present) or just "1.2.3".
    """
    path = os.path.join(repo_dir, "PKGBUILD")
    if not os.path.exists(path):
        return None

    try:
        with open(path, "r", encoding="utf-8") as f:
            text = f.read()
    except Exception:
        return None

    ver_match = re.search(r"^pkgver\s*=\s*(.+)$", text, re.MULTILINE)
    if not ver_match:
        return None
    pkgver = ver_match.group(1).strip()

    rel_match = re.search(r"^pkgrel\s*=\s*(.+)$", text, re.MULTILINE)
    if rel_match:
        pkgrel = rel_match.group(1).strip()
        if pkgrel:
            return f"{pkgver}-{pkgrel}"

    return pkgver or None


def read_debian_changelog_version(repo_dir: str) -> Optional[str]:
    """
    Read the latest Debian version from debian/changelog in repo_dir, if present.

    The first non-empty line typically looks like:
        package-name (1.2.3-1) unstable; urgency=medium

    We extract the text inside the first parentheses.
    """
    path = os.path.join(repo_dir, "debian", "changelog")
    if not os.path.exists(path):
        return None

    try:
        with open(path, "r", encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                if not line:
                    continue
                match = re.search(r"\(([^)]+)\)", line)
                if match:
                    version = match.group(1).strip()
                    return version or None
                break
    except Exception:
        return None

    return None


def read_spec_version(repo_dir: str) -> Optional[str]:
    """
    Read the version from an RPM spec file.

    For now, we assume a fixed file name 'package-manager.spec'
    in repo_dir with lines like:

        Version: 1.2.3
        Release: 1%{?dist}

    Returns either "1.2.3-1" (if Release is present) or "1.2.3".
    Any RPM macro suffix like '%{?dist}' is stripped from the release.
    """
    path = os.path.join(repo_dir, "package-manager.spec")
    if not os.path.exists(path):
        return None

    try:
        with open(path, "r", encoding="utf-8") as f:
            text = f.read()
    except Exception:
        return None

    ver_match = re.search(r"^Version:\s*(.+)$", text, re.MULTILINE)
    if not ver_match:
        return None
    version = ver_match.group(1).strip()

    rel_match = re.search(r"^Release:\s*(.+)$", text, re.MULTILINE)
    if rel_match:
        release_raw = rel_match.group(1).strip()
        # Strip a common RPM macro suffix like %... (e.g. 1%{?dist})
        release = release_raw.split("%", 1)[0].strip()
        # Also strip anything after the first whitespace, just in case
        release = release.split(" ", 1)[0].strip()
        if release:
            return f"{version}-{release}"

    return version or None


def read_ansible_galaxy_version(repo_dir: str) -> Optional[str]:
    """
    Read the version from Ansible Galaxy metadata, if present.

    Supported locations:
    - galaxy.yml    (preferred for modern roles/collections)
    - meta/main.yml (legacy style roles; uses galaxy_info.version or version)
    """
    # 1) galaxy.yml in repo root
    galaxy_path = os.path.join(repo_dir, "galaxy.yml")
    if os.path.exists(galaxy_path):
        try:
            with open(galaxy_path, "r", encoding="utf-8") as f:
                data = yaml.safe_load(f) or {}
            version = data.get("version")
            if isinstance(version, str) and version.strip():
                return version.strip()
        except Exception:
            # Ignore parse errors and fall through to meta/main.yml
            pass

    # 2) meta/main.yml (classic Ansible role)
    meta_path = os.path.join(repo_dir, "meta", "main.yml")
    if os.path.exists(meta_path):
        try:
            with open(meta_path, "r", encoding="utf-8") as f:
                data = yaml.safe_load(f) or {}

            # Preferred: galaxy_info.version
            galaxy_info = data.get("galaxy_info") or {}
            if isinstance(galaxy_info, dict):
                version = galaxy_info.get("version")
                if isinstance(version, str) and version.strip():
                    return version.strip()

            # Fallback: top-level 'version'
            version = data.get("version")
            if isinstance(version, str) and version.strip():
                return version.strip()
        except Exception:
            return None

    return None
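A probing sketch (illustrative): run every reader against one checkout and report whichever packaging files carry a version.

from pkgmgr.core.version import source

READERS = (
    source.read_pyproject_version,
    source.read_flake_version,
    source.read_pkgbuild_version,
    source.read_debian_changelog_version,
    source.read_spec_version,
    source.read_ansible_galaxy_version,
)

for reader in READERS:
    version = reader(".")
    if version:
        print(f"{reader.__name__}: {version}")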