Refactor pkgmgr CLI into modular core and add E2E tests for config/release/make/tools (see ChatGPT conversation 2025-12-08 https://chatgpt.com/share/6936ffa5-4868-800f-ab63-6e367093adce)

This commit is contained in:
Kevin Veen-Birkenbach
2025-12-08 17:41:27 +01:00
parent ccf3b1aa3c
commit 0b96270f78
18 changed files with 1612 additions and 766 deletions
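For orientation, the public surface introduced by the new modules can be summarized with the imports below. This is a reading aid reconstructed from the hunks that follow; only names that actually appear in this diff are listed, and no further modules are implied.

from pkgmgr.cli_core import CLIContext, create_parser, dispatch_command
from pkgmgr.cli_core.proxy import (
    PROXY_COMMANDS,
    register_proxy_commands,
    maybe_handle_proxy,
)
from pkgmgr.cli_core.commands import (
    handle_repos_command,
    handle_config,
    handle_tools_command,
    handle_release,
    handle_version,
    handle_make,
)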

View File

@@ -1,84 +1,19 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import os
import yaml
import argparse
import json
import sys
from typing import Optional
from pkgmgr.load_config import load_config
from pkgmgr.cli_core import CLIContext, create_parser, dispatch_command
# Define configuration file paths.
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
USER_CONFIG_PATH = os.path.join(PROJECT_ROOT, "config", "config.yaml")
from pkgmgr.clone_repos import clone_repos
from pkgmgr.config_init import config_init
from pkgmgr.create_ink import create_ink
from pkgmgr.deinstall_repos import deinstall_repos
from pkgmgr.delete_repos import delete_repos
from pkgmgr.exec_proxy_command import exec_proxy_command
from pkgmgr.filter_ignored import filter_ignored
from pkgmgr.get_repo_identifier import get_repo_identifier
from pkgmgr.get_selected_repos import get_selected_repos
from pkgmgr.install_repos import install_repos
from pkgmgr.interactive_add import interactive_add
from pkgmgr.list_repositories import list_repositories
from pkgmgr.load_config import load_config
from pkgmgr.resolve_repos import resolve_repos
from pkgmgr.run_command import run_command
from pkgmgr.save_user_config import save_user_config
from pkgmgr.show_config import show_config
from pkgmgr.status_repos import status_repos
from pkgmgr.update_repos import update_repos
# Commands proxied by package-manager
PROXY_COMMANDS = {
"git": [
"pull",
"push",
"diff",
"add",
"show",
"checkout",
"clone",
"reset",
"revert",
"rebase",
"commit",
],
"docker": [
"start",
"stop",
"build",
],
"docker compose": [
"up",
"down",
"exec",
"ps",
"restart",
],
}
class SortedSubParsersAction(argparse._SubParsersAction):
def add_parser(self, name, **kwargs):
parser = super().add_parser(name, **kwargs)
# Sort the list of subparsers each time one is added
self._choices_actions.sort(key=lambda a: a.dest)
return parser
def main() -> None:
CONFIG_MERGED = load_config(USER_CONFIG_PATH)
REPOSITORIES_BASE_DIR = os.path.expanduser(
CONFIG_MERGED["directories"]["repositories"]
)
ALL_REPOSITORIES = CONFIG_MERGED["repositories"]
BINARIES_DIRECTORY = os.path.expanduser(
CONFIG_MERGED["directories"]["binaries"]
)
description_text = """\
DESCRIPTION_TEXT = """\
\033[1;32mPackage Manager 🤖📦\033[0m
\033[3mKevin's Package Manager is a multi-repository, multi-package, and multi-format
development tool crafted by and designed for:\033[0m
@@ -126,697 +61,36 @@ For detailed help on each command, use:
\033[1mpkgmgr <command> --help\033[0m
"""
parser = argparse.ArgumentParser(
description=description_text,
formatter_class=argparse.RawTextHelpFormatter,
def main() -> None:
# Load merged configuration
config_merged = load_config(USER_CONFIG_PATH)
repositories_base_dir = os.path.expanduser(
config_merged["directories"]["repositories"]
)
subparsers = parser.add_subparsers(
dest="command", help="Subcommands", action=SortedSubParsersAction
binaries_dir = os.path.expanduser(
config_merged["directories"]["binaries"]
)
all_repositories = config_merged["repositories"]
ctx = CLIContext(
config_merged=config_merged,
repositories_base_dir=repositories_base_dir,
all_repositories=all_repositories,
binaries_dir=binaries_dir,
user_config_path=USER_CONFIG_PATH,
)
def add_identifier_arguments(subparser):
subparser.add_argument(
"identifiers",
nargs="*",
help=(
"Identifier(s) for repositories. "
"Default: Repository of current folder."
),
)
subparser.add_argument(
"--all",
action="store_true",
default=False,
help=(
"Apply the subcommand to all repositories in the config. "
"Some subcommands ask for confirmation. If you want to give this "
"confirmation for all repositories, pipe 'yes'. E.g: "
"yes | pkgmgr {subcommand} --all"
),
)
subparser.add_argument(
"--preview",
action="store_true",
help="Preview changes without executing commands",
)
subparser.add_argument(
"--list",
action="store_true",
help="List affected repositories (with preview or status)",
)
subparser.add_argument(
"-a",
"--args",
nargs=argparse.REMAINDER,
dest="extra_args",
help="Additional parameters to be attached.",
default=[],
)
def add_install_update_arguments(subparser):
add_identifier_arguments(subparser)
subparser.add_argument(
"-q",
"--quiet",
action="store_true",
help="Suppress warnings and info messages",
)
subparser.add_argument(
"--no-verification",
action="store_true",
default=False,
help="Disable verification via commit/gpg",
)
subparser.add_argument(
"--dependencies",
action="store_true",
help="Also pull and update dependencies",
)
subparser.add_argument(
"--clone-mode",
choices=["ssh", "https", "shallow"],
default="ssh",
help=(
"Specify the clone mode: ssh, https, or shallow "
"(HTTPS shallow clone; default: ssh)"
),
)
install_parser = subparsers.add_parser(
"install",
help="Setup repository/repositories alias links to executables",
)
add_install_update_arguments(install_parser)
update_parser = subparsers.add_parser(
"update", help="Update (pull + install) repository/repositories"
)
add_install_update_arguments(update_parser)
update_parser.add_argument(
"--system",
action="store_true",
help="Include system update commands",
)
deinstall_parser = subparsers.add_parser(
"deinstall", help="Remove alias links to repository/repositories"
)
add_identifier_arguments(deinstall_parser)
delete_parser = subparsers.add_parser(
"delete",
help="Delete repository/repositories alias links to executables",
)
add_identifier_arguments(delete_parser)
create_parser = subparsers.add_parser(
"create",
help=(
"Create new repository entries: add them to the config if not "
"already present, initialize the local repository, and push "
"remotely if --remote is set."
),
)
add_identifier_arguments(create_parser)
create_parser.add_argument(
"--remote",
action="store_true",
help="If set, add the remote and push the initial commit.",
)
status_parser = subparsers.add_parser(
"status", help="Show status for repository/repositories or system"
)
add_identifier_arguments(status_parser)
status_parser.add_argument(
"--system",
action="store_true",
help="Show system status",
)
config_parser = subparsers.add_parser("config", help="Manage configuration")
config_subparsers = config_parser.add_subparsers(
dest="subcommand", help="Config subcommands", required=True
)
config_show = config_subparsers.add_parser(
"show", help="Show configuration"
)
add_identifier_arguments(config_show)
config_add = config_subparsers.add_parser(
"add", help="Interactively add a new repository entry"
)
config_edit = config_subparsers.add_parser(
"edit", help="Edit configuration file with nano"
)
config_init_parser = config_subparsers.add_parser(
"init",
help=(
"Initialize user configuration by scanning the base directory"
),
)
config_delete = config_subparsers.add_parser(
"delete", help="Delete repository entry from user config"
)
add_identifier_arguments(config_delete)
config_ignore = config_subparsers.add_parser(
"ignore",
help="Set ignore flag for repository entries in user config",
)
add_identifier_arguments(config_ignore)
config_ignore.add_argument(
"--set",
choices=["true", "false"],
required=True,
help="Set ignore to true or false",
)
path_parser = subparsers.add_parser(
"path", help="Print the path(s) of repository/repositories"
)
add_identifier_arguments(path_parser)
explore_parser = subparsers.add_parser(
"explore", help="Open repository in Nautilus file manager"
)
add_identifier_arguments(explore_parser)
terminal_parser = subparsers.add_parser(
"terminal", help="Open repository in a new GNOME Terminal tab"
)
add_identifier_arguments(terminal_parser)
release_parser = subparsers.add_parser(
"release",
help=(
"Create a release for repository/ies by incrementing version "
"and updating the changelog."
),
)
release_parser.add_argument(
"release_type",
choices=["major", "minor", "patch"],
help="Type of version increment for the release (major, minor, patch).",
)
release_parser.add_argument(
"-m",
"--message",
default="",
help="Optional release message to add to the changelog and tag.",
)
add_identifier_arguments(release_parser)
# Version command: like other repo commands, supports identifiers + --all
version_parser = subparsers.add_parser(
"version",
help=(
"Show version information for repository/ies "
"(git tags, pyproject.toml, flake.nix, PKGBUILD, debian, spec, Ansible Galaxy)."
),
)
add_identifier_arguments(version_parser)
code_parser = subparsers.add_parser(
"code", help="Open repository workspace with VS Code"
)
add_identifier_arguments(code_parser)
list_parser = subparsers.add_parser(
"list", help="List all repositories with details and status"
)
list_parser.add_argument(
"--search",
default="",
help="Filter repositories that contain the given string",
)
list_parser.add_argument(
"--status",
type=str,
default="",
help="Filter repositories by status (case insensitive)",
)
shell_parser = subparsers.add_parser(
"shell", help="Execute a shell command in each repository"
)
add_identifier_arguments(shell_parser)
shell_parser.add_argument(
"-c",
"--command",
nargs=argparse.REMAINDER,
dest="shell_command",
help=(
"The shell command (and its arguments) to execute in each "
"repository"
),
default=[],
)
make_parser = subparsers.add_parser(
"make", help="Executes make commands"
)
add_identifier_arguments(make_parser)
make_subparsers = make_parser.add_subparsers(
dest="subcommand", help="Make subcommands", required=True
)
make_install = make_subparsers.add_parser(
"install", help="Executes the make install command"
)
add_identifier_arguments(make_install)
make_deinstall = make_subparsers.add_parser(
"deinstall", help="Executes the make deinstall command"
)
proxy_command_parsers = {}
for command, subcommands in PROXY_COMMANDS.items():
for subcommand in subcommands:
proxy_command_parsers[f"{command}_{subcommand}"] = (
subparsers.add_parser(
subcommand,
help=f"Proxies '{command} {subcommand}' to repository/ies",
description=(
f"Executes '{command} {subcommand}' for the "
"identified repos.\nTo recieve more help execute "
f"'{command} {subcommand} --help'"
),
formatter_class=argparse.RawTextHelpFormatter,
)
)
if subcommand in ["pull", "clone"]:
proxy_command_parsers[
f"{command}_{subcommand}"
].add_argument(
"--no-verification",
action="store_true",
default=False,
help="Disable verification via commit/gpg",
)
if subcommand == "clone":
proxy_command_parsers[
f"{command}_{subcommand}"
].add_argument(
"--clone-mode",
choices=["ssh", "https", "shallow"],
default="ssh",
help=(
"Specify the clone mode: ssh, https, or shallow "
"(HTTPS shallow clone; default: ssh)"
),
)
add_identifier_arguments(
proxy_command_parsers[f"{command}_{subcommand}"]
)
parser = create_parser(DESCRIPTION_TEXT)
args = parser.parse_args()
# Select repositories for commands that operate on the repository list.
# (config, list, create work differently and don't use selection)
if args.command and args.command not in ["config", "list", "create"]:
selected = get_selected_repos(
args.all, ALL_REPOSITORIES, getattr(args, "identifiers", [])
)
else:
selected = []
# Proxy commands (git, docker, docker compose)
for command, subcommands in PROXY_COMMANDS.items():
for subcommand in subcommands:
if args.command == subcommand:
if args.command == "clone":
clone_repos(
selected,
REPOSITORIES_BASE_DIR,
ALL_REPOSITORIES,
args.preview,
args.no_verification,
args.clone_mode,
)
elif args.command == "pull":
from pkgmgr.pull_with_verification import (
pull_with_verification,
)
pull_with_verification(
selected,
REPOSITORIES_BASE_DIR,
ALL_REPOSITORIES,
args.extra_args,
args.no_verification,
args.preview,
)
else:
exec_proxy_command(
command,
selected,
REPOSITORIES_BASE_DIR,
ALL_REPOSITORIES,
args.command,
args.extra_args,
args.preview,
)
sys.exit(0)
if args.command in ["make"]:
exec_proxy_command(
args.command,
selected,
REPOSITORIES_BASE_DIR,
ALL_REPOSITORIES,
args.subcommand,
args.extra_args,
args.preview,
)
sys.exit(0)
# Dispatch commands.
if args.command == "install":
install_repos(
selected,
REPOSITORIES_BASE_DIR,
BINARIES_DIRECTORY,
ALL_REPOSITORIES,
args.no_verification,
args.preview,
args.quiet,
args.clone_mode,
args.dependencies,
)
elif args.command == "create":
from pkgmgr.create_repo import create_repo
if not args.identifiers:
print(
"No identifiers provided. Please specify at least one identifier "
"in the format provider/account/repository."
)
sys.exit(1)
else:
selected = get_selected_repos(True, ALL_REPOSITORIES, None)
for identifier in args.identifiers:
create_repo(
identifier,
CONFIG_MERGED,
USER_CONFIG_PATH,
BINARIES_DIRECTORY,
remote=args.remote,
preview=args.preview,
)
elif args.command == "list":
list_repositories(
ALL_REPOSITORIES,
REPOSITORIES_BASE_DIR,
BINARIES_DIRECTORY,
search_filter=args.search,
status_filter=args.status,
)
elif args.command == "deinstall":
deinstall_repos(
selected,
REPOSITORIES_BASE_DIR,
BINARIES_DIRECTORY,
ALL_REPOSITORIES,
preview=args.preview,
)
elif args.command == "delete":
delete_repos(
selected, REPOSITORIES_BASE_DIR, ALL_REPOSITORIES, preview=args.preview
)
elif args.command == "update":
update_repos(
selected,
REPOSITORIES_BASE_DIR,
BINARIES_DIRECTORY,
ALL_REPOSITORIES,
args.no_verification,
args.system,
args.preview,
args.quiet,
args.dependencies,
args.clone_mode,
)
elif args.command == "release":
if not selected:
print("No repositories selected for release.")
sys.exit(1)
from pkgmgr import release as rel
original_dir = os.getcwd()
for repo in selected:
repo_dir: Optional[str] = repo.get("directory")
if not repo_dir:
from pkgmgr.get_repo_dir import get_repo_dir
repo_dir = get_repo_dir(REPOSITORIES_BASE_DIR, repo)
pyproject_path = os.path.join(repo_dir, "pyproject.toml")
changelog_path = os.path.join(repo_dir, "CHANGELOG.md")
print(
f"Releasing repository '{repo.get('repository')}' in '{repo_dir}'..."
)
os.chdir(repo_dir)
rel.release(
pyproject_path=pyproject_path,
changelog_path=changelog_path,
release_type=args.release_type,
message=args.message,
)
os.chdir(original_dir)
elif args.command == "version":
from pkgmgr.git_utils import get_tags
from pkgmgr.versioning import SemVer, find_latest_version
from pkgmgr.version_sources import (
read_pyproject_version,
read_flake_version,
read_pkgbuild_version,
read_debian_changelog_version,
read_spec_version,
read_ansible_galaxy_version,
)
from pkgmgr.get_repo_dir import get_repo_dir
repo_list = selected
if not repo_list:
print("No repositories selected for version.")
sys.exit(1)
print("pkgmgr version info")
print("====================")
for repo in repo_list:
# Resolve repository directory
repo_dir = repo.get("directory")
if not repo_dir:
try:
repo_dir = get_repo_dir(REPOSITORIES_BASE_DIR, repo)
except Exception:
repo_dir = None
# If no local clone exists, skip gracefully with info message
if not repo_dir or not os.path.isdir(repo_dir):
identifier = get_repo_identifier(repo, ALL_REPOSITORIES)
print(f"\nRepository: {identifier}")
print("----------------------------------------")
print(
"[INFO] Skipped: repository directory does not exist "
"locally, version detection is not possible."
)
continue
print(f"\nRepository: {repo_dir}")
print("----------------------------------------")
# 1) Git tags (SemVer)
try:
tags = get_tags(cwd=repo_dir)
except Exception as exc:
print(f"[ERROR] Could not read git tags: {exc}")
tags = []
latest_tag_info = find_latest_version(tags) if tags else None
if latest_tag_info is None:
latest_tag_str = None
latest_ver = None
else:
latest_tag_str, latest_ver = latest_tag_info
# 2) Packaging / metadata sources
pyproject_version = read_pyproject_version(repo_dir)
flake_version = read_flake_version(repo_dir)
pkgbuild_version = read_pkgbuild_version(repo_dir)
debian_version = read_debian_changelog_version(repo_dir)
spec_version = read_spec_version(repo_dir)
ansible_version = read_ansible_galaxy_version(repo_dir)
# 3) Print version summary
if latest_ver is not None:
print(
f"Git (latest SemVer tag): {latest_tag_str} (parsed: {latest_ver})"
)
else:
print("Git (latest SemVer tag): <none found>")
print(f"pyproject.toml: {pyproject_version or '<not found>'}")
print(f"flake.nix: {flake_version or '<not found>'}")
print(f"PKGBUILD: {pkgbuild_version or '<not found>'}")
print(f"debian/changelog: {debian_version or '<not found>'}")
print(f"package-manager.spec: {spec_version or '<not found>'}")
print(f"Ansible Galaxy meta: {ansible_version or '<not found>'}")
# 4) Consistency hint (Git tag vs. pyproject)
if latest_ver is not None and pyproject_version is not None:
try:
file_ver = SemVer.parse(pyproject_version)
if file_ver != latest_ver:
print(
f"[WARN] Version mismatch: Git={latest_ver}, pyproject={file_ver}"
)
except ValueError:
print(
f"[WARN] pyproject version {pyproject_version!r} is not valid SemVer."
)
elif args.command == "status":
status_repos(
selected,
REPOSITORIES_BASE_DIR,
ALL_REPOSITORIES,
args.extra_args,
list_only=args.list,
system_status=args.system,
preview=args.preview,
)
elif args.command == "explore":
for repository in selected:
run_command(f"nautilus {repository['directory']} & disown")
elif args.command == "code":
if not selected:
print("No repositories selected.")
else:
identifiers = [
get_repo_identifier(repo, ALL_REPOSITORIES) for repo in selected
]
sorted_identifiers = sorted(identifiers)
workspace_name = "_".join(sorted_identifiers) + ".code-workspace"
workspaces_dir = os.path.expanduser(
CONFIG_MERGED.get("directories").get("workspaces")
)
os.makedirs(workspaces_dir, exist_ok=True)
workspace_file = os.path.join(workspaces_dir, workspace_name)
folders = []
for repository in selected:
folders.append({"path": repository["directory"]})
workspace_data = {
"folders": folders,
"settings": {},
}
if not os.path.exists(workspace_file):
with open(workspace_file, "w") as f:
json.dump(workspace_data, f, indent=4)
print(f"Created workspace file: {workspace_file}")
else:
print(f"Using existing workspace file: {workspace_file}")
run_command(f'code "{workspace_file}"')
elif args.command == "terminal":
for repository in selected:
run_command(
f'gnome-terminal --tab --working-directory="{repository["directory"]}"'
)
elif args.command == "path":
for repository in selected:
print(repository["directory"])
elif args.command == "shell":
if not args.shell_command:
print("No shell command specified.")
sys.exit(2)
command_to_run = " ".join(args.shell_command)
for repository in selected:
print(
f"Executing in '{repository['directory']}': {command_to_run}"
)
run_command(
command_to_run, cwd=repository["directory"], preview=args.preview
)
elif args.command == "config":
if args.subcommand == "show":
if args.all or (not args.identifiers):
show_config([], USER_CONFIG_PATH, full_config=True)
else:
selected = resolve_repos(args.identifiers, ALL_REPOSITORIES)
if selected:
show_config(
selected, USER_CONFIG_PATH, full_config=False
)
elif args.subcommand == "add":
interactive_add(CONFIG_MERGED, USER_CONFIG_PATH)
elif args.subcommand == "edit":
run_command(f"nano {USER_CONFIG_PATH}")
elif args.subcommand == "init":
if os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, "r") as f:
user_config = yaml.safe_load(f) or {}
else:
user_config = {"repositories": []}
config_init(
user_config,
CONFIG_MERGED,
BINARIES_DIRECTORY,
USER_CONFIG_PATH,
)
elif args.subcommand == "delete":
if os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, "r") as f:
user_config = yaml.safe_load(f) or {"repositories": []}
else:
user_config = {"repositories": []}
if args.all or not args.identifiers:
print("You must specify identifiers to delete.")
else:
to_delete = resolve_repos(
args.identifiers, user_config.get("repositories", [])
)
new_repos = [
entry
for entry in user_config.get("repositories", [])
if entry not in to_delete
]
user_config["repositories"] = new_repos
save_user_config(user_config, USER_CONFIG_PATH)
print(
f"Deleted {len(to_delete)} entries from user config."
)
elif args.subcommand == "ignore":
if os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, "r") as f:
user_config = yaml.safe_load(f) or {"repositories": []}
else:
user_config = {"repositories": []}
if args.all or not args.identifiers:
print(
"You must specify identifiers to modify ignore flag."
)
else:
to_modify = resolve_repos(
args.identifiers, user_config.get("repositories", [])
)
for entry in user_config["repositories"]:
key = (
entry.get("provider"),
entry.get("account"),
entry.get("repository"),
)
for mod in to_modify:
mod_key = (
mod.get("provider"),
mod.get("account"),
mod.get("repository"),
)
if key == mod_key:
entry["ignore"] = args.set == "true"
print(
f"Set ignore for {key} to {entry['ignore']}"
)
save_user_config(user_config, USER_CONFIG_PATH)
else:
# If no subcommand is provided, show help
if not getattr(args, "command", None):
parser.print_help()
return
dispatch_command(args, ctx)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,5 @@
from .context import CLIContext
from .parser import create_parser
from .dispatch import dispatch_command
__all__ = ["CLIContext", "create_parser", "dispatch_command"]

View File

@@ -0,0 +1,15 @@
from .repos import handle_repos_command
from .config import handle_config
from .tools import handle_tools_command
from .release import handle_release
from .version import handle_version
from .make import handle_make
__all__ = [
"handle_repos_command",
"handle_config",
"handle_tools_command",
"handle_release",
"handle_version",
"handle_make",
]

View File

@@ -0,0 +1,140 @@
from __future__ import annotations
import os
import sys
from typing import Any, Dict, List
import yaml
from pkgmgr.cli_core.context import CLIContext
from pkgmgr.config_init import config_init
from pkgmgr.interactive_add import interactive_add
from pkgmgr.resolve_repos import resolve_repos
from pkgmgr.save_user_config import save_user_config
from pkgmgr.show_config import show_config
from pkgmgr.run_command import run_command
def _load_user_config(user_config_path: str) -> Dict[str, Any]:
"""
Load the user config file, returning a default structure if it does not exist.
"""
if os.path.exists(user_config_path):
with open(user_config_path, "r") as f:
return yaml.safe_load(f) or {"repositories": []}
return {"repositories": []}
def handle_config(args, ctx: CLIContext) -> None:
"""
Handle the 'config' command and its subcommands.
"""
user_config_path = ctx.user_config_path
# --------------------------------------------------------
# config show
# --------------------------------------------------------
if args.subcommand == "show":
if args.all or (not args.identifiers):
show_config([], user_config_path, full_config=True)
else:
selected = resolve_repos(args.identifiers, ctx.all_repositories)
if selected:
show_config(
selected,
user_config_path,
full_config=False,
)
return
# --------------------------------------------------------
# config add
# --------------------------------------------------------
if args.subcommand == "add":
interactive_add(ctx.config_merged, user_config_path)
return
# --------------------------------------------------------
# config edit
# --------------------------------------------------------
if args.subcommand == "edit":
run_command(f"nano {user_config_path}")
return
# --------------------------------------------------------
# config init
# --------------------------------------------------------
if args.subcommand == "init":
user_config = _load_user_config(user_config_path)
config_init(
user_config,
ctx.config_merged,
ctx.binaries_dir,
user_config_path,
)
return
# --------------------------------------------------------
# config delete
# --------------------------------------------------------
if args.subcommand == "delete":
user_config = _load_user_config(user_config_path)
if args.all or not args.identifiers:
print("You must specify identifiers to delete.")
return
to_delete = resolve_repos(
args.identifiers,
user_config.get("repositories", []),
)
new_repos = [
entry
for entry in user_config.get("repositories", [])
if entry not in to_delete
]
user_config["repositories"] = new_repos
save_user_config(user_config, user_config_path)
print(f"Deleted {len(to_delete)} entries from user config.")
return
# --------------------------------------------------------
# config ignore
# --------------------------------------------------------
if args.subcommand == "ignore":
user_config = _load_user_config(user_config_path)
if args.all or not args.identifiers:
print("You must specify identifiers to modify ignore flag.")
return
to_modify = resolve_repos(
args.identifiers,
user_config.get("repositories", []),
)
for entry in user_config["repositories"]:
key = (
entry.get("provider"),
entry.get("account"),
entry.get("repository"),
)
for mod in to_modify:
mod_key = (
mod.get("provider"),
mod.get("account"),
mod.get("repository"),
)
if key == mod_key:
entry["ignore"] = args.set == "true"
print(
f"Set ignore for {key} to {entry['ignore']}"
)
save_user_config(user_config, user_config_path)
return
# If we end up here, something is wrong with subcommand routing
print(f"Unknown config subcommand: {args.subcommand}")
sys.exit(2)
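For reference, _load_user_config and the delete/ignore branches above operate on a user config shaped roughly like the dict below. This is a hedged sketch: only the keys actually read in this handler (provider, account, repository, ignore) are shown, and the entry itself is a hypothetical example.

# Shape of the dict returned by _load_user_config
# (the YAML file on disk deserializes to this structure):
user_config = {
    "repositories": [
        {
            "provider": "github.com",        # hypothetical entry
            "account": "example-account",
            "repository": "example-repo",
            "ignore": False,                 # toggled by `pkgmgr config ignore --set true|false`
        },
    ],
}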

View File

@@ -0,0 +1,33 @@
from __future__ import annotations
import sys
from typing import Any, Dict, List
from pkgmgr.cli_core.context import CLIContext
from pkgmgr.exec_proxy_command import exec_proxy_command
Repository = Dict[str, Any]
def handle_make(
args,
ctx: CLIContext,
selected: List[Repository],
) -> None:
"""
Handle the 'make' command by delegating to exec_proxy_command.
This mirrors the old behaviour where `make` was treated as a
special proxy command.
"""
exec_proxy_command(
"make",
selected,
ctx.repositories_base_dir,
ctx.all_repositories,
args.subcommand,
getattr(args, "extra_args", []),
getattr(args, "preview", False),
)
sys.exit(0)

View File

@@ -0,0 +1,52 @@
from __future__ import annotations
import os
import sys
from typing import Any, Dict, List, Optional
from pkgmgr.cli_core.context import CLIContext
from pkgmgr.get_repo_dir import get_repo_dir
from pkgmgr import release as rel
Repository = Dict[str, Any]
def handle_release(
args,
ctx: CLIContext,
selected: List[Repository],
) -> None:
"""
Handle the 'release' command.
Creates a release by incrementing the version and updating the changelog
in the selected repositories.
"""
if not selected:
print("No repositories selected for release.")
sys.exit(1)
original_dir = os.getcwd()
for repo in selected:
repo_dir: Optional[str] = repo.get("directory")
if not repo_dir:
repo_dir = get_repo_dir(ctx.repositories_base_dir, repo)
pyproject_path = os.path.join(repo_dir, "pyproject.toml")
changelog_path = os.path.join(repo_dir, "CHANGELOG.md")
print(
f"Releasing repository '{repo.get('repository')}' in '{repo_dir}'..."
)
os.chdir(repo_dir)
rel.release(
pyproject_path=pyproject_path,
changelog_path=changelog_path,
release_type=args.release_type,
message=args.message,
)
os.chdir(original_dir)

View File

@@ -0,0 +1,161 @@
from __future__ import annotations
import sys
from typing import Any, Dict, List
from pkgmgr.cli_core.context import CLIContext
from pkgmgr.install_repos import install_repos
from pkgmgr.deinstall_repos import deinstall_repos
from pkgmgr.delete_repos import delete_repos
from pkgmgr.update_repos import update_repos
from pkgmgr.status_repos import status_repos
from pkgmgr.list_repositories import list_repositories
from pkgmgr.run_command import run_command
from pkgmgr.create_repo import create_repo
Repository = Dict[str, Any]
def handle_repos_command(
args,
ctx: CLIContext,
selected: List[Repository],
) -> None:
"""
Handle repository-related commands:
- install / update / deinstall / delete / status
- path / shell
- create / list
"""
# --------------------------------------------------------
# install / update
# --------------------------------------------------------
if args.command == "install":
install_repos(
selected,
ctx.repositories_base_dir,
ctx.binaries_dir,
ctx.all_repositories,
args.no_verification,
args.preview,
args.quiet,
args.clone_mode,
args.dependencies,
)
return
if args.command == "update":
update_repos(
selected,
ctx.repositories_base_dir,
ctx.binaries_dir,
ctx.all_repositories,
args.no_verification,
args.system,
args.preview,
args.quiet,
args.dependencies,
args.clone_mode,
)
return
# --------------------------------------------------------
# deinstall / delete
# --------------------------------------------------------
if args.command == "deinstall":
deinstall_repos(
selected,
ctx.repositories_base_dir,
ctx.binaries_dir,
ctx.all_repositories,
preview=args.preview,
)
return
if args.command == "delete":
delete_repos(
selected,
ctx.repositories_base_dir,
ctx.all_repositories,
preview=args.preview,
)
return
# --------------------------------------------------------
# status
# --------------------------------------------------------
if args.command == "status":
status_repos(
selected,
ctx.repositories_base_dir,
ctx.all_repositories,
args.extra_args,
list_only=args.list,
system_status=args.system,
preview=args.preview,
)
return
# --------------------------------------------------------
# path
# --------------------------------------------------------
if args.command == "path":
for repository in selected:
print(repository["directory"])
return
# --------------------------------------------------------
# shell
# --------------------------------------------------------
if args.command == "shell":
if not args.shell_command:
print("No shell command specified.")
sys.exit(2)
command_to_run = " ".join(args.shell_command)
for repository in selected:
print(
f"Executing in '{repository['directory']}': {command_to_run}"
)
run_command(
command_to_run,
cwd=repository["directory"],
preview=args.preview,
)
return
# --------------------------------------------------------
# create
# --------------------------------------------------------
if args.command == "create":
if not args.identifiers:
print(
"No identifiers provided. Please specify at least one identifier "
"in the format provider/account/repository."
)
sys.exit(1)
for identifier in args.identifiers:
create_repo(
identifier,
ctx.config_merged,
ctx.user_config_path,
ctx.binaries_dir,
remote=args.remote,
preview=args.preview,
)
return
# --------------------------------------------------------
# list
# --------------------------------------------------------
if args.command == "list":
list_repositories(
ctx.all_repositories,
ctx.repositories_base_dir,
ctx.binaries_dir,
search_filter=args.search,
status_filter=args.status,
)
return

View File

@@ -0,0 +1,83 @@
from __future__ import annotations
import json
import os
from typing import Any, Dict, List
from pkgmgr.cli_core.context import CLIContext
from pkgmgr.run_command import run_command
from pkgmgr.get_repo_identifier import get_repo_identifier
Repository = Dict[str, Any]
def handle_tools_command(
args,
ctx: CLIContext,
selected: List[Repository],
) -> None:
"""
Handle integration commands:
- explore (file manager)
- terminal (GNOME Terminal)
- code (VS Code workspace)
"""
# --------------------------------------------------------
# explore
# --------------------------------------------------------
if args.command == "explore":
for repository in selected:
run_command(
f"nautilus {repository['directory']} & disown"
)
return
# --------------------------------------------------------
# terminal
# --------------------------------------------------------
if args.command == "terminal":
for repository in selected:
run_command(
f'gnome-terminal --tab --working-directory="{repository["directory"]}"'
)
return
# --------------------------------------------------------
# code
# --------------------------------------------------------
if args.command == "code":
if not selected:
print("No repositories selected.")
return
identifiers = [
get_repo_identifier(repo, ctx.all_repositories)
for repo in selected
]
sorted_identifiers = sorted(identifiers)
workspace_name = "_".join(sorted_identifiers) + ".code-workspace"
workspaces_dir = os.path.expanduser(
ctx.config_merged.get("directories").get("workspaces")
)
os.makedirs(workspaces_dir, exist_ok=True)
workspace_file = os.path.join(workspaces_dir, workspace_name)
folders = [{"path": repository["directory"]} for repository in selected]
workspace_data = {
"folders": folders,
"settings": {},
}
if not os.path.exists(workspace_file):
with open(workspace_file, "w") as f:
json.dump(workspace_data, f, indent=4)
print(f"Created workspace file: {workspace_file}")
else:
print(f"Using existing workspace file: {workspace_file}")
run_command(f'code "{workspace_file}"')
return

View File

@@ -0,0 +1,118 @@
from __future__ import annotations
import os
import sys
from typing import Any, Dict, List, Optional, Tuple
from pkgmgr.cli_core.context import CLIContext
from pkgmgr.get_repo_dir import get_repo_dir
from pkgmgr.get_repo_identifier import get_repo_identifier
from pkgmgr.git_utils import get_tags
from pkgmgr.versioning import SemVer, find_latest_version
from pkgmgr.version_sources import (
read_pyproject_version,
read_flake_version,
read_pkgbuild_version,
read_debian_changelog_version,
read_spec_version,
read_ansible_galaxy_version,
)
Repository = Dict[str, Any]
def handle_version(
args,
ctx: CLIContext,
selected: List[Repository],
) -> None:
"""
Handle the 'version' command.
Shows version information from various sources (git tags, pyproject,
flake.nix, PKGBUILD, debian, spec, Ansible Galaxy).
"""
repo_list = selected
if not repo_list:
print("No repositories selected for version.")
sys.exit(1)
print("pkgmgr version info")
print("====================")
for repo in repo_list:
# Resolve repository directory
repo_dir = repo.get("directory")
if not repo_dir:
try:
repo_dir = get_repo_dir(ctx.repositories_base_dir, repo)
except Exception:
repo_dir = None
# If no local clone exists, skip gracefully with info message
if not repo_dir or not os.path.isdir(repo_dir):
identifier = get_repo_identifier(repo, ctx.all_repositories)
print(f"\nRepository: {identifier}")
print("----------------------------------------")
print(
"[INFO] Skipped: repository directory does not exist "
"locally, version detection is not possible."
)
continue
print(f"\nRepository: {repo_dir}")
print("----------------------------------------")
# 1) Git tags (SemVer)
try:
tags = get_tags(cwd=repo_dir)
except Exception as exc:
print(f"[ERROR] Could not read git tags: {exc}")
tags = []
latest_tag_info: Optional[Tuple[str, SemVer]]
latest_tag_info = find_latest_version(tags) if tags else None
if latest_tag_info is None:
latest_tag_str = None
latest_ver = None
else:
latest_tag_str, latest_ver = latest_tag_info
# 2) Packaging / metadata sources
pyproject_version = read_pyproject_version(repo_dir)
flake_version = read_flake_version(repo_dir)
pkgbuild_version = read_pkgbuild_version(repo_dir)
debian_version = read_debian_changelog_version(repo_dir)
spec_version = read_spec_version(repo_dir)
ansible_version = read_ansible_galaxy_version(repo_dir)
# 3) Print version summary
if latest_ver is not None:
print(
f"Git (latest SemVer tag): {latest_tag_str} (parsed: {latest_ver})"
)
else:
print("Git (latest SemVer tag): <none found>")
print(f"pyproject.toml: {pyproject_version or '<not found>'}")
print(f"flake.nix: {flake_version or '<not found>'}")
print(f"PKGBUILD: {pkgbuild_version or '<not found>'}")
print(f"debian/changelog: {debian_version or '<not found>'}")
print(f"package-manager.spec: {spec_version or '<not found>'}")
print(f"Ansible Galaxy meta: {ansible_version or '<not found>'}")
# 4) Consistency hint (Git tag vs. pyproject)
if latest_ver is not None and pyproject_version is not None:
try:
file_ver = SemVer.parse(pyproject_version)
if file_ver != latest_ver:
print(
f"[WARN] Version mismatch: Git={latest_ver}, pyproject={file_ver}"
)
except ValueError:
print(
f"[WARN] pyproject version {pyproject_version!r} is not valid SemVer."
)

View File

@@ -0,0 +1,20 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Dict, List
@dataclass
class CLIContext:
"""
Shared runtime context for CLI commands.
This avoids passing many individual parameters around and
keeps the CLI layer thin and structured.
"""
config_merged: Dict[str, Any]
repositories_base_dir: str
all_repositories: List[Dict[str, Any]]
binaries_dir: str
user_config_path: str

View File

@@ -0,0 +1,82 @@
from __future__ import annotations
import sys
from typing import List
from pkgmgr.cli_core.context import CLIContext
from pkgmgr.cli_core.proxy import maybe_handle_proxy
from pkgmgr.get_selected_repos import get_selected_repos
from pkgmgr.cli_core.commands.repos import handle_repos_command
from pkgmgr.cli_core.commands.tools import handle_tools_command
from pkgmgr.cli_core.commands.release import handle_release
from pkgmgr.cli_core.commands.version import handle_version
from pkgmgr.cli_core.commands.config import handle_config
from pkgmgr.cli_core.commands.make import handle_make
def dispatch_command(args, ctx: CLIContext) -> None:
"""
Top-level command dispatcher.
Responsible for:
- computing selected repositories (where applicable)
- delegating to the correct command handler module
"""
# 1) Proxy commands (git, docker, docker compose) short-circuit.
if maybe_handle_proxy(args, ctx):
return
# 2) Determine if this command uses repository selection.
commands_with_selection: List[str] = [
"install",
"update",
"deinstall",
"delete",
"status",
"path",
"shell",
"code",
"explore",
"terminal",
"release",
"version",
"make",
]
if args.command in commands_with_selection:
selected = get_selected_repos(
getattr(args, "all", False),
ctx.all_repositories,
getattr(args, "identifiers", []),
)
else:
selected = []
# 3) Delegate based on command.
if args.command in (
"install",
"update",
"deinstall",
"delete",
"status",
"path",
"shell",
"create",
"list",
):
handle_repos_command(args, ctx, selected)
elif args.command in ("code", "explore", "terminal"):
handle_tools_command(args, ctx, selected)
elif args.command == "release":
handle_release(args, ctx, selected)
elif args.command == "version":
handle_version(args, ctx, selected)
elif args.command == "config":
handle_config(args, ctx)
elif args.command == "make":
handle_make(args, ctx, selected)
else:
print(f"Unknown command: {args.command}")
sys.exit(2)
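To make the routing above concrete, here is a small self-contained sketch of the decision path for `pkgmgr status --all`. It mirrors the membership checks in dispatch_command without calling any handler; the two sets are abbreviated copies of PROXY_COMMANDS' subcommands and commands_with_selection.

import argparse

args = argparse.Namespace(command="status", all=True, identifiers=[])

# Abbreviated copies of the sets used by maybe_handle_proxy and dispatch_command.
proxy_subcommands = {"pull", "push", "clone", "commit", "up", "down", "build"}
commands_with_selection = {
    "install", "update", "deinstall", "delete", "status", "path", "shell",
    "code", "explore", "terminal", "release", "version", "make",
}

assert args.command not in proxy_subcommands    # maybe_handle_proxy() would return False
assert args.command in commands_with_selection  # repos are resolved via get_selected_repos()
# dispatch_command() then routes to handle_repos_command(args, ctx, selected).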

pkgmgr/cli_core/parser.py Normal file
View File

@@ -0,0 +1,359 @@
from __future__ import annotations
import argparse
from pkgmgr.cli_core.proxy import register_proxy_commands
class SortedSubParsersAction(argparse._SubParsersAction):
"""
Subparsers action that keeps choices sorted alphabetically.
"""
def add_parser(self, name, **kwargs):
parser = super().add_parser(name, **kwargs)
self._choices_actions.sort(key=lambda a: a.dest)
return parser
def add_identifier_arguments(subparser: argparse.ArgumentParser) -> None:
"""
Attach generic repository selection arguments to a subparser.
"""
subparser.add_argument(
"identifiers",
nargs="*",
help=(
"Identifier(s) for repositories. "
"Default: Repository of current folder."
),
)
subparser.add_argument(
"--all",
action="store_true",
default=False,
help=(
"Apply the subcommand to all repositories in the config. "
"Some subcommands ask for confirmation. If you want to give this "
"confirmation for all repositories, pipe 'yes'. E.g: "
"yes | pkgmgr {subcommand} --all"
),
)
subparser.add_argument(
"--preview",
action="store_true",
help="Preview changes without executing commands",
)
subparser.add_argument(
"--list",
action="store_true",
help="List affected repositories (with preview or status)",
)
subparser.add_argument(
"-a",
"--args",
nargs=argparse.REMAINDER,
dest="extra_args",
help="Additional parameters to be attached.",
default=[],
)
def add_install_update_arguments(subparser: argparse.ArgumentParser) -> None:
"""
Attach shared flags for install/update-like commands.
"""
add_identifier_arguments(subparser)
subparser.add_argument(
"-q",
"--quiet",
action="store_true",
help="Suppress warnings and info messages",
)
subparser.add_argument(
"--no-verification",
action="store_true",
default=False,
help="Disable verification via commit/gpg",
)
subparser.add_argument(
"--dependencies",
action="store_true",
help="Also pull and update dependencies",
)
subparser.add_argument(
"--clone-mode",
choices=["ssh", "https", "shallow"],
default="ssh",
help=(
"Specify the clone mode: ssh, https, or shallow "
"(HTTPS shallow clone; default: ssh)"
),
)
def create_parser(description_text: str) -> argparse.ArgumentParser:
"""
Create and configure the top-level argument parser for pkgmgr.
This function defines *only* the CLI surface (arguments & subcommands);
it contains no business logic.
"""
parser = argparse.ArgumentParser(
description=description_text,
formatter_class=argparse.RawTextHelpFormatter,
)
subparsers = parser.add_subparsers(
dest="command",
help="Subcommands",
action=SortedSubParsersAction,
)
# ------------------------------------------------------------
# install / update
# ------------------------------------------------------------
install_parser = subparsers.add_parser(
"install",
help="Setup repository/repositories alias links to executables",
)
add_install_update_arguments(install_parser)
update_parser = subparsers.add_parser(
"update",
help="Update (pull + install) repository/repositories",
)
add_install_update_arguments(update_parser)
update_parser.add_argument(
"--system",
action="store_true",
help="Include system update commands",
)
# ------------------------------------------------------------
# deinstall / delete
# ------------------------------------------------------------
deinstall_parser = subparsers.add_parser(
"deinstall",
help="Remove alias links to repository/repositories",
)
add_identifier_arguments(deinstall_parser)
delete_parser = subparsers.add_parser(
"delete",
help="Delete repository/repositories alias links to executables",
)
add_identifier_arguments(delete_parser)
# ------------------------------------------------------------
# create
# ------------------------------------------------------------
create_parser = subparsers.add_parser(
"create",
help=(
"Create new repository entries: add them to the config if not "
"already present, initialize the local repository, and push "
"remotely if --remote is set."
),
)
add_identifier_arguments(create_parser)
create_parser.add_argument(
"--remote",
action="store_true",
help="If set, add the remote and push the initial commit.",
)
# ------------------------------------------------------------
# status
# ------------------------------------------------------------
status_parser = subparsers.add_parser(
"status",
help="Show status for repository/repositories or system",
)
add_identifier_arguments(status_parser)
status_parser.add_argument(
"--system",
action="store_true",
help="Show system status",
)
# ------------------------------------------------------------
# config
# ------------------------------------------------------------
config_parser = subparsers.add_parser(
"config",
help="Manage configuration",
)
config_subparsers = config_parser.add_subparsers(
dest="subcommand",
help="Config subcommands",
required=True,
)
config_show = config_subparsers.add_parser(
"show",
help="Show configuration",
)
add_identifier_arguments(config_show)
config_subparsers.add_parser(
"add",
help="Interactively add a new repository entry",
)
config_subparsers.add_parser(
"edit",
help="Edit configuration file with nano",
)
config_subparsers.add_parser(
"init",
help="Initialize user configuration by scanning the base directory",
)
config_delete = config_subparsers.add_parser(
"delete",
help="Delete repository entry from user config",
)
add_identifier_arguments(config_delete)
config_ignore = config_subparsers.add_parser(
"ignore",
help="Set ignore flag for repository entries in user config",
)
add_identifier_arguments(config_ignore)
config_ignore.add_argument(
"--set",
choices=["true", "false"],
required=True,
help="Set ignore to true or false",
)
# ------------------------------------------------------------
# path / explore / terminal / code / shell
# ------------------------------------------------------------
path_parser = subparsers.add_parser(
"path",
help="Print the path(s) of repository/repositories",
)
add_identifier_arguments(path_parser)
explore_parser = subparsers.add_parser(
"explore",
help="Open repository in Nautilus file manager",
)
add_identifier_arguments(explore_parser)
terminal_parser = subparsers.add_parser(
"terminal",
help="Open repository in a new GNOME Terminal tab",
)
add_identifier_arguments(terminal_parser)
code_parser = subparsers.add_parser(
"code",
help="Open repository workspace with VS Code",
)
add_identifier_arguments(code_parser)
shell_parser = subparsers.add_parser(
"shell",
help="Execute a shell command in each repository",
)
add_identifier_arguments(shell_parser)
shell_parser.add_argument(
"-c",
"--command",
nargs=argparse.REMAINDER,
dest="shell_command",
help="The shell command (and its arguments) to execute in each repository",
default=[],
)
# ------------------------------------------------------------
# release
# ------------------------------------------------------------
release_parser = subparsers.add_parser(
"release",
help=(
"Create a release for repository/ies by incrementing version "
"and updating the changelog."
),
)
release_parser.add_argument(
"release_type",
choices=["major", "minor", "patch"],
help="Type of version increment for the release (major, minor, patch).",
)
release_parser.add_argument(
"-m",
"--message",
default="",
help=(
"Optional release message to add to the changelog and tag."
),
)
add_identifier_arguments(release_parser)
# ------------------------------------------------------------
# version
# ------------------------------------------------------------
version_parser = subparsers.add_parser(
"version",
help=(
"Show version information for repository/ies "
"(git tags, pyproject.toml, flake.nix, PKGBUILD, debian, spec, Ansible Galaxy)."
),
)
add_identifier_arguments(version_parser)
# ------------------------------------------------------------
# list
# ------------------------------------------------------------
list_parser = subparsers.add_parser(
"list",
help="List all repositories with details and status",
)
list_parser.add_argument(
"--search",
default="",
help="Filter repositories that contain the given string",
)
list_parser.add_argument(
"--status",
type=str,
default="",
help="Filter repositories by status (case insensitive)",
)
# ------------------------------------------------------------
# make (wrapper around make in repositories)
# ------------------------------------------------------------
make_parser = subparsers.add_parser(
"make",
help="Executes make commands",
)
add_identifier_arguments(make_parser)
make_subparsers = make_parser.add_subparsers(
dest="subcommand",
help="Make subcommands",
required=True,
)
make_install = make_subparsers.add_parser(
"install",
help="Executes the make install command",
)
add_identifier_arguments(make_install)
make_deinstall = make_subparsers.add_parser(
"deinstall",
help="Executes the make deinstall command",
)
add_identifier_arguments(make_deinstall)
# ------------------------------------------------------------
# Proxy commands (git, docker, docker compose)
# ------------------------------------------------------------
register_proxy_commands(subparsers)
return parser
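A quick parsing sketch against the parser built above (the description string passed to create_parser is a placeholder, and 'pkgmgr' is used as the repository identifier, as in the E2E tests below):

from pkgmgr.cli_core.parser import create_parser

parser = create_parser("Package Manager")   # placeholder description text
args = parser.parse_args(["install", "pkgmgr", "--preview", "--clone-mode", "https"])
assert args.command == "install"
assert args.identifiers == ["pkgmgr"]
assert args.preview is True and args.clone_mode == "https"
assert args.all is False and args.no_verification is False
assert args.extra_args == []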

pkgmgr/cli_core/proxy.py Normal file
View File

@@ -0,0 +1,184 @@
from __future__ import annotations
import argparse
import sys
from typing import Dict, List
from pkgmgr.cli_core.context import CLIContext
from pkgmgr.clone_repos import clone_repos
from pkgmgr.exec_proxy_command import exec_proxy_command
from pkgmgr.get_selected_repos import get_selected_repos
from pkgmgr.pull_with_verification import pull_with_verification
PROXY_COMMANDS: Dict[str, List[str]] = {
"git": [
"pull",
"push",
"diff",
"add",
"show",
"checkout",
"clone",
"reset",
"revert",
"rebase",
"commit",
],
"docker": [
"start",
"stop",
"build",
],
"docker compose": [
"up",
"down",
"exec",
"ps",
"restart",
],
}
def _add_proxy_identifier_arguments(parser: argparse.ArgumentParser) -> None:
"""
Local copy of the identifier argument set for proxy commands.
This duplicates the semantics of pkgmgr.cli_core.parser.add_identifier_arguments
to avoid circular imports.
"""
parser.add_argument(
"identifiers",
nargs="*",
help=(
"Identifier(s) for repositories. "
"Default: Repository of current folder."
),
)
parser.add_argument(
"--all",
action="store_true",
default=False,
help=(
"Apply the subcommand to all repositories in the config. "
"Some subcommands ask for confirmation. If you want to give this "
"confirmation for all repositories, pipe 'yes'. E.g: "
"yes | pkgmgr {subcommand} --all"
),
)
parser.add_argument(
"--preview",
action="store_true",
help="Preview changes without executing commands",
)
parser.add_argument(
"--list",
action="store_true",
help="List affected repositories (with preview or status)",
)
parser.add_argument(
"-a",
"--args",
nargs=argparse.REMAINDER,
dest="extra_args",
help="Additional parameters to be attached.",
default=[],
)
def register_proxy_commands(
subparsers: argparse._SubParsersAction,
) -> None:
"""
Register proxy commands (git, docker, docker compose) as
top-level subcommands on the given subparsers.
"""
for command, subcommands in PROXY_COMMANDS.items():
for subcommand in subcommands:
parser = subparsers.add_parser(
subcommand,
help=f"Proxies '{command} {subcommand}' to repository/ies",
description=(
f"Executes '{command} {subcommand}' for the "
"identified repos.\nTo recieve more help execute "
f"'{command} {subcommand} --help'"
),
formatter_class=argparse.RawTextHelpFormatter,
)
if subcommand in ["pull", "clone"]:
parser.add_argument(
"--no-verification",
action="store_true",
default=False,
help="Disable verification via commit/gpg",
)
if subcommand == "clone":
parser.add_argument(
"--clone-mode",
choices=["ssh", "https", "shallow"],
default="ssh",
help=(
"Specify the clone mode: ssh, https, or shallow "
"(HTTPS shallow clone; default: ssh)"
),
)
_add_proxy_identifier_arguments(parser)
def maybe_handle_proxy(args: argparse.Namespace, ctx: CLIContext) -> bool:
"""
If the parsed command is a proxy command, execute it and return True.
Otherwise return False to let the main dispatcher continue.
"""
all_proxy_subcommands = {
sub for subs in PROXY_COMMANDS.values() for sub in subs
}
if args.command not in all_proxy_subcommands:
return False
# Use generic selection semantics for proxies
selected = get_selected_repos(
getattr(args, "all", False),
ctx.all_repositories,
getattr(args, "identifiers", []),
)
for command, subcommands in PROXY_COMMANDS.items():
if args.command not in subcommands:
continue
if args.command == "clone":
clone_repos(
selected,
ctx.repositories_base_dir,
ctx.all_repositories,
args.preview,
args.no_verification,
args.clone_mode,
)
elif args.command == "pull":
pull_with_verification(
selected,
ctx.repositories_base_dir,
ctx.all_repositories,
args.extra_args,
args.no_verification,
args.preview,
)
else:
exec_proxy_command(
command,
selected,
ctx.repositories_base_dir,
ctx.all_repositories,
args.command,
args.extra_args,
args.preview,
)
sys.exit(0)
return True
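As a usage illustration of the proxy path, the sketch below shows how one proxied invocation is parsed and which call maybe_handle_proxy would then make. It is hedged: 'selected' and 'ctx' are as in the code above, and exec_proxy_command (imported from pkgmgr.exec_proxy_command, not part of this commit) is what ultimately runs the command in each repository.

from pkgmgr.cli_core.parser import create_parser

parser = create_parser("Package Manager")   # placeholder description text
args = parser.parse_args(["commit", "--all", "-a", "-m", "Release prep"])
assert args.command == "commit"
assert args.all is True
assert args.extra_args == ["-m", "Release prep"]
# maybe_handle_proxy(args, ctx) would resolve the selection and call
#   exec_proxy_command("git", selected, ctx.repositories_base_dir,
#                      ctx.all_repositories, "commit",
#                      ["-m", "Release prep"], args.preview)
# i.e. roughly `git commit -m "Release prep"` per selected repository,
# then exit with status 0 so the main dispatcher never sees the command.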

View File

@@ -0,0 +1,74 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Integration tests for the `pkgmgr config` command.
We only exercise non-interactive, read-only subcommands here:
- pkgmgr config show --all
- pkgmgr config show pkgmgr
Interactive or mutating subcommands like `add`, `edit`, `init`,
`delete`, `ignore` are intentionally not covered in E2E tests to keep
the CI environment non-interactive and side-effect free.
"""
from __future__ import annotations
import runpy
import sys
import unittest
def _run_pkgmgr_config(extra_args: list[str]) -> None:
"""
Run `pkgmgr config ...` with the given extra args.
Any non-zero SystemExit is treated as a test failure and turned into
an AssertionError with diagnostics.
"""
cmd_repr = "pkgmgr " + " ".join(extra_args)
original_argv = list(sys.argv)
try:
sys.argv = ["pkgmgr"] + extra_args
try:
runpy.run_module("main", run_name="__main__")
except SystemExit as exc:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
if code != 0:
print("[TEST] SystemExit caught while running", cmd_repr)
print(f"[TEST] Exit code: {code}")
raise AssertionError(
f"{cmd_repr!r} failed with exit code {code}. "
"Scroll up to inspect the output printed before failure."
) from exc
# exit code 0 is success
finally:
sys.argv = original_argv
class TestIntegrationConfigCommands(unittest.TestCase):
"""
E2E tests for `pkgmgr config` subcommands.
"""
def test_config_show_all(self) -> None:
"""
Run: pkgmgr config show --all
"""
_run_pkgmgr_config(["config", "show", "--all"])
def test_config_show_pkgmgr(self) -> None:
"""
Run: pkgmgr config show pkgmgr
Uses 'pkgmgr' as the standard repository identifier.
"""
_run_pkgmgr_config(["config", "show", "pkgmgr"])
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,78 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Integration tests for the `pkgmgr make` command.
We exercise the wrapper around `make` using the pkgmgr repository as
the standard target, but only in --preview mode to avoid side effects.
"""
from __future__ import annotations
import os
import runpy
import sys
import unittest
from test_integration_version_commands import _load_pkgmgr_repo_dir
class TestIntegrationMakeCommands(unittest.TestCase):
"""
E2E tests for the pkgmgr `make` wrapper.
"""
@classmethod
def setUpClass(cls) -> None:
# Reuse the helper from the version tests to locate the pkgmgr repo
cls.pkgmgr_repo_dir = _load_pkgmgr_repo_dir()
def _run_pkgmgr_make(self, extra_args: list[str]) -> None:
"""
Run `pkgmgr make ...` with the given extra args, from inside
the pkgmgr repository.
Any non-zero exit code is treated as test failure.
"""
cmd_repr = "pkgmgr " + " ".join(extra_args)
original_argv = list(sys.argv)
original_cwd = os.getcwd()
try:
os.chdir(self.pkgmgr_repo_dir)
sys.argv = ["pkgmgr"] + extra_args
try:
runpy.run_module("main", run_name="__main__")
except SystemExit as exc:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
if code != 0:
print("[TEST] SystemExit caught while running", cmd_repr)
print(f"[TEST] Working directory: {os.getcwd()}")
print(f"[TEST] Exit code: {code}")
raise AssertionError(
f"{cmd_repr!r} failed with exit code {code}. "
"Scroll up to inspect the output printed before failure."
) from exc
# exit code 0 is success
finally:
os.chdir(original_cwd)
sys.argv = original_argv
def test_make_install_pkgmgr_preview(self) -> None:
"""
Run: pkgmgr make install --preview pkgmgr
- 'pkgmgr' is used as the standard repository identifier.
- '--preview' ensures that no destructive make commands are
actually executed inside the container.
"""
self._run_pkgmgr_make(
["make", "install", "--preview", "pkgmgr"]
)
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Integration tests for the `pkgmgr release` command.
We deliberately only test a *negative* path here, to avoid mutating
the real repositories (bumping versions, editing changelogs) during
CI runs.
The test verifies that:
- Calling `pkgmgr release` with a non-existent repository identifier
results in a non-zero exit code and a helpful error.
"""
from __future__ import annotations
import runpy
import sys
import unittest
class TestIntegrationReleaseCommand(unittest.TestCase):
"""
E2E tests for `pkgmgr release`.
"""
def _run_release_expect_failure(self) -> None:
cmd_repr = "pkgmgr release patch does-not-exist-xyz"
original_argv = list(sys.argv)
try:
sys.argv = [
"pkgmgr",
"release",
"patch",
"does-not-exist-xyz",
]
try:
runpy.run_module("main", run_name="__main__")
except SystemExit as exc:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
# Actually verify the failure here:
assert code != 0, f"{cmd_repr!r} unexpectedly succeeded with exit code 0"
print("[TEST] pkgmgr release failed as expected")
print(f"[TEST] Command : {cmd_repr}")
print(f"[TEST] Exit code : {code}")
else:
# No SystemExit at all -> definitely wrong
raise AssertionError(
f"{cmd_repr!r} returned normally (expected non-zero exit)."
)
finally:
sys.argv = original_argv
def test_release_for_unknown_repo_fails_cleanly(self) -> None:
self._run_release_expect_failure()
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,95 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Integration tests for the "tools" commands:
- pkgmgr code
- pkgmgr explore
- pkgmgr terminal
These commands spawn external GUI tools (VS Code, Nautilus,
GNOME Terminal) which are usually not available in a headless CI
container. Therefore, the entire test class is skipped by default.
If you run the tests on a local desktop environment where these
commands exist and can be spawned, you can remove or modify the
@skip decorator.
"""
from __future__ import annotations
import os
import runpy
import sys
import unittest
from test_integration_version_commands import _load_pkgmgr_repo_dir
@unittest.skip(
"Requires GUI tools (code, nautilus, gnome-terminal) inside the "
"test environment; skipped by default in CI."
)
class TestIntegrationToolsCommands(unittest.TestCase):
"""
E2E tests for pkgmgr 'code', 'explore', and 'terminal' commands.
"""
@classmethod
def setUpClass(cls) -> None:
cls.pkgmgr_repo_dir = _load_pkgmgr_repo_dir()
def _run_pkgmgr_tools_command(self, extra_args: list[str]) -> None:
"""
Run a 'tools' style command (code/explore/terminal) for pkgmgr.
Any non-zero exit code is treated as a test failure.
"""
cmd_repr = "pkgmgr " + " ".join(extra_args)
original_argv = list(sys.argv)
original_cwd = os.getcwd()
try:
os.chdir(self.pkgmgr_repo_dir)
sys.argv = ["pkgmgr"] + extra_args
try:
runpy.run_module("main", run_name="__main__")
except SystemExit as exc:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
if code != 0:
print("[TEST] SystemExit caught while running", cmd_repr)
print(f"[TEST] Working directory: {os.getcwd()}")
print(f"[TEST] Exit code: {code}")
raise AssertionError(
f"{cmd_repr!r} failed with exit code {code}. "
"Scroll up to inspect the output printed before failure."
) from exc
# exit code 0 is success
finally:
os.chdir(original_cwd)
sys.argv = original_argv
def test_code_workspace_for_pkgmgr(self) -> None:
"""
Run: pkgmgr code pkgmgr
"""
self._run_pkgmgr_tools_command(["code", "pkgmgr"])
def test_explore_pkgmgr(self) -> None:
"""
Run: pkgmgr explore pkgmgr
"""
self._run_pkgmgr_tools_command(["explore", "pkgmgr"])
def test_terminal_pkgmgr(self) -> None:
"""
Run: pkgmgr terminal pkgmgr
"""
self._run_pkgmgr_tools_command(["terminal", "pkgmgr"])
if __name__ == "__main__":
unittest.main()

View File

@@ -70,8 +70,14 @@ class TestCliVersion(unittest.TestCase):
)
self.mock_load_config = self._patch_load_config.start()
# Patch get_selected_repos so that 'version' operates on our temp dir
def _fake_selected_repos(all_flag: bool, repos: List[dict], identifiers: List[str]):
# Patch get_selected_repos so that 'version' operates on our temp dir.
# In the new modular CLI this function is used inside
# pkgmgr.cli_core.dispatch, so we patch it there.
def _fake_selected_repos(
all_flag: bool,
repos: List[dict],
identifiers: List[str],
):
# We always return exactly one "repository" whose directory is the temp dir.
return [
{
@@ -83,7 +89,8 @@ class TestCliVersion(unittest.TestCase):
]
self._patch_get_selected_repos = mock.patch(
"pkgmgr.cli.get_selected_repos", side_effect=_fake_selected_repos
"pkgmgr.cli_core.dispatch.get_selected_repos",
side_effect=_fake_selected_repos,
)
self.mock_get_selected_repos = self._patch_get_selected_repos.start()
@@ -125,7 +132,10 @@ class TestCliVersion(unittest.TestCase):
f.write(content)
return path
def _run_cli_version_and_capture(self, extra_args: List[str] | None = None) -> str:
def _run_cli_version_and_capture(
self,
extra_args: List[str] | None = None,
) -> str:
"""
Run 'pkgmgr version [extra_args]' via cli.main() and return captured stdout.
"""
@@ -158,9 +168,9 @@ class TestCliVersion(unittest.TestCase):
# Arrange: pyproject.toml with version 1.2.3
self._write_pyproject("1.2.3")
# Arrange: mock git tags
# Arrange: mock git tags used by handle_version
with mock.patch(
"pkgmgr.git_utils.get_tags",
"pkgmgr.cli_core.commands.version.get_tags",
return_value=["v1.2.0", "v1.2.3", "v1.0.0"],
):
# Act
@@ -192,7 +202,7 @@ class TestCliVersion(unittest.TestCase):
# Arrange: mock git tags (latest is 1.2.3)
with mock.patch(
"pkgmgr.git_utils.get_tags",
"pkgmgr.cli_core.commands.version.get_tags",
return_value=["v1.2.3"],
):
stdout = self._run_cli_version_and_capture()
@@ -218,9 +228,9 @@ class TestCliVersion(unittest.TestCase):
# Arrange: pyproject.toml exists
self._write_pyproject("0.0.1")
# Arrange: no tags returned
# Arrange: no tags returned (again: patch handle_version's get_tags)
with mock.patch(
"pkgmgr.git_utils.get_tags",
"pkgmgr.cli_core.commands.version.get_tags",
return_value=[],
):
stdout = self._run_cli_version_and_capture()