Extend 'pkgmgr version' command with multi-source version detection (pyproject, flake, PKGBUILD, debian, spec, AnsibleGalaxy), implement SemVer parsing, consistency warnings, full E2E + unit test coverage.

Ref: https://chatgpt.com/share/6936ef7e-ad5c-800f-96b2-e5d0f32b39ca
This commit is contained in:
Kevin Veen-Birkenbach
2025-12-08 16:32:38 +01:00
parent a5aaacc8d0
commit 0933e73e1c
4 changed files with 1141 additions and 144 deletions

View File

@@ -4,8 +4,8 @@ import os
import yaml
import argparse
import json
import os
import sys
from typing import Optional
# Define configuration file paths.
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
@@ -33,7 +33,7 @@ from pkgmgr.update_repos import update_repos
# Commands proxied by package-manager
PROXY_COMMANDS = {
"git":[
"git": [
"pull",
"push",
"diff",
@@ -44,22 +44,23 @@ PROXY_COMMANDS = {
"reset",
"revert",
"rebase",
"commit"
"commit",
],
"docker":[
"docker": [
"start",
"stop",
"build"
"build",
],
"docker compose":[
"docker compose": [
"up",
"down",
"exec",
"ps",
"restart",
]
],
}
class SortedSubParsersAction(argparse._SubParsersAction):
def add_parser(self, name, **kwargs):
parser = super().add_parser(name, **kwargs)
@@ -67,12 +68,16 @@ class SortedSubParsersAction(argparse._SubParsersAction):
self._choices_actions.sort(key=lambda a: a.dest)
return parser
# Main program.
def main() -> None:
CONFIG_MERGED = load_config(USER_CONFIG_PATH)
REPOSITORIES_BASE_DIR = os.path.expanduser(CONFIG_MERGED["directories"]["repositories"])
REPOSITORIES_BASE_DIR = os.path.expanduser(
CONFIG_MERGED["directories"]["repositories"]
)
ALL_REPOSITORIES = CONFIG_MERGED["repositories"]
BINARIES_DIRECTORY = os.path.expanduser(CONFIG_MERGED["directories"]["binaries"])
BINARIES_DIRECTORY = os.path.expanduser(
CONFIG_MERGED["directories"]["binaries"]
)
description_text = """\
\033[1;32mPackage Manager 🤖📦\033[0m
\033[3mKevin's Package Manager is a multi-repository, multi-package, and multi-format
@@ -121,23 +126,52 @@ For detailed help on each command, use:
\033[1mpkgmgr <command> --help\033[0m
"""
parser = argparse.ArgumentParser(description=description_text,formatter_class=argparse.RawTextHelpFormatter)
subparsers = parser.add_subparsers(dest="command", help="Subcommands", action=SortedSubParsersAction)
parser = argparse.ArgumentParser(
description=description_text,
formatter_class=argparse.RawTextHelpFormatter,
)
subparsers = parser.add_subparsers(
dest="command", help="Subcommands", action=SortedSubParsersAction
)
def add_identifier_arguments(subparser):
subparser.add_argument(
"identifiers",
nargs="*",
help="Identifier(s) for repositories. Default: Repository of current folder.",
)
help=(
"Identifier(s) for repositories. "
"Default: Repository of current folder."
),
)
subparser.add_argument(
"--all",
action="store_true",
default=False,
help="Apply the subcommand to all repositories in the config. Some subcommands ask for confirmation. If you want to give this confirmation for all repositories, pipe 'yes'. E.g: yes | pkgmgr {subcommand} --all"
)
subparser.add_argument("--preview", action="store_true", help="Preview changes without executing commands")
subparser.add_argument("--list", action="store_true", help="List affected repositories (with preview or status)")
subparser.add_argument("-a", "--args", nargs=argparse.REMAINDER, dest="extra_args", help="Additional parameters to be attached.",default=[])
"--all",
action="store_true",
default=False,
help=(
"Apply the subcommand to all repositories in the config. "
"Some subcommands ask for confirmation. If you want to give this "
"confirmation for all repositories, pipe 'yes'. E.g: "
"yes | pkgmgr {subcommand} --all"
),
)
subparser.add_argument(
"--preview",
action="store_true",
help="Preview changes without executing commands",
)
subparser.add_argument(
"--list",
action="store_true",
help="List affected repositories (with preview or status)",
)
subparser.add_argument(
"-a",
"--args",
nargs=argparse.REMAINDER,
dest="extra_args",
help="Additional parameters to be attached.",
default=[],
)
def add_install_update_arguments(subparser):
add_identifier_arguments(subparser)
@@ -162,127 +196,248 @@ For detailed help on each command, use:
"--clone-mode",
choices=["ssh", "https", "shallow"],
default="ssh",
help="Specify the clone mode: ssh, https, or shallow (HTTPS shallow clone; default: ssh)",
help=(
"Specify the clone mode: ssh, https, or shallow "
"(HTTPS shallow clone; default: ssh)"
),
)
install_parser = subparsers.add_parser("install", help="Setup repository/repositories alias links to executables")
install_parser = subparsers.add_parser(
"install",
help="Setup repository/repositories alias links to executables",
)
add_install_update_arguments(install_parser)
update_parser = subparsers.add_parser("update", help="Update (pull + install) repository/repositories")
update_parser = subparsers.add_parser(
"update", help="Update (pull + install) repository/repositories"
)
add_install_update_arguments(update_parser)
update_parser.add_argument("--system", action="store_true", help="Include system update commands")
update_parser.add_argument(
"--system",
action="store_true",
help="Include system update commands",
)
deinstall_parser = subparsers.add_parser("deinstall", help="Remove alias links to repository/repositories")
deinstall_parser = subparsers.add_parser(
"deinstall", help="Remove alias links to repository/repositories"
)
add_identifier_arguments(deinstall_parser)
delete_parser = subparsers.add_parser("delete", help="Delete repository/repositories alias links to executables")
delete_parser = subparsers.add_parser(
"delete",
help="Delete repository/repositories alias links to executables",
)
add_identifier_arguments(delete_parser)
# Add the 'create' subcommand (with existing identifier arguments)
create_parser = subparsers.add_parser(
"create",
help="Create new repository entries: add them to the config if not already present, initialize the local repository, and push remotely if --remote is set."
help=(
"Create new repository entries: add them to the config if not "
"already present, initialize the local repository, and push "
"remotely if --remote is set."
),
)
# Reuse the common identifier arguments
add_identifier_arguments(create_parser)
create_parser.add_argument(
"--remote",
action="store_true",
help="If set, add the remote and push the initial commit."
help="If set, add the remote and push the initial commit.",
)
status_parser = subparsers.add_parser("status", help="Show status for repository/repositories or system")
status_parser = subparsers.add_parser(
"status", help="Show status for repository/repositories or system"
)
add_identifier_arguments(status_parser)
status_parser.add_argument("--system", action="store_true", help="Show system status")
status_parser.add_argument(
"--system",
action="store_true",
help="Show system status",
)
config_parser = subparsers.add_parser("config", help="Manage configuration")
config_subparsers = config_parser.add_subparsers(dest="subcommand", help="Config subcommands", required=True)
config_show = config_subparsers.add_parser("show", help="Show configuration")
config_subparsers = config_parser.add_subparsers(
dest="subcommand", help="Config subcommands", required=True
)
config_show = config_subparsers.add_parser(
"show", help="Show configuration"
)
add_identifier_arguments(config_show)
config_add = config_subparsers.add_parser("add", help="Interactively add a new repository entry")
config_edit = config_subparsers.add_parser("edit", help="Edit configuration file with nano")
config_init_parser = config_subparsers.add_parser("init", help="Initialize user configuration by scanning the base directory")
config_delete = config_subparsers.add_parser("delete", help="Delete repository entry from user config")
config_add = config_subparsers.add_parser(
"add", help="Interactively add a new repository entry"
)
config_edit = config_subparsers.add_parser(
"edit", help="Edit configuration file with nano"
)
config_init_parser = config_subparsers.add_parser(
"init",
help=(
"Initialize user configuration by scanning the base directory"
),
)
config_delete = config_subparsers.add_parser(
"delete", help="Delete repository entry from user config"
)
add_identifier_arguments(config_delete)
config_ignore = config_subparsers.add_parser("ignore", help="Set ignore flag for repository entries in user config")
config_ignore = config_subparsers.add_parser(
"ignore",
help="Set ignore flag for repository entries in user config",
)
add_identifier_arguments(config_ignore)
config_ignore.add_argument("--set", choices=["true", "false"], required=True, help="Set ignore to true or false")
path_parser = subparsers.add_parser("path", help="Print the path(s) of repository/repositories")
config_ignore.add_argument(
"--set",
choices=["true", "false"],
required=True,
help="Set ignore to true or false",
)
path_parser = subparsers.add_parser(
"path", help="Print the path(s) of repository/repositories"
)
add_identifier_arguments(path_parser)
explore_parser = subparsers.add_parser("explore", help="Open repository in Nautilus file manager")
explore_parser = subparsers.add_parser(
"explore", help="Open repository in Nautilus file manager"
)
add_identifier_arguments(explore_parser)
terminal_parser = subparsers.add_parser("terminal", help="Open repository in a new GNOME Terminal tab")
terminal_parser = subparsers.add_parser(
"terminal", help="Open repository in a new GNOME Terminal tab"
)
add_identifier_arguments(terminal_parser)
release_parser = subparsers.add_parser(
"release",
help="Create a release for repository/ies by incrementing version and updating the changelog."
help=(
"Create a release for repository/ies by incrementing version "
"and updating the changelog."
),
)
release_parser.add_argument(
"release_type",
choices=["major", "minor", "patch"],
help="Type of version increment for the release (major, minor, patch)."
help="Type of version increment for the release (major, minor, patch).",
)
release_parser.add_argument(
"-m", "--message",
"-m",
"--message",
default="",
help="Optional release message to add to the changelog and tag."
help="Optional release message to add to the changelog and tag.",
)
add_identifier_arguments(release_parser)
code_parser = subparsers.add_parser("code", help="Open repository workspace with VS Code")
add_identifier_arguments(code_parser)
list_parser = subparsers.add_parser("list", help="List all repositories with details and status")
list_parser.add_argument("--search", default="", help="Filter repositories that contain the given string")
list_parser.add_argument("--status", type=str, default="", help="Filter repositories by status (case insensitive)")
# Add the subcommand parser for "shell"
shell_parser = subparsers.add_parser("shell", help="Execute a shell command in each repository")
add_identifier_arguments(shell_parser)
shell_parser.add_argument("-c", "--command", nargs=argparse.REMAINDER, dest="shell_command", help="The shell command (and its arguments) to execute in each repository",default=[])
# Version command: like other repo commands, supports identifiers + --all
version_parser = subparsers.add_parser(
"version",
help=(
"Show version information for repository/ies "
"(git tags, pyproject.toml, flake.nix, PKGBUILD, debian, spec, Ansible Galaxy)."
),
)
add_identifier_arguments(version_parser)
make_parser = subparsers.add_parser("make", help="Executes make commands")
code_parser = subparsers.add_parser(
"code", help="Open repository workspace with VS Code"
)
add_identifier_arguments(code_parser)
list_parser = subparsers.add_parser(
"list", help="List all repositories with details and status"
)
list_parser.add_argument(
"--search",
default="",
help="Filter repositories that contain the given string",
)
list_parser.add_argument(
"--status",
type=str,
default="",
help="Filter repositories by status (case insensitive)",
)
shell_parser = subparsers.add_parser(
"shell", help="Execute a shell command in each repository"
)
add_identifier_arguments(shell_parser)
shell_parser.add_argument(
"-c",
"--command",
nargs=argparse.REMAINDER,
dest="shell_command",
help=(
"The shell command (and its arguments) to execute in each "
"repository"
),
default=[],
)
make_parser = subparsers.add_parser(
"make", help="Executes make commands"
)
add_identifier_arguments(make_parser)
make_subparsers = make_parser.add_subparsers(dest="subcommand", help="Make subcommands", required=True)
make_install = make_subparsers.add_parser("install", help="Executes the make install command")
make_subparsers = make_parser.add_subparsers(
dest="subcommand", help="Make subcommands", required=True
)
make_install = make_subparsers.add_parser(
"install", help="Executes the make install command"
)
add_identifier_arguments(make_install)
make_deinstall = make_subparsers.add_parser("deinstall", help="Executes the make deinstall command")
make_deinstall = make_subparsers.add_parser(
"deinstall", help="Executes the make deinstall command"
)
proxy_command_parsers = {}
for command, subcommands in PROXY_COMMANDS.items():
for subcommand in subcommands:
proxy_command_parsers[f"{command}_{subcommand}"] = subparsers.add_parser(
subcommand,
help=f"Proxies '{command} {subcommand}' to repository/ies",
description=f"Executes '{command} {subcommand}' for the identified repos.\nTo recieve more help execute '{command} {subcommand} --help'",
formatter_class=argparse.RawTextHelpFormatter
proxy_command_parsers[f"{command}_{subcommand}"] = (
subparsers.add_parser(
subcommand,
help=f"Proxies '{command} {subcommand}' to repository/ies",
description=(
f"Executes '{command} {subcommand}' for the "
"identified repos.\nTo recieve more help execute "
f"'{command} {subcommand} --help'"
),
formatter_class=argparse.RawTextHelpFormatter,
)
)
if subcommand in ["pull", "clone"]:
proxy_command_parsers[f"{command}_{subcommand}"].add_argument(
proxy_command_parsers[
f"{command}_{subcommand}"
].add_argument(
"--no-verification",
action="store_true",
default=False,
help="Disable verification via commit/gpg",
)
if subcommand == "clone":
proxy_command_parsers[f"{command}_{subcommand}"].add_argument(
proxy_command_parsers[
f"{command}_{subcommand}"
].add_argument(
"--clone-mode",
choices=["ssh", "https", "shallow"],
default="ssh",
help="Specify the clone mode: ssh, https, or shallow (HTTPS shallow clone; default: ssh)",
help=(
"Specify the clone mode: ssh, https, or shallow "
"(HTTPS shallow clone; default: ssh)"
),
)
add_identifier_arguments(proxy_command_parsers[f"{command}_{subcommand}"])
add_identifier_arguments(
proxy_command_parsers[f"{command}_{subcommand}"]
)
args = parser.parse_args()
# All
if args.command and not args.command in ["config","list","create"]:
selected = get_selected_repos(args.all,ALL_REPOSITORIES,args.identifiers)
# Select repositories for commands that operate on the repository list.
# (config, list, create work differently and don't use selection)
if args.command and args.command not in ["config", "list", "create"]:
selected = get_selected_repos(
args.all, ALL_REPOSITORIES, getattr(args, "identifiers", [])
)
else:
selected = []
# Proxy commands (git, docker, docker compose)
for command, subcommands in PROXY_COMMANDS.items():
for subcommand in subcommands:
if args.command == subcommand:
@@ -293,26 +448,45 @@ For detailed help on each command, use:
ALL_REPOSITORIES,
args.preview,
args.no_verification,
args.clone_mode
)
args.clone_mode,
)
elif args.command == "pull":
from pkgmgr.pull_with_verification import pull_with_verification
from pkgmgr.pull_with_verification import (
pull_with_verification,
)
pull_with_verification(
selected,
REPOSITORIES_BASE_DIR,
ALL_REPOSITORIES,
args.extra_args,
args.no_verification,
args.preview
)
args.preview,
)
else:
exec_proxy_command(command,selected, REPOSITORIES_BASE_DIR, ALL_REPOSITORIES, args.command, args.extra_args, args.preview)
exit(0)
exec_proxy_command(
command,
selected,
REPOSITORIES_BASE_DIR,
ALL_REPOSITORIES,
args.command,
args.extra_args,
args.preview,
)
sys.exit(0)
if args.command in ["make"]:
exec_proxy_command(args.command,selected, REPOSITORIES_BASE_DIR, ALL_REPOSITORIES, args.subcommand, args.extra_args, args.preview)
exit(0)
exec_proxy_command(
args.command,
selected,
REPOSITORIES_BASE_DIR,
ALL_REPOSITORIES,
args.subcommand,
args.extra_args,
args.preview,
)
sys.exit(0)
# Dispatch commands.
if args.command == "install":
install_repos(
@@ -325,24 +499,47 @@ For detailed help on each command, use:
args.quiet,
args.clone_mode,
args.dependencies,
)
)
elif args.command == "create":
from pkgmgr.create_repo import create_repo
# If no identifiers are provided, you can decide to either use the repository of the current folder
# or prompt the user to supply at least one identifier.
if not args.identifiers:
print("No identifiers provided. Please specify at least one identifier in the format provider/account/repository.")
print(
"No identifiers provided. Please specify at least one identifier "
"in the format provider/account/repository."
)
sys.exit(1)
else:
selected = get_selected_repos(True,ALL_REPOSITORIES,None)
selected = get_selected_repos(True, ALL_REPOSITORIES, None)
for identifier in args.identifiers:
create_repo(identifier, CONFIG_MERGED, USER_CONFIG_PATH, BINARIES_DIRECTORY, remote=args.remote, preview=args.preview)
create_repo(
identifier,
CONFIG_MERGED,
USER_CONFIG_PATH,
BINARIES_DIRECTORY,
remote=args.remote,
preview=args.preview,
)
elif args.command == "list":
list_repositories(ALL_REPOSITORIES, REPOSITORIES_BASE_DIR, BINARIES_DIRECTORY, search_filter=args.search, status_filter=args.status)
list_repositories(
ALL_REPOSITORIES,
REPOSITORIES_BASE_DIR,
BINARIES_DIRECTORY,
search_filter=args.search,
status_filter=args.status,
)
elif args.command == "deinstall":
deinstall_repos(selected,REPOSITORIES_BASE_DIR, BINARIES_DIRECTORY, ALL_REPOSITORIES, preview=args.preview)
deinstall_repos(
selected,
REPOSITORIES_BASE_DIR,
BINARIES_DIRECTORY,
ALL_REPOSITORIES,
preview=args.preview,
)
elif args.command == "delete":
delete_repos(selected,REPOSITORIES_BASE_DIR, ALL_REPOSITORIES, preview=args.preview)
delete_repos(
selected, REPOSITORIES_BASE_DIR, ALL_REPOSITORIES, preview=args.preview
)
elif args.command == "update":
update_repos(
selected,
@@ -354,39 +551,137 @@ For detailed help on each command, use:
args.preview,
args.quiet,
args.dependencies,
args.clone_mode
args.clone_mode,
)
elif args.command == "release":
if not selected:
print("No repositories selected for release.")
exit(1)
# Import the release function from pkgmgr/release.py
sys.exit(1)
from pkgmgr import release as rel
# Save the original working directory.
original_dir = os.getcwd()
for repo in selected:
# Determine the repository directory
repo_dir = repo.get("directory")
repo_dir: Optional[str] = repo.get("directory")
if not repo_dir:
from pkgmgr.get_repo_dir import get_repo_dir
repo_dir = get_repo_dir(REPOSITORIES_BASE_DIR, repo)
# Dynamically determine the file paths for pyproject.toml and CHANGELOG.md.
pyproject_path = os.path.join(repo_dir, "pyproject.toml")
changelog_path = os.path.join(repo_dir, "CHANGELOG.md")
print(f"Releasing repository '{repo.get('repository')}' in '{repo_dir}'...")
# Change into the repository directory so Git commands run in the right context.
print(
f"Releasing repository '{repo.get('repository')}' in '{repo_dir}'..."
)
os.chdir(repo_dir)
# Call the release function with the proper parameters.
rel.release(
pyproject_path=pyproject_path,
changelog_path=changelog_path,
release_type=args.release_type,
message=args.message
message=args.message,
)
# Change back to the original working directory.
os.chdir(original_dir)
elif args.command == "version":
from pkgmgr.git_utils import get_tags
from pkgmgr.versioning import SemVer, find_latest_version
from pkgmgr.version_sources import (
read_pyproject_version,
read_flake_version,
read_pkgbuild_version,
read_debian_changelog_version,
read_spec_version,
read_ansible_galaxy_version,
)
from pkgmgr.get_repo_dir import get_repo_dir
repo_list = selected
if not repo_list:
print("No repositories selected for version.")
sys.exit(1)
print("pkgmgr version info")
print("====================")
for repo in repo_list:
# Resolve repository directory
repo_dir = repo.get("directory")
if not repo_dir:
try:
repo_dir = get_repo_dir(REPOSITORIES_BASE_DIR, repo)
except Exception:
repo_dir = None
# If no local clone exists, skip gracefully with info message
if not repo_dir or not os.path.isdir(repo_dir):
identifier = get_repo_identifier(repo, ALL_REPOSITORIES)
print(f"\nRepository: {identifier}")
print("----------------------------------------")
print(
"[INFO] Skipped: repository directory does not exist "
"locally, version detection is not possible."
)
continue
print(f"\nRepository: {repo_dir}")
print("----------------------------------------")
# 1) Git tags (SemVer)
try:
tags = get_tags(cwd=repo_dir)
except Exception as exc:
print(f"[ERROR] Could not read git tags: {exc}")
tags = []
latest_tag_info = find_latest_version(tags) if tags else None
if latest_tag_info is None:
latest_tag_str = None
latest_ver = None
else:
latest_tag_str, latest_ver = latest_tag_info
# 2) Packaging / metadata sources
pyproject_version = read_pyproject_version(repo_dir)
flake_version = read_flake_version(repo_dir)
pkgbuild_version = read_pkgbuild_version(repo_dir)
debian_version = read_debian_changelog_version(repo_dir)
spec_version = read_spec_version(repo_dir)
ansible_version = read_ansible_galaxy_version(repo_dir)
# 3) Print version summary
if latest_ver is not None:
print(
f"Git (latest SemVer tag): {latest_tag_str} (parsed: {latest_ver})"
)
else:
print("Git (latest SemVer tag): <none found>")
print(f"pyproject.toml: {pyproject_version or '<not found>'}")
print(f"flake.nix: {flake_version or '<not found>'}")
print(f"PKGBUILD: {pkgbuild_version or '<not found>'}")
print(f"debian/changelog: {debian_version or '<not found>'}")
print(f"package-manager.spec: {spec_version or '<not found>'}")
print(f"Ansible Galaxy meta: {ansible_version or '<not found>'}")
# 4) Consistency hint (Git tag vs. pyproject)
if latest_ver is not None and pyproject_version is not None:
try:
file_ver = SemVer.parse(pyproject_version)
if file_ver != latest_ver:
print(
f"[WARN] Version mismatch: Git={latest_ver}, pyproject={file_ver}"
)
except ValueError:
print(
f"[WARN] pyproject version {pyproject_version!r} is not valid SemVer."
)
elif args.command == "status":
status_repos(selected,REPOSITORIES_BASE_DIR, ALL_REPOSITORIES, args.extra_args, list_only=args.list, system_status=args.system, preview=args.preview)
status_repos(
selected,
REPOSITORIES_BASE_DIR,
ALL_REPOSITORIES,
args.extra_args,
list_only=args.list,
system_status=args.system,
preview=args.preview,
)
elif args.command == "explore":
for repository in selected:
run_command(f"nautilus {repository['directory']} & disown")
@@ -394,20 +689,24 @@ For detailed help on each command, use:
if not selected:
print("No repositories selected.")
else:
identifiers = [get_repo_identifier(repo, ALL_REPOSITORIES) for repo in selected]
identifiers = [
get_repo_identifier(repo, ALL_REPOSITORIES) for repo in selected
]
sorted_identifiers = sorted(identifiers)
workspace_name = "_".join(sorted_identifiers) + ".code-workspace"
workspaces_dir = os.path.expanduser(CONFIG_MERGED.get("directories").get("workspaces"))
workspaces_dir = os.path.expanduser(
CONFIG_MERGED.get("directories").get("workspaces")
)
os.makedirs(workspaces_dir, exist_ok=True)
workspace_file = os.path.join(workspaces_dir, workspace_name)
folders = []
for repository in selected:
folders.append({"path": repository["directory"]})
workspace_data = {
"folders": folders,
"settings": {}
"settings": {},
}
if not os.path.exists(workspace_file):
with open(workspace_file, "w") as f:
@@ -418,19 +717,24 @@ For detailed help on each command, use:
run_command(f'code "{workspace_file}"')
elif args.command == "terminal":
for repository in selected:
run_command(f'gnome-terminal --tab --working-directory="{repository["directory"]}"')
run_command(
f'gnome-terminal --tab --working-directory="{repository["directory"]}"'
)
elif args.command == "path":
for repository in selected:
print(repository["directory"])
elif args.command == "shell":
if not args.shell_command:
print("No shell command specified.")
exit(2)
# Join the provided shell command parts into one string.
sys.exit(2)
command_to_run = " ".join(args.shell_command)
for repository in selected:
print(f"Executing in '{repository['directory']}': {command_to_run}")
run_command(command_to_run, cwd=repository["directory"], preview=args.preview)
print(
f"Executing in '{repository['directory']}': {command_to_run}"
)
run_command(
command_to_run, cwd=repository["directory"], preview=args.preview
)
elif args.command == "config":
if args.subcommand == "show":
if args.all or (not args.identifiers):
@@ -438,52 +742,82 @@ For detailed help on each command, use:
else:
selected = resolve_repos(args.identifiers, ALL_REPOSITORIES)
if selected:
show_config(selected, USER_CONFIG_PATH, full_config=False)
show_config(
selected, USER_CONFIG_PATH, full_config=False
)
elif args.subcommand == "add":
interactive_add(CONFIG_MERGED,USER_CONFIG_PATH)
interactive_add(CONFIG_MERGED, USER_CONFIG_PATH)
elif args.subcommand == "edit":
"""Open the user configuration file in nano."""
run_command(f"nano {USER_CONFIG_PATH}")
elif args.subcommand == "init":
if os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, 'r') as f:
with open(USER_CONFIG_PATH, "r") as f:
user_config = yaml.safe_load(f) or {}
else:
user_config = {"repositories": []}
config_init(user_config, CONFIG_MERGED, BINARIES_DIRECTORY, USER_CONFIG_PATH)
config_init(
user_config,
CONFIG_MERGED,
BINARIES_DIRECTORY,
USER_CONFIG_PATH,
)
elif args.subcommand == "delete":
# Load user config from USER_CONFIG_PATH.
if os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, 'r') as f:
with open(USER_CONFIG_PATH, "r") as f:
user_config = yaml.safe_load(f) or {"repositories": []}
else:
user_config = {"repositories": []}
if args.all or not args.identifiers:
print("You must specify identifiers to delete.")
else:
to_delete = resolve_repos(args.identifiers, user_config.get("repositories", []))
new_repos = [entry for entry in user_config.get("repositories", []) if entry not in to_delete]
to_delete = resolve_repos(
args.identifiers, user_config.get("repositories", [])
)
new_repos = [
entry
for entry in user_config.get("repositories", [])
if entry not in to_delete
]
user_config["repositories"] = new_repos
save_user_config(user_config,USER_CONFIG_PATH)
print(f"Deleted {len(to_delete)} entries from user config.")
save_user_config(user_config, USER_CONFIG_PATH)
print(
f"Deleted {len(to_delete)} entries from user config."
)
elif args.subcommand == "ignore":
# Load user config from USER_CONFIG_PATH.
if os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, 'r') as f:
with open(USER_CONFIG_PATH, "r") as f:
user_config = yaml.safe_load(f) or {"repositories": []}
else:
user_config = {"repositories": []}
if args.all or not args.identifiers:
print("You must specify identifiers to modify ignore flag.")
print(
"You must specify identifiers to modify ignore flag."
)
else:
to_modify = resolve_repos(args.identifiers, user_config.get("repositories", []))
to_modify = resolve_repos(
args.identifiers, user_config.get("repositories", [])
)
for entry in user_config["repositories"]:
key = (entry.get("provider"), entry.get("account"), entry.get("repository"))
key = (
entry.get("provider"),
entry.get("account"),
entry.get("repository"),
)
for mod in to_modify:
mod_key = (mod.get("provider"), mod.get("account"), mod.get("repository"))
mod_key = (
mod.get("provider"),
mod.get("account"),
mod.get("repository"),
)
if key == mod_key:
entry["ignore"] = (args.set == "true")
print(f"Set ignore for {key} to {entry['ignore']}")
save_user_config(user_config,USER_CONFIG_PATH)
entry["ignore"] = args.set == "true"
print(
f"Set ignore for {key} to {entry['ignore']}"
)
save_user_config(user_config, USER_CONFIG_PATH)
else:
parser.print_help()
if __name__ == "__main__":
main()

235
pkgmgr/version_sources.py Normal file
View File

@@ -0,0 +1,235 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Helpers to extract version information from various packaging files.
All functions take a repository directory and return either a version
string or None if the corresponding file or version field is missing.
Supported sources:
- pyproject.toml (PEP 621, [project].version)
- flake.nix (version = "X.Y.Z";)
- PKGBUILD (pkgver / pkgrel)
- debian/changelog (first entry line: package (version) ...)
- RPM spec file (package-manager.spec: Version / Release)
- Ansible Galaxy (galaxy.yml or meta/main.yml)
"""
from __future__ import annotations
import os
import re
from typing import Optional
import yaml
def read_pyproject_version(repo_dir: str) -> Optional[str]:
"""
Read the version from pyproject.toml in repo_dir, if present.
Expects a PEP 621-style [project] table with a 'version' field.
Returns the version string or None.
"""
path = os.path.join(repo_dir, "pyproject.toml")
if not os.path.exists(path):
return None
try:
try:
import tomllib # Python 3.11+
except ModuleNotFoundError: # pragma: no cover
tomllib = None
if tomllib is None:
return None
with open(path, "rb") as f:
data = tomllib.load(f)
project = data.get("project", {})
if isinstance(project, dict):
version = project.get("version")
if isinstance(version, str):
return version.strip() or None
except Exception:
# Intentionally swallow errors and fall back to None.
return None
return None
def read_flake_version(repo_dir: str) -> Optional[str]:
    """
    Read the version from flake.nix in repo_dir, if present.

    Scans the file for the first assignment of the form:
        version = "1.2.3";
    and returns the quoted string, or None if the file is missing,
    unreadable, or contains no such assignment.
    """
    flake_path = os.path.join(repo_dir, "flake.nix")
    if not os.path.exists(flake_path):
        return None
    try:
        with open(flake_path, "r", encoding="utf-8") as handle:
            contents = handle.read()
    except Exception:
        return None
    found = re.search(r'version\s*=\s*"([^"]+)"', contents)
    if found is None:
        return None
    # An all-whitespace value counts as "not found".
    return found.group(1).strip() or None
def read_pkgbuild_version(repo_dir: str) -> Optional[str]:
    """
    Read the version from PKGBUILD in repo_dir, if present.

    Expects lines such as:
        pkgver=1.2.3
        pkgrel=1
    Returns "pkgver-pkgrel" when both are set, just the pkgver when
    pkgrel is absent or empty, or None if the file or pkgver is missing.
    """
    pkgbuild_path = os.path.join(repo_dir, "PKGBUILD")
    if not os.path.exists(pkgbuild_path):
        return None
    try:
        with open(pkgbuild_path, "r", encoding="utf-8") as handle:
            contents = handle.read()
    except Exception:
        return None
    version_match = re.search(r"^pkgver\s*=\s*(.+)$", contents, re.MULTILINE)
    if version_match is None:
        return None
    version = version_match.group(1).strip()
    release_match = re.search(r"^pkgrel\s*=\s*(.+)$", contents, re.MULTILINE)
    if release_match is not None:
        release = release_match.group(1).strip()
        if release:
            return f"{version}-{release}"
    return version or None
def read_debian_changelog_version(repo_dir: str) -> Optional[str]:
    """
    Read the latest Debian version from debian/changelog, if present.

    Only the first non-empty line is inspected; it typically looks like:
        package-name (1.2.3-1) unstable; urgency=medium
    The text inside the first parentheses is returned, or None if the
    file is missing, unreadable, or that line has no parenthesised part.
    """
    changelog_path = os.path.join(repo_dir, "debian", "changelog")
    if not os.path.exists(changelog_path):
        return None
    try:
        with open(changelog_path, "r", encoding="utf-8") as handle:
            for raw_line in handle:
                stripped = raw_line.strip()
                if not stripped:
                    continue
                found = re.search(r"\(([^)]+)\)", stripped)
                if found:
                    return found.group(1).strip() or None
                # The first non-empty line carries the version header; if
                # it has no parentheses, there is nothing more to find.
                break
    except Exception:
        return None
    return None
def read_spec_version(
    repo_dir: str,
    spec_filename: str = "package-manager.spec",
) -> Optional[str]:
    """
    Read the version from an RPM spec file in repo_dir.

    By default the file name 'package-manager.spec' is used (the historic
    hard-coded value); callers may pass a different ``spec_filename`` to
    read any spec file. Expects lines like:
        Version: 1.2.3
        Release: 1%{?dist}
    Returns either "1.2.3-1" (if Release is present) or "1.2.3".
    Any RPM macro suffix like '%{?dist}' is stripped from the release.

    :param repo_dir: repository directory to look in
    :param spec_filename: name of the spec file inside repo_dir
    :return: version string or None if file/Version is missing
    """
    path = os.path.join(repo_dir, spec_filename)
    if not os.path.exists(path):
        return None
    try:
        with open(path, "r", encoding="utf-8") as f:
            text = f.read()
    except Exception:
        return None
    ver_match = re.search(r"^Version:\s*(.+)$", text, re.MULTILINE)
    if not ver_match:
        return None
    version = ver_match.group(1).strip()
    rel_match = re.search(r"^Release:\s*(.+)$", text, re.MULTILINE)
    if rel_match:
        release_raw = rel_match.group(1).strip()
        # Strip common RPM macro suffix like %... (e.g. 1%{?dist})
        release = release_raw.split("%", 1)[0].strip()
        # Also strip anything after first whitespace, just in case
        release = release.split(" ", 1)[0].strip()
        if release:
            return f"{version}-{release}"
    return version or None
def read_ansible_galaxy_version(repo_dir: str) -> Optional[str]:
    """
    Detect the version declared in Ansible Galaxy metadata, if any.

    Checked in order:
      1. galaxy.yml in the repository root (modern roles/collections),
         using the top-level 'version' key.
      2. meta/main.yml (legacy roles), preferring galaxy_info.version
         and falling back to a top-level 'version' key.

    Only non-empty string values are accepted; anything else (including
    parse errors) yields None.
    """
    galaxy_file = os.path.join(repo_dir, "galaxy.yml")
    if os.path.exists(galaxy_file):
        try:
            with open(galaxy_file, "r", encoding="utf-8") as handle:
                payload = yaml.safe_load(handle) or {}
            candidate = payload.get("version")
            if isinstance(candidate, str) and candidate.strip():
                return candidate.strip()
        except Exception:
            # Ignore parse errors and fall through to meta/main.yml
            pass

    meta_file = os.path.join(repo_dir, "meta", "main.yml")
    if not os.path.exists(meta_file):
        return None
    try:
        with open(meta_file, "r", encoding="utf-8") as handle:
            payload = yaml.safe_load(handle) or {}
        # Preferred location: galaxy_info.version
        info = payload.get("galaxy_info") or {}
        if isinstance(info, dict):
            candidate = info.get("version")
            if isinstance(candidate, str) and candidate.strip():
                return candidate.strip()
        # Fallback: top-level 'version'
        candidate = payload.get("version")
        if isinstance(candidate, str) and candidate.strip():
            return candidate.strip()
    except Exception:
        pass
    return None

View File

@@ -0,0 +1,187 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
End-to-end tests for the `pkgmgr version` command.
We verify three usage patterns:
1) pkgmgr version
- Run from inside the package-manager repository
so that "current repository" resolution works.
2) pkgmgr version pkgmgr
- Run from inside the package-manager repository
with an explicit identifier.
3) pkgmgr version --all
- Run from the project root (or wherever the tests are started),
ensuring that the --all flag does not depend on the current
working directory.
"""
from __future__ import annotations
import os
import runpy
import sys
import unittest
from typing import List
from pkgmgr.load_config import load_config
# Absolute path of the project root (two levels above this test module,
# i.e. the repository where main.py lives).
PROJECT_ROOT = os.path.abspath(
    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)
)
# Merged configuration file used to locate managed repositories.
CONFIG_PATH = os.path.join(PROJECT_ROOT, "config", "config.yaml")
def _load_pkgmgr_repo_dir() -> str:
    """
    Resolve the directory of the package-manager repository from the
    merged pkgmgr configuration (defaults + user config).

    A repository entry matches when repository == "package-manager" or
    alias == "pkgmgr". Its directory comes from an explicit 'directory'
    field when present; otherwise it is reconstructed from the base
    repositories directory plus provider/account/repository (or just the
    repository name as a last resort).

    Raises:
        RuntimeError: when no matching repository entry is found.
    """
    cfg = load_config(CONFIG_PATH) or {}
    base_dir = os.path.expanduser(
        cfg.get("directories", {}).get("repositories", "")
    )
    for entry in cfg.get("repositories", []) or []:
        name = (entry.get("repository") or "").strip()
        alias = (entry.get("alias") or "").strip()
        if name != "package-manager" and alias != "pkgmgr":
            continue
        # Prefer an explicitly configured directory.
        directory = entry.get("directory")
        if not directory:
            provider = (entry.get("provider") or "").strip()
            account = (entry.get("account") or "").strip()
            if provider and account and name:
                # Best-effort reconstruction of the conventional layout.
                directory = os.path.join(base_dir, provider, account, name)
            elif name:
                # Fallback: place directly under the base repo dir.
                directory = os.path.join(base_dir, name)
            else:
                # Nothing usable to build a path from; keep searching.
                continue
        return os.path.expanduser(directory)
    raise RuntimeError(
        "Could not locate a 'package-manager' repository entry in the merged "
        "configuration (no entry with repository='package-manager' or "
        "alias='pkgmgr' found)."
    )
class TestIntegrationVersionCommands(unittest.TestCase):
    """
    E2E tests for the pkgmgr 'version' command.
    Important:
    - Any failing SystemExit is turned into a test failure and we print
      helpful diagnostics (command, working directory, exit code).
    - Exit codes 0 AND None (plain sys.exit()) both count as success.
    """
    @classmethod
    def setUpClass(cls) -> None:
        # Determine the package-manager repo directory from the merged config
        cls.pkgmgr_repo_dir = _load_pkgmgr_repo_dir()
    # ------------------------------------------------------------------
    # Helper
    # ------------------------------------------------------------------
    def _run_pkgmgr_version(self, extra_args, cwd: str | None = None) -> None:
        """
        Run `pkgmgr version` with optional extra arguments and
        an optional working directory override.
        Any failing exit status is turned into an AssertionError
        with additional diagnostics.
        """
        if extra_args is None:
            extra_args = []
        cmd_repr = "pkgmgr version " + " ".join(extra_args)
        original_argv = list(sys.argv)
        original_cwd = os.getcwd()
        try:
            if cwd is not None:
                os.chdir(cwd)
            sys.argv = ["pkgmgr", "version"] + extra_args
            try:
                runpy.run_module("main", run_name="__main__")
            except SystemExit as exc:
                # BUGFIX: SystemExit with code None (plain sys.exit())
                # signals success just like code 0. Previously None was
                # stringified to "None" and misreported as a failure.
                if exc.code not in (None, 0):
                    code = exc.code if isinstance(exc.code, int) else str(exc.code)
                    print("[TEST] SystemExit caught while running pkgmgr version")
                    print(f"[TEST] Command : {cmd_repr}")
                    print(f"[TEST] Working directory: {os.getcwd()}")
                    print(f"[TEST] Exit code: {code}")
                    raise AssertionError(
                        f"{cmd_repr!r} failed with exit code {code}. "
                        "Scroll up to inspect the output printed before failure."
                    ) from exc
                # exit code 0 / None is considered success
        finally:
            # Restore environment
            os.chdir(original_cwd)
            sys.argv = original_argv
    # ------------------------------------------------------------------
    # Tests
    # ------------------------------------------------------------------
    def test_version_current_repo(self) -> None:
        """
        Run: pkgmgr version
        We run this from inside the package-manager repository so that
        "current repository" resolution works and no identifier lookup
        for 'src' (or similar) is performed.
        """
        self._run_pkgmgr_version(extra_args=[], cwd=self.pkgmgr_repo_dir)
    def test_version_specific_identifier(self) -> None:
        """
        Run: pkgmgr version pkgmgr
        Also executed from inside the package-manager repository, but
        with an explicit identifier.
        """
        self._run_pkgmgr_version(extra_args=["pkgmgr"], cwd=self.pkgmgr_repo_dir)
    def test_version_all_repositories(self) -> None:
        """
        Run: pkgmgr version --all
        This does not depend on the current working directory, but we
        run it from PROJECT_ROOT for clarity and to mirror typical usage
        in CI.
        """
        self._run_pkgmgr_version(extra_args=["--all"], cwd=PROJECT_ROOT)
# Allow running this E2E test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,241 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Unit tests for the pkgmgr CLI (version command).
These tests focus on the 'version' subcommand and its interaction with:
- git tags (SemVer),
- pyproject.toml version,
- and the mismatch warning logic.
Important:
- Uses only the Python standard library unittest framework.
- Does not use pytest.
- Does not rely on a real git repository or real config files.
"""
from __future__ import annotations
import io
import os
import sys
import tempfile
import textwrap
import unittest
from contextlib import redirect_stdout
from typing import Any, Dict, List
from unittest import mock
from pkgmgr import cli
def _fake_config() -> Dict[str, Any]:
"""
Provide a minimal configuration dict sufficient for cli.main()
to start without touching real config files.
"""
return {
"directories": {
"repositories": "/tmp/pkgmgr-repos",
"binaries": "/tmp/pkgmgr-bin",
"workspaces": "/tmp/pkgmgr-workspaces",
},
# The actual list of repositories is not used directly by the tests,
# because we mock get_selected_repos(). It must exist, though.
"repositories": [],
}
class TestCliVersion(unittest.TestCase):
    """
    Tests for the 'pkgmgr version' command.
    Each test:
    - Runs in a temporary working directory.
    - Uses a fake configuration via load_config().
    - Uses a mocked get_selected_repos() that returns a single repo
      pointing to the temporary directory.
    """
    def setUp(self) -> None:
        # Create a temporary directory and switch into it
        self._tmp_dir = tempfile.TemporaryDirectory()
        self._old_cwd = os.getcwd()
        os.chdir(self._tmp_dir.name)
        # Patch load_config so cli.main() does not read real config files
        self._patch_load_config = mock.patch(
            "pkgmgr.cli.load_config", return_value=_fake_config()
        )
        self.mock_load_config = self._patch_load_config.start()
        # Patch get_selected_repos so that 'version' operates on our temp dir
        def _fake_selected_repos(all_flag: bool, repos: List[dict], identifiers: List[str]):
            # We always return exactly one "repository" whose directory is the temp dir.
            return [
                {
                    "provider": "github.com",
                    "account": "test",
                    "repository": "pkgmgr-test",
                    "directory": self._tmp_dir.name,
                }
            ]
        self._patch_get_selected_repos = mock.patch(
            "pkgmgr.cli.get_selected_repos", side_effect=_fake_selected_repos
        )
        self.mock_get_selected_repos = self._patch_get_selected_repos.start()
        # Keep a reference to the original sys.argv, so we can restore it
        self._old_argv = list(sys.argv)
    def tearDown(self) -> None:
        # Restore sys.argv
        sys.argv = self._old_argv
        # Stop all patches
        self._patch_get_selected_repos.stop()
        self._patch_load_config.stop()
        # Restore working directory
        os.chdir(self._old_cwd)
        # Cleanup temp directory
        self._tmp_dir.cleanup()
    # ------------------------------------------------------------
    # Helpers
    # ------------------------------------------------------------
    def _write_pyproject(self, version: str) -> str:
        """
        Write a minimal PEP 621-style pyproject.toml into the temp directory.
        Returns the path of the written file.
        """
        content = textwrap.dedent(
            f"""
            [project]
            name = "pkgmgr-test"
            version = "{version}"
            """
        ).strip() + "\n"
        path = os.path.join(self._tmp_dir.name, "pyproject.toml")
        with open(path, "w", encoding="utf-8") as f:
            f.write(content)
        return path
    def _run_cli_version_and_capture(self, extra_args: List[str] | None = None) -> str:
        """
        Run 'pkgmgr version [extra_args]' via cli.main() and return captured stdout.
        A SystemExit with code 0 or None counts as success; any other
        exit status is re-raised as an AssertionError.
        """
        if extra_args is None:
            extra_args = []
        sys.argv = ["pkgmgr", "version"] + list(extra_args)
        buf = io.StringIO()
        with redirect_stdout(buf):
            try:
                cli.main()
            except SystemExit as exc:
                # BUGFIX: do not treat a successful exit (sys.exit(0) or
                # plain sys.exit()) as a failure; this matches the E2E
                # helper's success semantics.
                if exc.code not in (None, 0):
                    # Re-raise as AssertionError to make failures easier to read
                    raise AssertionError(
                        f"'pkgmgr version' exited with code {exc.code}"
                    ) from exc
        return buf.getvalue()
    # ------------------------------------------------------------
    # Tests
    # ------------------------------------------------------------
    def test_version_matches_tag(self) -> None:
        """
        If the latest SemVer tag matches the pyproject.toml version,
        the CLI should:
        - show both values
        - NOT emit a mismatch warning.
        """
        # Arrange: pyproject.toml with version 1.2.3
        self._write_pyproject("1.2.3")
        # Arrange: mock git tags
        with mock.patch(
            "pkgmgr.git_utils.get_tags",
            return_value=["v1.2.0", "v1.2.3", "v1.0.0"],
        ):
            # Act
            stdout = self._run_cli_version_and_capture()
        # Basic header
        self.assertIn("pkgmgr version info", stdout)
        self.assertIn("Repository:", stdout)
        # Git SemVer tag line
        self.assertIn("Git (latest SemVer tag):", stdout)
        self.assertIn("v1.2.3", stdout)
        self.assertIn("(parsed: 1.2.3)", stdout)
        # pyproject line
        self.assertIn("pyproject.toml:", stdout)
        self.assertIn("1.2.3", stdout)
        # No warning expected if versions are equal
        self.assertNotIn("[WARN]", stdout)
    def test_version_mismatch_warns(self) -> None:
        """
        If the latest SemVer tag differs from the pyproject.toml version,
        the CLI should emit a mismatch warning.
        """
        # Arrange: pyproject.toml says 1.2.4
        self._write_pyproject("1.2.4")
        # Arrange: mock git tags (latest is 1.2.3)
        with mock.patch(
            "pkgmgr.git_utils.get_tags",
            return_value=["v1.2.3"],
        ):
            stdout = self._run_cli_version_and_capture()
        # Git line
        self.assertIn("Git (latest SemVer tag):", stdout)
        self.assertIn("v1.2.3", stdout)
        # pyproject line
        self.assertIn("pyproject.toml:", stdout)
        self.assertIn("1.2.4", stdout)
        # Mismatch warning must be printed
        self.assertIn("[WARN]", stdout)
        self.assertIn("Version mismatch", stdout)
    def test_version_no_tags(self) -> None:
        """
        If no tags exist at all, the CLI should handle this gracefully,
        show "<none found>" for tags and still display the pyproject version.
        No mismatch warning should be emitted because there is no tag.
        """
        # Arrange: pyproject.toml exists
        self._write_pyproject("0.0.1")
        # Arrange: no tags returned
        with mock.patch(
            "pkgmgr.git_utils.get_tags",
            return_value=[],
        ):
            stdout = self._run_cli_version_and_capture()
        # Indicates that no SemVer tag was found
        self.assertIn("Git (latest SemVer tag): <none found>", stdout)
        # pyproject version is still shown
        self.assertIn("pyproject.toml:", stdout)
        self.assertIn("0.0.1", stdout)
        # No mismatch warning expected
        self.assertNotIn("Version mismatch", stdout)
        self.assertNotIn("[WARN]", stdout)
# Allow running this unit-test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()