Package pkgmgr with multi-format manifests
- Add pyproject.toml and setuptools config for pkgmgr packaging
- Add Nix flake (devShell + pkgmgr package output)
- Add Arch PKGBUILD for system packaging
- Introduce pkgmgr.yml manifest for repo-level dependencies
- Refactor CLI into pkgmgr/cli.py and make main.py a thin entrypoint
- Extend install_repos to handle pkgmgr.yml, PKGBUILD, flake.nix, Ansible and Python manifests
- Enhance status/update to show Nix/yay system status and upgrades
- Improve .gitignore and document requirements.yml

Created with AI (ChatGPT) – see conversation: https://chatgpt.com/share/6932f2ca-f560-800f-8bb0-52cb82f27e88
.gitignore | 27 lines changed (vendored)
@@ -1,2 +1,27 @@
+# Prevents unwanted files from being committed to version control.
+
+# Custom Config file
 config/config.yaml
-*__pycache__*
+
+# Python bytecode
+__pycache__/
+*.pyc
+
+# Virtual environments
+.venv/
+venv/
+.venvs/
+
+# Build artifacts
+dist/
+build/
+*.egg-info/
+
+# Editor files
+.vscode/
+.idea/
+
+# OS noise
+.DS_Store
+Thumbs.db
PKGBUILD | 39 lines (new file)
@@ -0,0 +1,39 @@
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>

pkgname=package-manager
pkgver=0.1.0
pkgrel=1
pkgdesc="A configurable Python tool to manage multiple repositories via Bash and automate common Git operations."
arch=('any')
url="https://github.com/kevinveenbirkenbach/package-manager"
license=('MIT')

depends=(
    'python'
    'python-yaml'
    'git'
    'bash'
)

makedepends=(
    'python-build'
    'python-installer'
    'python-wheel'
    'python-setuptools'
)

source=("$pkgname-$pkgver.tar.gz::$url/archive/refs/tags/v$pkgver.tar.gz")
sha256sums=('SKIP')

build() {
    cd "$srcdir/$pkgname-$pkgver"
    python -m build --wheel --no-isolation
}

package() {
    cd "$srcdir/$pkgname-$pkgver"
    python -m installer --destdir="$pkgdir" dist/*.whl

    # Optional: add pkgmgr executable symlink
    install -Dm755 main.py "$pkgdir/usr/bin/pkgmgr"
}
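As a usage note: on Arch, a package built from the PKGBUILD above would normally be produced with makepkg, which sources the file, pulls in depends/makedepends, and runs build() and package(). A minimal, hedged sketch of driving that step from Python; the helper below is illustrative and not part of this commit:

import subprocess
from pathlib import Path

def build_arch_package(repo_dir: str) -> None:
    """Illustrative only: build and install the package described by a PKGBUILD.

    Assumes makepkg and pacman are available; '-si' syncs missing
    dependencies and installs the resulting package, '--noconfirm'
    skips the interactive prompts.
    """
    if not (Path(repo_dir) / "PKGBUILD").exists():
        raise FileNotFoundError(f"No PKGBUILD found in {repo_dir}")
    # makepkg must run as a regular user from the directory containing the PKGBUILD.
    subprocess.run(["makepkg", "-si", "--noconfirm"], cwd=repo_dir, check=True)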
__init__.py | 0 lines (new, empty file)
flake.nix | 40 lines (new file)
@@ -0,0 +1,40 @@
# flake.nix
# This file defines a Nix flake providing a reproducible development environment
# and optional installation package for the package-manager tool.

{
  description = "Nix flake for Kevin's package-manager tool";

  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
  };

  outputs = { self, nixpkgs }:
    let
      pkgs = nixpkgs.legacyPackages.x86_64-linux;
    in {

      # Development environment used via: nix develop
      devShells.default = pkgs.mkShell {
        # System packages for development
        buildInputs = [
          pkgs.python311
          pkgs.python311Packages.pyyaml
          pkgs.git
        ];

        # Message shown on environment entry
        shellHook = ''
          echo "Entered pkgmgr development environment";
        '';
      };

      # Optional installable package for "nix profile install"
      packages.pkgmgr = pkgs.python311Packages.buildPythonApplication {
        pname = "package-manager";
        version = "0.1.0";
        src = ./.;
        propagatedBuildInputs = [ pkgs.python311Packages.pyyaml ];
      };
    };
}
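The packages.pkgmgr output is what a profile install would target. The installer changes later in this commit (_install_nix_flake_profile) follow the convention of trying the pkgmgr output first and then default; a hedged sketch of that convention, assuming the nix CLI with flakes enabled is on PATH:

import os
import shutil
import subprocess

def install_from_flake(repo_dir: str) -> bool:
    """Illustrative only: install a flake output into the user's Nix profile.

    Mirrors the convention used by the install_repos changes below:
    prefer the 'pkgmgr' output, then fall back to 'default'.
    """
    if not os.path.exists(os.path.join(repo_dir, "flake.nix")) or shutil.which("nix") is None:
        return False
    for output in ("pkgmgr", "default"):
        # Equivalent to running: nix profile install <repo_dir>#<output>
        result = subprocess.run(["nix", "profile", "install", f"{repo_dir}#{output}"])
        if result.returncode == 0:
            return True
    return False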
main.py | 458 lines changed
@@ -1,460 +1,6 @@
(The removed lines were the previous in-file CLI implementation: configuration loading, PROXY_COMMANDS, argument parsing, and command dispatch. That code now lives in pkgmgr/cli.py, shown below.)

#!/usr/bin/env python3

from pkgmgr.cli import main

if __name__ == "__main__":
    main()
pkgmgr.yml | 7 lines (new file)
@@ -0,0 +1,7 @@
version: 1

author: "Kevin Veen-Birkenbach"
url: "https://github.com/kevinveenbirkenbach/package-manager"
description: "A configurable Python-based package manager for managing multiple repositories via Bash."

dependencies: []
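The dependencies list is empty for this repository; the shape of a populated manifest is documented in the install_repos changes further below (repository plus optional version and reason). A hedged sketch of reading such a manifest, with the dependency fields in the comments taken from that documented format:

import yaml

def load_pkgmgr_dependencies(path: str = "pkgmgr.yml") -> list:
    """Illustrative only: return the dependency entries of a pkgmgr.yml manifest."""
    with open(path, "r", encoding="utf-8") as f:
        manifest = yaml.safe_load(f) or {}
    deps = manifest.get("dependencies", []) or []
    # Each entry is expected to look like (format documented in install_repos below):
    #   - repository: github:user/repo   # identifier of the dependency
    #     version: main                  # optional
    #     reason: "Optional description" # optional
    return [d for d in deps if isinstance(d, dict) and d.get("repository")]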
pkgmgr/cli.py | 489 lines (new, executable file)
@@ -0,0 +1,489 @@
#!/usr/bin/env python3

import os
import yaml
import argparse
import json
import os
import sys

# Define configuration file paths.
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
USER_CONFIG_PATH = os.path.join(PROJECT_ROOT, "config", "config.yaml")

from pkgmgr.clone_repos import clone_repos
from pkgmgr.config_init import config_init
from pkgmgr.create_ink import create_ink
from pkgmgr.deinstall_repos import deinstall_repos
from pkgmgr.delete_repos import delete_repos
from pkgmgr.exec_proxy_command import exec_proxy_command
from pkgmgr.filter_ignored import filter_ignored
from pkgmgr.get_repo_identifier import get_repo_identifier
from pkgmgr.get_selected_repos import get_selected_repos
from pkgmgr.install_repos import install_repos
from pkgmgr.interactive_add import interactive_add
from pkgmgr.list_repositories import list_repositories
from pkgmgr.load_config import load_config
from pkgmgr.resolve_repos import resolve_repos
from pkgmgr.run_command import run_command
from pkgmgr.save_user_config import save_user_config
from pkgmgr.show_config import show_config
from pkgmgr.status_repos import status_repos
from pkgmgr.update_repos import update_repos

# Commands proxied by package-manager
PROXY_COMMANDS = {
    "git":[
        "pull",
        "push",
        "diff",
        "add",
        "show",
        "checkout",
        "clone",
        "reset",
        "revert",
        "rebase",
        "commit"
    ],
    "docker":[
        "start",
        "stop",
        "build"
    ],
    "docker compose":[
        "up",
        "down",
        "exec",
        "ps",
        "restart",
    ]
}

class SortedSubParsersAction(argparse._SubParsersAction):
    def add_parser(self, name, **kwargs):
        parser = super().add_parser(name, **kwargs)
        # Sort the list of subparsers each time one is added
        self._choices_actions.sort(key=lambda a: a.dest)
        return parser

# Main program.
def main() -> None:
    CONFIG_MERGED = load_config(USER_CONFIG_PATH)
    REPOSITORIES_BASE_DIR = os.path.expanduser(CONFIG_MERGED["directories"]["repositories"])
    ALL_REPOSITORIES = CONFIG_MERGED["repositories"]
    BINARIES_DIRECTORY = os.path.expanduser(CONFIG_MERGED["directories"]["binaries"])
    description_text = """\
\033[1;32mPackage Manager 🤖📦\033[0m
\033[3mKevin's Package Manager is a multi-repository, multi-package, and multi-format
development tool crafted by and designed for:\033[0m
\033[1;34mKevin Veen-Birkenbach\033[0m
\033[4mhttps://www.veen.world/\033[0m

\033[1mOverview:\033[0m
A powerful toolchain that unifies and automates workflows across heterogeneous
project ecosystems. pkgmgr is not only a package manager — it is a full
developer-oriented orchestration tool.

It automatically detects, merges, and processes metadata from multiple
dependency formats, including:
• \033[1;33mPython:\033[0m pyproject.toml, requirements.txt
• \033[1;33mNix:\033[0m flake.nix
• \033[1;33mArch Linux:\033[0m PKGBUILD
• \033[1;33mAnsible:\033[0m requirements.yml
• \033[1;33mpkgmgr-native:\033[0m pkgmgr.yml

This allows pkgmgr to perform installation, updates, verification, dependency
resolution, and synchronization across complex multi-repo environments — with a
single unified command-line interface.

\033[1mDeveloper Tools:\033[0m
pkgmgr includes an integrated toolbox to enhance daily development workflows:

• \033[1;33mVS Code integration:\033[0m Auto-generate and open multi-repo workspaces
• \033[1;33mTerminal integration:\033[0m Open repositories in new GNOME Terminal tabs
• \033[1;33mExplorer integration:\033[0m Open repositories in your file manager
• \033[1;33mRelease automation:\033[0m Version bumping, changelog updates, and tagging
• \033[1;33mBatch operations:\033[0m Execute shell commands across multiple repositories
• \033[1;33mGit/Docker/Make wrappers:\033[0m Unified command proxying for many tools

\033[1mCapabilities:\033[0m
• Clone, pull, verify, update, and manage many repositories at once
• Resolve dependencies across languages and ecosystems
• Standardize install/update workflows
• Create symbolic executable wrappers for any project
• Merge configuration from default + user config layers

Use pkgmgr as both a robust package management framework and a versatile
development orchestration tool.

For detailed help on each command, use:
\033[1mpkgmgr <command> --help\033[0m
"""

    parser = argparse.ArgumentParser(description=description_text,formatter_class=argparse.RawTextHelpFormatter)
    subparsers = parser.add_subparsers(dest="command", help="Subcommands", action=SortedSubParsersAction)

    def add_identifier_arguments(subparser):
        subparser.add_argument(
            "identifiers",
            nargs="*",
            help="Identifier(s) for repositories. Default: Repository of current folder.",
        )
        subparser.add_argument(
            "--all",
            action="store_true",
            default=False,
            help="Apply the subcommand to all repositories in the config. Some subcommands ask for confirmation. If you want to give this confirmation for all repositories, pipe 'yes'. E.g: yes | pkgmgr {subcommand} --all"
        )
        subparser.add_argument("--preview", action="store_true", help="Preview changes without executing commands")
        subparser.add_argument("--list", action="store_true", help="List affected repositories (with preview or status)")
        subparser.add_argument("-a", "--args", nargs=argparse.REMAINDER, dest="extra_args", help="Additional parameters to be attached.",default=[])

    def add_install_update_arguments(subparser):
        add_identifier_arguments(subparser)
        subparser.add_argument(
            "-q",
            "--quiet",
            action="store_true",
            help="Suppress warnings and info messages",
        )
        subparser.add_argument(
            "--no-verification",
            action="store_true",
            default=False,
            help="Disable verification via commit/gpg",
        )
        subparser.add_argument(
            "--dependencies",
            action="store_true",
            help="Also pull and update dependencies",
        )
        subparser.add_argument(
            "--clone-mode",
            choices=["ssh", "https", "shallow"],
            default="ssh",
            help="Specify the clone mode: ssh, https, or shallow (HTTPS shallow clone; default: ssh)",
        )

    install_parser = subparsers.add_parser("install", help="Setup repository/repositories alias links to executables")
    add_install_update_arguments(install_parser)

    update_parser = subparsers.add_parser("update", help="Update (pull + install) repository/repositories")
    add_install_update_arguments(update_parser)
    update_parser.add_argument("--system", action="store_true", help="Include system update commands")

    deinstall_parser = subparsers.add_parser("deinstall", help="Remove alias links to repository/repositories")
    add_identifier_arguments(deinstall_parser)

    delete_parser = subparsers.add_parser("delete", help="Delete repository/repositories alias links to executables")
    add_identifier_arguments(delete_parser)

    # Add the 'create' subcommand (with existing identifier arguments)
    create_parser = subparsers.add_parser(
        "create",
        help="Create new repository entries: add them to the config if not already present, initialize the local repository, and push remotely if --remote is set."
    )
    # Reuse the common identifier arguments
    add_identifier_arguments(create_parser)
    create_parser.add_argument(
        "--remote",
        action="store_true",
        help="If set, add the remote and push the initial commit."
    )

    status_parser = subparsers.add_parser("status", help="Show status for repository/repositories or system")
    add_identifier_arguments(status_parser)
    status_parser.add_argument("--system", action="store_true", help="Show system status")

    config_parser = subparsers.add_parser("config", help="Manage configuration")
    config_subparsers = config_parser.add_subparsers(dest="subcommand", help="Config subcommands", required=True)
    config_show = config_subparsers.add_parser("show", help="Show configuration")
    add_identifier_arguments(config_show)
    config_add = config_subparsers.add_parser("add", help="Interactively add a new repository entry")
    config_edit = config_subparsers.add_parser("edit", help="Edit configuration file with nano")
    config_init_parser = config_subparsers.add_parser("init", help="Initialize user configuration by scanning the base directory")
    config_delete = config_subparsers.add_parser("delete", help="Delete repository entry from user config")
    add_identifier_arguments(config_delete)
    config_ignore = config_subparsers.add_parser("ignore", help="Set ignore flag for repository entries in user config")
    add_identifier_arguments(config_ignore)
    config_ignore.add_argument("--set", choices=["true", "false"], required=True, help="Set ignore to true or false")
    path_parser = subparsers.add_parser("path", help="Print the path(s) of repository/repositories")
    add_identifier_arguments(path_parser)
    explore_parser = subparsers.add_parser("explore", help="Open repository in Nautilus file manager")
    add_identifier_arguments(explore_parser)

    terminal_parser = subparsers.add_parser("terminal", help="Open repository in a new GNOME Terminal tab")
    add_identifier_arguments(terminal_parser)

    release_parser = subparsers.add_parser(
        "release",
        help="Create a release for repository/ies by incrementing version and updating the changelog."
    )
    release_parser.add_argument(
        "release_type",
        choices=["major", "minor", "patch"],
        help="Type of version increment for the release (major, minor, patch)."
    )
    release_parser.add_argument(
        "-m", "--message",
        default="",
        help="Optional release message to add to the changelog and tag."
    )
    add_identifier_arguments(release_parser)

    code_parser = subparsers.add_parser("code", help="Open repository workspace with VS Code")
    add_identifier_arguments(code_parser)

    list_parser = subparsers.add_parser("list", help="List all repositories with details and status")
    list_parser.add_argument("--search", default="", help="Filter repositories that contain the given string")
    list_parser.add_argument("--status", type=str, default="", help="Filter repositories by status (case insensitive)")

    # Add the subcommand parser for "shell"
    shell_parser = subparsers.add_parser("shell", help="Execute a shell command in each repository")
    add_identifier_arguments(shell_parser)
    shell_parser.add_argument("-c", "--command", nargs=argparse.REMAINDER, dest="shell_command", help="The shell command (and its arguments) to execute in each repository",default=[])

    make_parser = subparsers.add_parser("make", help="Executes make commands")
    add_identifier_arguments(make_parser)
    make_subparsers = make_parser.add_subparsers(dest="subcommand", help="Make subcommands", required=True)
    make_install = make_subparsers.add_parser("install", help="Executes the make install command")
    add_identifier_arguments(make_install)
    make_deinstall = make_subparsers.add_parser("deinstall", help="Executes the make deinstall command")

    proxy_command_parsers = {}
    for command, subcommands in PROXY_COMMANDS.items():
        for subcommand in subcommands:
            proxy_command_parsers[f"{command}_{subcommand}"] = subparsers.add_parser(
                subcommand,
                help=f"Proxies '{command} {subcommand}' to repository/ies",
                description=f"Executes '{command} {subcommand}' for the identified repos.\nTo receive more help execute '{command} {subcommand} --help'",
                formatter_class=argparse.RawTextHelpFormatter
            )
            if subcommand in ["pull", "clone"]:
                proxy_command_parsers[f"{command}_{subcommand}"].add_argument(
                    "--no-verification",
                    action="store_true",
                    default=False,
                    help="Disable verification via commit/gpg",
                )
            if subcommand == "clone":
                proxy_command_parsers[f"{command}_{subcommand}"].add_argument(
                    "--clone-mode",
                    choices=["ssh", "https", "shallow"],
                    default="ssh",
                    help="Specify the clone mode: ssh, https, or shallow (HTTPS shallow clone; default: ssh)",
                )
            add_identifier_arguments(proxy_command_parsers[f"{command}_{subcommand}"])

    args = parser.parse_args()

    # All
    if args.command and not args.command in ["config","list","create"]:
        selected = get_selected_repos(args.all,ALL_REPOSITORIES,args.identifiers)

        for command, subcommands in PROXY_COMMANDS.items():
            for subcommand in subcommands:
                if args.command == subcommand:
                    if args.command == "clone":
                        clone_repos(
                            selected,
                            REPOSITORIES_BASE_DIR,
                            ALL_REPOSITORIES,
                            args.preview,
                            args.no_verification,
                            args.clone_mode
                        )
                    elif args.command == "pull":
                        from pkgmgr.pull_with_verification import pull_with_verification
                        pull_with_verification(
                            selected,
                            REPOSITORIES_BASE_DIR,
                            ALL_REPOSITORIES,
                            args.extra_args,
                            args.no_verification,
                            args.preview
                        )
                    else:
                        exec_proxy_command(command,selected, REPOSITORIES_BASE_DIR, ALL_REPOSITORIES, args.command, args.extra_args, args.preview)
                    exit(0)

        if args.command in ["make"]:
            exec_proxy_command(args.command,selected, REPOSITORIES_BASE_DIR, ALL_REPOSITORIES, args.subcommand, args.extra_args, args.preview)
            exit(0)

    # Dispatch commands.
    if args.command == "install":
        install_repos(
            selected,
            REPOSITORIES_BASE_DIR,
            BINARIES_DIRECTORY,
            ALL_REPOSITORIES,
            args.no_verification,
            args.preview,
            args.quiet,
            args.clone_mode,
            args.dependencies,
        )
    elif args.command == "create":
        from pkgmgr.create_repo import create_repo
        # If no identifiers are provided, you can decide to either use the repository of the current folder
        # or prompt the user to supply at least one identifier.
        if not args.identifiers:
            print("No identifiers provided. Please specify at least one identifier in the format provider/account/repository.")
            sys.exit(1)
        else:
            selected = get_selected_repos(True,ALL_REPOSITORIES,None)
            for identifier in args.identifiers:
                create_repo(identifier, CONFIG_MERGED, USER_CONFIG_PATH, BINARIES_DIRECTORY, remote=args.remote, preview=args.preview)
    elif args.command == "list":
        list_repositories(ALL_REPOSITORIES, REPOSITORIES_BASE_DIR, BINARIES_DIRECTORY, search_filter=args.search, status_filter=args.status)
    elif args.command == "deinstall":
        deinstall_repos(selected,REPOSITORIES_BASE_DIR, BINARIES_DIRECTORY, ALL_REPOSITORIES, preview=args.preview)
    elif args.command == "delete":
        delete_repos(selected,REPOSITORIES_BASE_DIR, ALL_REPOSITORIES, preview=args.preview)
    elif args.command == "update":
        update_repos(
            selected,
            REPOSITORIES_BASE_DIR,
            BINARIES_DIRECTORY,
            ALL_REPOSITORIES,
            args.no_verification,
            args.system,
            args.preview,
            args.quiet,
            args.dependencies,
            args.clone_mode
        )
    elif args.command == "release":
        if not selected:
            print("No repositories selected for release.")
            exit(1)
        # Import the release function from pkgmgr/release.py
        from pkgmgr import release as rel
        # Save the original working directory.
        original_dir = os.getcwd()
        for repo in selected:
            # Determine the repository directory
            repo_dir = repo.get("directory")
            if not repo_dir:
                from pkgmgr.get_repo_dir import get_repo_dir
                repo_dir = get_repo_dir(REPOSITORIES_BASE_DIR, repo)
            # Dynamically determine the file paths for pyproject.toml and CHANGELOG.md.
            pyproject_path = os.path.join(repo_dir, "pyproject.toml")
            changelog_path = os.path.join(repo_dir, "CHANGELOG.md")
            print(f"Releasing repository '{repo.get('repository')}' in '{repo_dir}'...")
            # Change into the repository directory so Git commands run in the right context.
            os.chdir(repo_dir)
            # Call the release function with the proper parameters.
            rel.release(
                pyproject_path=pyproject_path,
                changelog_path=changelog_path,
                release_type=args.release_type,
                message=args.message
            )
            # Change back to the original working directory.
            os.chdir(original_dir)
    elif args.command == "status":
        status_repos(selected,REPOSITORIES_BASE_DIR, ALL_REPOSITORIES, args.extra_args, list_only=args.list, system_status=args.system, preview=args.preview)
    elif args.command == "explore":
        for repository in selected:
            run_command(f"nautilus {repository['directory']} & disown")
    elif args.command == "code":
        if not selected:
            print("No repositories selected.")
        else:
            identifiers = [get_repo_identifier(repo, ALL_REPOSITORIES) for repo in selected]
            sorted_identifiers = sorted(identifiers)
            workspace_name = "_".join(sorted_identifiers) + ".code-workspace"
            workspaces_dir = os.path.expanduser(CONFIG_MERGED.get("directories").get("workspaces"))
            os.makedirs(workspaces_dir, exist_ok=True)
            workspace_file = os.path.join(workspaces_dir, workspace_name)

            folders = []
            for repository in selected:
                folders.append({"path": repository["directory"]})

            workspace_data = {
                "folders": folders,
                "settings": {}
            }
            if not os.path.exists(workspace_file):
                with open(workspace_file, "w") as f:
                    json.dump(workspace_data, f, indent=4)
                print(f"Created workspace file: {workspace_file}")
            else:
                print(f"Using existing workspace file: {workspace_file}")
            run_command(f'code "{workspace_file}"')
    elif args.command == "terminal":
        for repository in selected:
            run_command(f'gnome-terminal --tab --working-directory="{repository["directory"]}"')
    elif args.command == "path":
        for repository in selected:
            print(repository["directory"])
    elif args.command == "shell":
        if not args.shell_command:
            print("No shell command specified.")
            exit(2)
        # Join the provided shell command parts into one string.
        command_to_run = " ".join(args.shell_command)
        for repository in selected:
            print(f"Executing in '{repository['directory']}': {command_to_run}")
            run_command(command_to_run, cwd=repository["directory"], preview=args.preview)
    elif args.command == "config":
        if args.subcommand == "show":
            if args.all or (not args.identifiers):
                show_config([], USER_CONFIG_PATH, full_config=True)
            else:
                selected = resolve_repos(args.identifiers, ALL_REPOSITORIES)
                if selected:
                    show_config(selected, USER_CONFIG_PATH, full_config=False)
        elif args.subcommand == "add":
            interactive_add(CONFIG_MERGED,USER_CONFIG_PATH)
        elif args.subcommand == "edit":
            """Open the user configuration file in nano."""
            run_command(f"nano {USER_CONFIG_PATH}")
        elif args.subcommand == "init":
            if os.path.exists(USER_CONFIG_PATH):
                with open(USER_CONFIG_PATH, 'r') as f:
                    user_config = yaml.safe_load(f) or {}
            else:
                user_config = {"repositories": []}
            config_init(user_config, CONFIG_MERGED, BINARIES_DIRECTORY, USER_CONFIG_PATH)
        elif args.subcommand == "delete":
            # Load user config from USER_CONFIG_PATH.
            if os.path.exists(USER_CONFIG_PATH):
                with open(USER_CONFIG_PATH, 'r') as f:
                    user_config = yaml.safe_load(f) or {"repositories": []}
            else:
                user_config = {"repositories": []}
            if args.all or not args.identifiers:
                print("You must specify identifiers to delete.")
            else:
                to_delete = resolve_repos(args.identifiers, user_config.get("repositories", []))
                new_repos = [entry for entry in user_config.get("repositories", []) if entry not in to_delete]
                user_config["repositories"] = new_repos
                save_user_config(user_config,USER_CONFIG_PATH)
                print(f"Deleted {len(to_delete)} entries from user config.")
        elif args.subcommand == "ignore":
            # Load user config from USER_CONFIG_PATH.
            if os.path.exists(USER_CONFIG_PATH):
                with open(USER_CONFIG_PATH, 'r') as f:
                    user_config = yaml.safe_load(f) or {"repositories": []}
            else:
                user_config = {"repositories": []}
            if args.all or not args.identifiers:
                print("You must specify identifiers to modify ignore flag.")
            else:
                to_modify = resolve_repos(args.identifiers, user_config.get("repositories", []))
                for entry in user_config["repositories"]:
                    key = (entry.get("provider"), entry.get("account"), entry.get("repository"))
                    for mod in to_modify:
                        mod_key = (mod.get("provider"), mod.get("account"), mod.get("repository"))
                        if key == mod_key:
                            entry["ignore"] = (args.set == "true")
                            print(f"Set ignore for {key} to {entry['ignore']}")
                save_user_config(user_config,USER_CONFIG_PATH)
    else:
        parser.print_help()
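One way to read the proxy mechanism defined above: each PROXY_COMMANDS entry becomes its own subcommand, and at dispatch time the underlying tool is executed inside every selected repository with any -a/--args extras appended. The helper below is a simplified, hedged stand-in for exec_proxy_command, whose real implementation is not part of this diff:

import subprocess

def run_proxy_command(tool: str, subcommand: str, repo_dirs: list, extra_args: list) -> None:
    """Illustrative stand-in for exec_proxy_command.

    'tool' is a PROXY_COMMANDS key ("git", "docker", "docker compose"),
    'subcommand' one of its listed subcommands; extra_args correspond to
    the -a/--args option and are appended unchanged.
    """
    for repo_dir in repo_dirs:
        cmd = tool.split() + [subcommand] + list(extra_args)
        print(f"[{repo_dir}] {' '.join(cmd)}")
        subprocess.run(cmd, cwd=repo_dir, check=False)

# For example, `pkgmgr pull --all -a --rebase` would behave roughly like:
# run_proxy_command("git", "pull", ["/path/to/repo-a", "/path/to/repo-b"], ["--rebase"])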
@@ -1,8 +1,10 @@
 import os
 import subprocess
 import sys
-import yaml
 import tempfile
+import shutil
+import yaml
+
 from pkgmgr.get_repo_identifier import get_repo_identifier
 from pkgmgr.get_repo_dir import get_repo_dir
 from pkgmgr.create_ink import create_ink
@@ -10,6 +12,179 @@ from pkgmgr.run_command import run_command
|
|||||||
from pkgmgr.verify import verify_repository
|
from pkgmgr.verify import verify_repository
|
||||||
from pkgmgr.clone_repos import clone_repos
|
from pkgmgr.clone_repos import clone_repos
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_pkgbuild_array(repo_dir: str, var_name: str) -> list:
|
||||||
|
"""
|
||||||
|
Extract a Bash array (depends/makedepends) from PKGBUILD using bash itself.
|
||||||
|
Returns a list of package names or an empty list on error.
|
||||||
|
"""
|
||||||
|
pkgbuild_path = os.path.join(repo_dir, "PKGBUILD")
|
||||||
|
if not os.path.exists(pkgbuild_path):
|
||||||
|
return []
|
||||||
|
|
||||||
|
script = f'source PKGBUILD >/dev/null 2>&1; printf "%s\\n" "${{{var_name}[@]}}"'
|
||||||
|
try:
|
||||||
|
output = subprocess.check_output(
|
||||||
|
["bash", "-lc", script],
|
||||||
|
cwd=repo_dir,
|
||||||
|
text=True,
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
return []
|
||||||
|
|
||||||
|
return [line.strip() for line in output.splitlines() if line.strip()]
|
||||||
|
|
||||||
|
|
||||||
|
def _install_arch_dependencies_from_pkgbuild(repo_dir: str, preview: bool) -> None:
|
||||||
|
"""
|
||||||
|
If PKGBUILD exists and pacman is available, install depends + makedepends
|
||||||
|
via pacman.
|
||||||
|
"""
|
||||||
|
if shutil.which("pacman") is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
pkgbuild_path = os.path.join(repo_dir, "PKGBUILD")
|
||||||
|
if not os.path.exists(pkgbuild_path):
|
||||||
|
return
|
||||||
|
|
||||||
|
depends = _extract_pkgbuild_array(repo_dir, "depends")
|
||||||
|
makedepends = _extract_pkgbuild_array(repo_dir, "makedepends")
|
||||||
|
all_pkgs = depends + makedepends
|
||||||
|
|
||||||
|
if not all_pkgs:
|
||||||
|
return
|
||||||
|
|
||||||
|
cmd = "sudo pacman -S --noconfirm " + " ".join(all_pkgs)
|
||||||
|
run_command(cmd, preview=preview)
|
||||||
|
|
||||||
|
|
||||||
|
def _install_nix_flake_profile(repo_dir: str, preview: bool) -> None:
|
||||||
|
"""
|
||||||
|
If flake.nix exists and 'nix' is available, try to install a profile
|
||||||
|
from the flake. Convention: try .#pkgmgr, then .#default.
|
||||||
|
"""
|
||||||
|
flake_path = os.path.join(repo_dir, "flake.nix")
|
||||||
|
if not os.path.exists(flake_path):
|
||||||
|
return
|
||||||
|
if shutil.which("nix") is None:
|
||||||
|
print("Warning: flake.nix found but 'nix' command not available. Skipping flake setup.")
|
||||||
|
return
|
||||||
|
|
||||||
|
print("Nix flake detected, attempting to install profile output...")
|
||||||
|
for output in ("pkgmgr", "default"):
|
||||||
|
cmd = f"nix profile install {repo_dir}#{output}"
|
||||||
|
try:
|
||||||
|
run_command(cmd, preview=preview)
|
||||||
|
print(f"Nix flake output '{output}' successfully installed.")
|
||||||
|
break
|
||||||
|
except SystemExit as e:
|
||||||
|
print(f"[Warning] Failed to install Nix flake output '{output}': {e}")
|
||||||
|
|
||||||
|
|
||||||
|
def _install_pkgmgr_dependencies_from_manifest(
    repo_dir: str,
    no_verification: bool,
    update_dependencies: bool,
    clone_mode: str,
    preview: bool,
) -> None:
    """
    Read pkgmgr.yml (if present) and install referenced pkgmgr repository
    dependencies.

    Expected format:

    version: 1
    author: "..."
    url: "..."
    description: "..."
    dependencies:
      - repository: github:user/repo
        version: main
        reason: "Optional description"
    """
    manifest_path = os.path.join(repo_dir, "pkgmgr.yml")
    if not os.path.exists(manifest_path):
        return

    try:
        with open(manifest_path, "r", encoding="utf-8") as f:
            manifest = yaml.safe_load(f) or {}
    except Exception as e:
        print(f"Error loading pkgmgr.yml in '{repo_dir}': {e}")
        return

    dependencies = manifest.get("dependencies", []) or []
    if not isinstance(dependencies, list) or not dependencies:
        return

    # Optional: show basic metadata (author/url/description) if present
    author = manifest.get("author")
    url = manifest.get("url")
    description = manifest.get("description")

    if not preview:
        print("pkgmgr manifest detected:")
        if author:
            print(f"  author: {author}")
        if url:
            print(f"  url: {url}")
        if description:
            print(f"  description: {description}")

    dep_repo_ids = []
    for dep in dependencies:
        if not isinstance(dep, dict):
            continue
        repo_id = dep.get("repository")
        if repo_id:
            dep_repo_ids.append(str(repo_id))

    # Optionally: update (pull) dependencies before installing
    if update_dependencies and dep_repo_ids:
        cmd_pull = "pkgmgr pull " + " ".join(dep_repo_ids)
        try:
            run_command(cmd_pull, preview=preview)
        except SystemExit as e:
            print(f"Warning: 'pkgmgr pull' for dependencies failed (exit code {e}).")

    # Install dependencies one by one
    for dep in dependencies:
        if not isinstance(dep, dict):
            continue

        repo_id = dep.get("repository")
        if not repo_id:
            continue

        version = dep.get("version")
        reason = dep.get("reason")

        if reason and not preview:
            print(f"Installing dependency {repo_id}: {reason}")
        else:
            print(f"Installing dependency {repo_id}...")

        cmd = f"pkgmgr install {repo_id}"

        if version:
            cmd += f" --version {version}"

        if no_verification:
            cmd += " --no-verification"

        if update_dependencies:
            cmd += " --dependencies"

        if clone_mode:
            cmd += f" --clone-mode {clone_mode}"

        try:
            run_command(cmd, preview=preview)
        except SystemExit as e:
            print(f"[Warning] Failed to install dependency '{repo_id}': {e}")

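As an illustration only (not part of this commit), this is how a pkgmgr.yml dependency entry maps to the generated install command; the repository value is hypothetical and the snippet assumes PyYAML is installed.

import yaml

SAMPLE_MANIFEST = """
version: 1
dependencies:
  - repository: github:user/repo
    version: main
    reason: "Optional description"
"""

manifest = yaml.safe_load(SAMPLE_MANIFEST)
for dep in manifest["dependencies"]:
    cmd = f"pkgmgr install {dep['repository']}"
    if dep.get("version"):
        cmd += f" --version {dep['version']}"
    print(cmd)
# Prints: pkgmgr install github:user/repo --version main
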
def install_repos(
    selected_repos,
    repositories_base_dir,
@@ -19,25 +194,39 @@ def install_repos(
    preview,
    quiet,
    clone_mode: str,
-    update_dependencies: bool
+    update_dependencies: bool,
):
    """
-    Install repositories by creating symbolic links, running setup commands, and
-    installing additional packages if a requirements.yml or requirements.txt file is found.
+    Install repositories by creating symbolic links and processing standard
+    manifest files (pkgmgr.yml, PKGBUILD, flake.nix, Ansible requirements,
+    Python manifests, Makefile).
    """
    for repo in selected_repos:
        repo_identifier = get_repo_identifier(repo, all_repos)
        repo_dir = get_repo_dir(repositories_base_dir, repo)

        if not os.path.exists(repo_dir):
            print(f"Repository directory '{repo_dir}' does not exist. Cloning it now...")
            # Pass the clone_mode parameter to clone_repos
-            clone_repos([repo], repositories_base_dir, all_repos, preview, no_verification, clone_mode)
+            clone_repos(
+                [repo],
+                repositories_base_dir,
+                all_repos,
+                preview,
+                no_verification,
+                clone_mode,
+            )
            if not os.path.exists(repo_dir):
                print(f"Cloning failed for repository {repo_identifier}. Skipping installation.")
                continue

        verified_info = repo.get("verified")
-        verified_ok, errors, commit_hash, signing_key = verify_repository(repo, repo_dir, mode="local", no_verification=no_verification)
+        verified_ok, errors, commit_hash, signing_key = verify_repository(
+            repo,
+            repo_dir,
+            mode="local",
+            no_verification=no_verification,
+        )

        if not no_verification and verified_info and not verified_ok:
            print(f"Warning: Verification failed for {repo_identifier}:")
@@ -49,98 +238,82 @@ def install_repos(
            continue

        # Create the symlink using create_ink.
-        create_ink(repo, repositories_base_dir, bin_dir, all_repos, quiet=quiet, preview=preview)
+        create_ink(
+            repo,
+            repositories_base_dir,
+            bin_dir,
+            all_repos,
+            quiet=quiet,
+            preview=preview,
+        )

-        # Check if a requirements.yml file exists and install additional packages.
+        # 1) pkgmgr.yml (pkgmgr-internal manifest for other repositories)
+        _install_pkgmgr_dependencies_from_manifest(
+            repo_dir=repo_dir,
+            no_verification=no_verification,
+            update_dependencies=update_dependencies,
+            clone_mode=clone_mode,
+            preview=preview,
+        )

+        # 2) Arch: PKGBUILD (depends/makedepends)
+        _install_arch_dependencies_from_pkgbuild(repo_dir, preview=preview)

+        # 3) Nix: flake.nix
+        _install_nix_flake_profile(repo_dir, preview=preview)

+        # 4) Ansible: requirements.yml (only collections/roles)
        req_file = os.path.join(repo_dir, "requirements.yml")
        if os.path.exists(req_file):
            try:
-                with open(req_file, "r") as f:
-                    requirements = yaml.safe_load(f)
+                with open(req_file, "r", encoding="utf-8") as f:
+                    requirements = yaml.safe_load(f) or {}
            except Exception as e:
                print(f"Error loading requirements.yml in {repo_identifier}: {e}")
-                continue  # Skip to next repository if error occurs
-            if requirements:
-                # Install pacman packages if defined.
-                if "pacman" in requirements:
-                    pacman_packages = requirements["pacman"]
-                    if pacman_packages:
-                        cmd = "sudo pacman -S --noconfirm " + " ".join(pacman_packages)
-                        run_command(cmd, preview=preview)
-                # Install yay packages if defined.
-                if "yay" in requirements:
-                    yay_packages = requirements["yay"]
-                    if yay_packages:
-                        cmd = "sudo -u aur_builder yay -S --noconfirm " + " ".join(yay_packages)
-                        run_command(cmd, preview=preview)
-                # Install pkgmgr packages if defined.
-                if "pkgmgr" in requirements:
-                    pkgmgr_packages = requirements["pkgmgr"]
-                    if pkgmgr_packages:
-                        if update_dependencies:
-                            cmd_pull = "pkgmgr pull " + " ".join(pkgmgr_packages)
-                            try:
-                                run_command(cmd_pull, preview=preview)
-                            except SystemExit as e:
-                                print(f"Warning: 'pkgmgr pull' command failed (exit code {e}). Trying fallback clone...")
-                                cmd_clone = "pkgmgr clone " + " ".join(pkgmgr_packages)
-                                run_command(cmd_clone, preview=preview)
-                        cmd = "pkgmgr install " + " ".join(pkgmgr_packages)

-                        if no_verification:
-                            cmd += " --no-verification"
-                        if update_dependencies:
-                            cmd += " --dependencies"
-                        if clone_mode:
-                            cmd += f" --clone-mode {clone_mode}"
-                        run_command(cmd, preview=preview)
-                # Install pip packages if defined.
-                if "pip" in requirements:
-                    pip_packages = requirements["pip"]
-                    if pip_packages:
-                        cmd = "python3 -m pip install " + " ".join(pip_packages)
-                        run_command(cmd, preview=preview)

-                # Check if the requirements contain either 'collections' or 'roles'
+                requirements = None

+            if requirements and isinstance(requirements, dict):
                if "collections" in requirements or "roles" in requirements:
                    print(f"Ansible dependencies found in {repo_identifier}, installing...")

-                    # Build a new dictionary that only contains the Ansible dependencies
                    ansible_requirements = {}
                    if "collections" in requirements:
                        ansible_requirements["collections"] = requirements["collections"]
                    if "roles" in requirements:
                        ansible_requirements["roles"] = requirements["roles"]

-                    # Write the ansible requirements to a temporary file.
-                    with tempfile.NamedTemporaryFile(mode='w', suffix='.yml', delete=False) as tmp:
+                    with tempfile.NamedTemporaryFile(
+                        mode="w",
+                        suffix=".yml",
+                        delete=False,
+                    ) as tmp:
                        yaml.dump(ansible_requirements, tmp, default_flow_style=False)
                        tmp_filename = tmp.name

-                    # Install Ansible collections if defined.
                    if "collections" in ansible_requirements:
                        print(f"Ansible collections found in {repo_identifier}, installing...")
                        cmd = f"ansible-galaxy collection install -r {tmp_filename}"
                        run_command(cmd, cwd=repo_dir, preview=preview)

-                    # Install Ansible roles if defined.
                    if "roles" in ansible_requirements:
                        print(f"Ansible roles found in {repo_identifier}, installing...")
                        cmd = f"ansible-galaxy role install -r {tmp_filename}"
                        run_command(cmd, cwd=repo_dir, preview=preview)

-        # Check if a requirements.txt file exists and install Python packages.
+        # 5) Python: pyproject.toml (modern) / requirements.txt (classic)
+        pyproject_path = os.path.join(repo_dir, "pyproject.toml")
+        if os.path.exists(pyproject_path):
+            print(f"pyproject.toml found in {repo_identifier}, installing Python project...")
+            cmd = "~/.venvs/pkgmgr/bin/pip install ."
+            run_command(cmd, cwd=repo_dir, preview=preview)

        req_txt_file = os.path.join(repo_dir, "requirements.txt")
        if os.path.exists(req_txt_file):
            print(f"requirements.txt found in {repo_identifier}, installing Python dependencies...")
            cmd = "~/.venvs/pkgmgr/bin/pip install -r requirements.txt"
            run_command(cmd, cwd=repo_dir, preview=preview)

-        # Check if a Makefile exists and run make.
+        # 6) Makefile: make install (if present)
        makefile_path = os.path.join(repo_dir, "Makefile")
        if os.path.exists(makefile_path):
            cmd = "make install"

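As an illustration only (not part of the diff), step 4 above keeps just the Ansible-relevant keys of requirements.yml before writing the temporary file. A minimal sketch with hypothetical collection and role names, assuming PyYAML is installed:

import yaml

SAMPLE_REQUIREMENTS = """
collections:
  - community.general
roles:
  - geerlingguy.docker
"""

requirements = yaml.safe_load(SAMPLE_REQUIREMENTS)
ansible_requirements = {
    key: requirements[key]
    for key in ("collections", "roles")
    if key in requirements
}
print(yaml.dump(ansible_requirements, default_flow_style=False))
# collections:
# - community.general
# roles:
# - geerlingguy.docker
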
@@ -1,13 +1,44 @@
import sys
+import shutil

from .exec_proxy_command import exec_proxy_command
from .run_command import run_command
+from .get_repo_identifier import get_repo_identifier

-def status_repos(selected_repos, repositories_base_dir, all_repos, extra_args, list_only=False, system_status=False, preview=False):
+def status_repos(
+    selected_repos,
+    repositories_base_dir,
+    all_repos,
+    extra_args,
+    list_only: bool = False,
+    system_status: bool = False,
+    preview: bool = False,
+):
    if system_status:
        print("System status:")

+        # Arch / AUR updates (if yay / aur_builder is configured)
        run_command("sudo -u aur_builder yay -Qu --noconfirm", preview=preview)

+        # Nix profile status (if Nix is available)
+        if shutil.which("nix") is not None:
+            print("\nNix profile status:")
+            try:
+                run_command("nix profile list", preview=preview)
+            except SystemExit as e:
+                print(f"[Warning] Failed to query Nix profiles: {e}")

    if list_only:
        for repo in selected_repos:
            print(get_repo_identifier(repo, all_repos))
    else:
-        exec_proxy_command('git',selected_repos, repositories_base_dir, all_repos, "status", extra_args, preview)
+        exec_proxy_command(
+            "git",
+            selected_repos,
+            repositories_base_dir,
+            all_repos,
+            "status",
+            extra_args,
+            preview,
+        )

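As an aside (not part of the diff), the AUR query above runs unconditionally, while the Nix query is guarded by shutil.which. A minimal sketch of the same guard applied to yay; the helper name is hypothetical:

import shutil

def aur_status_command():
    """Return the AUR update-check command, or None when yay is not installed."""
    if shutil.which("yay") is None:
        return None
    return "sudo -u aur_builder yay -Qu --noconfirm"

cmd = aur_status_command()
if cmd is not None:
    print(cmd)  # would be passed to run_command(cmd, preview=preview)
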
@@ -1,7 +1,10 @@
import sys
+import shutil

from pkgmgr.pull_with_verification import pull_with_verification
from pkgmgr.install_repos import install_repos

def update_repos(
    selected_repos,
    repositories_base_dir,
@@ -12,7 +15,8 @@ def update_repos(
    preview: bool,
    quiet: bool,
    update_dependencies: bool,
-    clone_mode: str):
+    clone_mode: str,
+):
    """
    Update repositories by pulling latest changes and installing them.

@@ -34,7 +38,7 @@ def update_repos(
        all_repos,
        [],
        no_verification,
-        preview
+        preview,
    )

    install_repos(
@@ -46,10 +50,19 @@ def update_repos(
        preview,
        quiet,
        clone_mode,
-        update_dependencies
+        update_dependencies,
    )

    if system_update:
        from pkgmgr.run_command import run_command

+        # Nix: upgrade all profile entries (if Nix is available)
+        if shutil.which("nix") is not None:
+            try:
+                run_command("nix profile upgrade '.*'", preview=preview)
+            except SystemExit as e:
+                print(f"[Warning] 'nix profile upgrade' failed: {e}")

+        # Arch / AUR system update
        run_command("sudo -u aur_builder yay -Syu --noconfirm", preview=preview)
        run_command("sudo pacman -Syyu --noconfirm", preview=preview)

35 pyproject.toml Normal file
@@ -0,0 +1,35 @@
[build-system]
# This tells pip/build how to build your project (PEP 517).
requires = ["setuptools>=77", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "package-manager"
version = "0.1.0"
description = "Kevin's Package Manager is a configurable Python tool to manage multiple repositories via Bash."
readme = "README.md"
requires-python = ">=3.10"

# Use a simple SPDX license string to avoid the deprecation warning.
license = "MIT"

authors = [
    { name = "Kevin Veen-Birkenbach", email = "info@veen.world" }
]

# Python runtime dependencies.
dependencies = [
    "PyYAML"
]

[project.urls]
Homepage = "https://www.veen.world"
Repository = "https://github.com/kevinveenbirkenbach/package-manager"
Funding-GitHub-Sponsors = "https://github.com/sponsors/kevinveenbirkenbach"
Funding-Patreon = "https://www.patreon.com/c/kevinveenbirkenbach"
Funding-BuyMeACoffee = "https://buymeacoffee.com/kevinveenbirkenbach"
Funding-PayPal = "https://s.veen.world/paypaldonate"

[tool.setuptools]
packages = ["pkgmgr"]

@@ -1 +1,4 @@
+# Legacy file used only if pip still installs from requirements.txt.
+# You may delete this file once you switch entirely to pyproject.toml.

PyYAML

9 requirements.yml Normal file
@@ -0,0 +1,9 @@
# This file defines Ansible Galaxy dependencies for this repository.
# It is used by `ansible-galaxy install -r requirements.yml`
# to download required collections and roles.
#
# Since the package-manager project does not rely on Ansible,
# both sections are intentionally left empty.

collections: []
roles: []