diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..4e9917d
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,26 @@
+# Docker build context cleanup
+
+# Nix local store/cache
+.nix/
+
+# Git internals
+.git
+.gitignore
+
+# Python
+__pycache__/
+*.py[cod]
+*.egg-info/
+dist/
+build/
+
+# venvs
+.venv/
+venv/
+.venvs/
+
+# Editor/OS noise
+.vscode/
+.idea/
+.DS_Store
+Thumbs.db
diff --git a/.gitignore b/.gitignore
index 8b04678..a00cab2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -25,3 +25,6 @@ build/
 # OS noise
 .DS_Store
 Thumbs.db
+
+# Nix cache to speed up tests
+.nix/
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index ae63572..ea30d59 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,40 +1,31 @@
 FROM archlinux:latest
 
-# Update system and install core tooling
+# 1) System base + Nix
 RUN pacman -Syu --noconfirm \
     && pacman -S --noconfirm --needed \
+       base-devel \
        git \
-       make \
-       sudo \
-       python \
-       python-pip \
-       python-virtualenv \
-       python-setuptools \
-       python-wheel \
+       nix \
     && pacman -Scc --noconfirm
 
-# Ensure local bin is in PATH (for pkgmgr links)
-ENV PATH="/root/.local/bin:$PATH"
+ENV NIX_CONFIG="experimental-features = nix-command flakes"
 
-# Create virtual environment
-ENV VIRTUAL_ENV=/root/.venvs/pkgmgr
-RUN python -m venv $VIRTUAL_ENV
-ENV PATH="$VIRTUAL_ENV/bin:$PATH"
+# 2) Unprivileged user for building Arch packages
+RUN useradd -m builder
+WORKDIR /build
 
-# Working directory for the package-manager project
-WORKDIR /root/Repositories/github.com/kevinveenbirkenbach/package-manager
+# 3) Copy in only the PKGBUILD to build the wrapper package
+COPY PKGBUILD .
 
-# Copy local package-manager source into container
-COPY . .
-
-# Install Python dependencies and register pkgmgr inside the venv
-RUN pip install --upgrade pip \
-    && pip install PyYAML \
-    && chmod +x main.py \
-    && python main.py install package-manager --quiet --clone-mode shallow --no-verification
-
-# Copy again to allow rebuild-based code changes
+RUN chown -R builder:builder /build \
+    && su builder -c "makepkg -s --noconfirm --clean" \
+    && pacman -U --noconfirm package-manager-*.pkg.tar.* \
+    && rm -rf /build
+
+# 4) Copy the project sources into the container for the tests
+WORKDIR /src
 COPY . .
 
+# pkgmgr (the Arch package) is installed and invokes nix run.
 ENTRYPOINT ["pkgmgr"]
 CMD ["--help"]
diff --git a/Makefile b/Makefile
index c80afeb..b2abb0f 100644
--- a/Makefile
+++ b/Makefile
@@ -1,31 +1,53 @@
-.PHONY: install setup uninstall aur_builder_setup
+.PHONY: install setup uninstall aur_builder_setup test
+
+# Local Nix cache directories in the repo
+NIX_STORE_DIR := .nix/store
+NIX_CACHE_DIR := .nix/cache
 
 setup: install
-	@python3 main.py install
+	@echo "Running pkgmgr setup via main.py..."
+	@if [ -x "$$HOME/.venvs/pkgmgr/bin/python" ]; then \
+		echo "Using virtualenv Python at $$HOME/.venvs/pkgmgr/bin/python"; \
+		"$$HOME/.venvs/pkgmgr/bin/python" main.py install; \
+	else \
+		echo "Virtualenv not found, falling back to system python3"; \
+		python3 main.py install; \
+	fi
 
 test:
+	@echo "Ensuring local Nix cache directories exist..."
+	@mkdir -p "$(NIX_STORE_DIR)" "$(NIX_CACHE_DIR)"
+	@echo "Building test image 'package-manager-test'..."
 	docker build -t package-manager-test .
-	docker run --rm --entrypoint python package-manager-test -m unittest discover -s tests -p "test_*.py"
+	@echo "Running tests inside Nix devShell with local cache..."
+ docker run --rm \ + -v "$$(pwd)/$(NIX_STORE_DIR):/nix" \ + -v "$$(pwd)/$(NIX_CACHE_DIR):/root/.cache/nix" \ + --workdir /src \ + --entrypoint nix \ + package-manager-test \ + develop .#default --no-write-lock-file -c \ + python -m unittest discover -s tests -p "test_*.py" install: @echo "Making 'main.py' executable..." @chmod +x main.py @echo "Checking if global user virtual environment exists..." - @mkdir -p ~/.venvs - @if [ ! -d ~/.venvs/pkgmgr ]; then \ - echo "Creating global venv at ~/.venvs/pkgmgr..."; \ - python3 -m venv ~/.venvs/pkgmgr; \ + @mkdir -p "$$HOME/.venvs" + @if [ ! -d "$$HOME/.venvs/pkgmgr" ]; then \ + echo "Creating global venv at $$HOME/.venvs/pkgmgr..."; \ + python3 -m venv "$$HOME/.venvs/pkgmgr"; \ fi - @echo "Installing required Python packages into ~/.venvs/pkgmgr..." - @~/.venvs/pkgmgr/bin/python -m ensurepip --upgrade - @~/.venvs/pkgmgr/bin/pip install --upgrade pip setuptools wheel - @~/.venvs/pkgmgr/bin/pip install -r requirements.txt - @echo "Ensuring ~/.bashrc and ~/.zshrc exist..." - @touch ~/.bashrc ~/.zshrc - @echo "Ensuring automatic activation of ~/.venvs/pkgmgr for this user..." - @for rc in ~/.bashrc ~/.zshrc; do \ - rc_line='if [ -d "$${HOME}/.venvs/pkgmgr" ]; then . "$${HOME}/.venvs/pkgmgr/bin/activate"; echo "Global Python virtual environment '\''~/.venvs/pkgmgr'\'' activated."; fi'; \ - grep -qxF "$${rc_line}" $$rc || echo "$${rc_line}" >> $$rc; \ + @echo "Installing required Python packages into $$HOME/.venvs/pkgmgr..." + @$$HOME/.venvs/pkgmgr/bin/python -m ensurepip --upgrade + @$$HOME/.venvs/pkgmgr/bin/pip install --upgrade pip setuptools wheel + @$$HOME/.venvs/pkgmgr/bin/pip install -r requirements.txt + @echo "Ensuring $$HOME/.bashrc and $$HOME/.zshrc exist..." + @touch "$$HOME/.bashrc" "$$HOME/.zshrc" + @echo "Ensuring automatic activation of $$HOME/.venvs/pkgmgr for this user..." + @for rc in "$$HOME/.bashrc" "$$HOME/.zshrc"; do \ + rc_line='if [ -d "$${HOME}/.venvs/pkgmgr" ]; then . "$${HOME}/.venvs/pkgmgr/bin/activate"; if [ -n "$${PS1:-}" ]; then echo "Global Python virtual environment '\''~/.venvs/pkgmgr'\'' activated."; fi; fi'; \ + grep -qxF "$${rc_line}" "$$rc" || echo "$${rc_line}" >> "$$rc"; \ done @echo "Arch/Manjaro detection and optional AUR setup..." @if command -v pacman >/dev/null 2>&1; then \ @@ -56,9 +78,9 @@ aur_builder_setup: uninstall: @echo "Removing global user virtual environment if it exists..." - @rm -rf ~/.venvs/pkgmgr - @echo "Cleaning up ~/.bashrc and ~/.zshrc entries..." - @for rc in ~/.bashrc ~/.zshrc; do \ - sed -i '/\.venvs\/pkgmgr\/bin\/activate"; echo "Global Python virtual environment '\''~\/\.venvs\/pkgmgr'\'' activated."; fi/d' $$rc; \ + @rm -rf "$$HOME/.venvs/pkgmgr" + @echo "Cleaning up $$HOME/.bashrc and $$HOME/.zshrc entries..." + @for rc in "$$HOME/.bashrc" "$$HOME/.zshrc"; do \ + sed -i '/\.venvs\/pkgmgr\/bin\/activate"; if \[ -n "\$${PS1:-}" \]; then echo "Global Python virtual environment '\''~\/\.venvs\/pkgmgr'\'' activated."; fi; fi/d' "$$rc"; \ done @echo "Uninstallation complete. Please restart your shell (or 'exec bash' or 'exec zsh') for the changes to fully apply." diff --git a/PKGBUILD b/PKGBUILD index d3d8140..b6c0017 100644 --- a/PKGBUILD +++ b/PKGBUILD @@ -3,37 +3,38 @@ pkgname=package-manager pkgver=0.1.0 pkgrel=1 -pkgdesc="A configurable Python tool to manage multiple repositories via Bash and automate common Git operations." +pkgdesc="Wrapper that runs Kevin's package-manager via Nix flake." 
arch=('any') url="https://github.com/kevinveenbirkenbach/package-manager" license=('MIT') -depends=( - 'python' - 'python-yaml' - 'git' - 'bash' -) +# Nix is the only runtime dependency. +depends=('nix') -makedepends=( - 'python-build' - 'python-installer' - 'python-wheel' - 'python-setuptools' -) +makedepends=() -source=("$pkgname-$pkgver.tar.gz::$url/archive/refs/tags/v$pkgver.tar.gz") -sha256sums=('SKIP') +source=() +sha256sums=() build() { - cd "$srcdir/$pkgname-$pkgver" - python -m build --wheel --no-isolation + : } package() { - cd "$srcdir/$pkgname-$pkgver" - python -m installer --destdir="$pkgdir" dist/*.whl + install -d "$pkgdir/usr/bin" - # Optional: add pkgmgr executable symlink - install -Dm755 main.py "$pkgdir/usr/bin/pkgmgr" + cat > "$pkgdir/usr/bin/pkgmgr" << 'EOF' +#!/usr/bin/env bash +set -euo pipefail + +# Enable flakes if not already configured. +if [[ -z "${NIX_CONFIG:-}" ]]; then + export NIX_CONFIG="experimental-features = nix-command flakes" +fi + +# Run package-manager via Nix flake +exec nix run "github:kevinveenbirkenbach/package-manager#pkgmgr" -- "$@" +EOF + + chmod 755 "$pkgdir/usr/bin/pkgmgr" } diff --git a/flake.nix b/flake.nix index e1fc8ce..0539a59 100644 --- a/flake.nix +++ b/flake.nix @@ -1,7 +1,3 @@ -# flake.nix -# This file defines a Nix flake providing a reproducible development environment -# and optional installation package for the package-manager tool. - { description = "Nix flake for Kevin's package-manager tool"; @@ -11,30 +7,75 @@ outputs = { self, nixpkgs }: let - pkgs = nixpkgs.legacyPackages.x86_64-linux; + systems = [ "x86_64-linux" "aarch64-linux" ]; + + # Small helper: build an attrset for all systems + forAllSystems = f: + builtins.listToAttrs (map (system: { + name = system; + value = f system; + }) systems); in { + # Dev shells: nix develop .#default (on both architectures) + devShells = forAllSystems (system: + let + pkgs = nixpkgs.legacyPackages.${system}; + python = pkgs.python311; + pypkgs = pkgs.python311Packages; - # Development environment used via: nix develop - devShells.default = pkgs.mkShell { - # System packages for development - buildInputs = [ - pkgs.python311 - pkgs.python311Packages.pyyaml - pkgs.git - ]; + # Be robust: ansible-core if available, otherwise ansible. + ansiblePkg = + if pkgs ? 
ansible-core then pkgs.ansible-core + else pkgs.ansible; + in { + default = pkgs.mkShell { + buildInputs = [ + python + pypkgs.pyyaml + pkgs.git + ansiblePkg + ]; + shellHook = '' + echo "Entered pkgmgr development environment for ${system}"; + ''; + }; + } + ); - # Message shown on environment entry - shellHook = '' - echo "Entered pkgmgr development environment"; - ''; - }; + # Packages: nix build .#pkgmgr / .#default + packages = forAllSystems (system: + let + pkgs = nixpkgs.legacyPackages.${system}; + python = pkgs.python311; + pypkgs = pkgs.python311Packages; - # Optional installable package for "nix profile install" - packages.pkgmgr = pkgs.python311Packages.buildPythonApplication { - pname = "package-manager"; - version = "0.1.0"; - src = ./.; - propagatedBuildInputs = [ pkgs.python311Packages.pyyaml ]; - }; + pkgmgrPkg = pypkgs.buildPythonApplication { + pname = "package-manager"; + version = "0.1.0"; + src = ./.; + + propagatedBuildInputs = [ + pypkgs.pyyaml + # add further dependencies here + ]; + }; + in { + pkgmgr = pkgmgrPkg; + default = pkgmgrPkg; + } + ); + + # Apps: nix run .#pkgmgr / .#default + apps = forAllSystems (system: + let + pkgmgrPkg = self.packages.${system}.pkgmgr; + in { + pkgmgr = { + type = "app"; + program = "${pkgmgrPkg}/bin/pkgmgr"; + }; + default = self.apps.${system}.pkgmgr; + } + ); }; } diff --git a/pkgmgr/context.py b/pkgmgr/context.py new file mode 100644 index 0000000..be52d23 --- /dev/null +++ b/pkgmgr/context.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Shared context object for repository installation steps. + +This data class bundles all information needed by installer components so +they do not depend on global state or long parameter lists. +""" + +from dataclasses import dataclass +from typing import Any, Dict, List + + +@dataclass +class RepoContext: + """Container for all repository-related data used during installation.""" + + repo: Dict[str, Any] + identifier: str + repo_dir: str + repositories_base_dir: str + bin_dir: str + all_repos: List[Dict[str, Any]] + + no_verification: bool + preview: bool + quiet: bool + clone_mode: str + update_dependencies: bool diff --git a/pkgmgr/install_repos.py b/pkgmgr/install_repos.py index 66399a8..8321c80 100644 --- a/pkgmgr/install_repos.py +++ b/pkgmgr/install_repos.py @@ -1,243 +1,203 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Repository installation pipeline for pkgmgr. + +This module orchestrates the installation of repositories by: + + 1. Ensuring the repository directory exists (cloning if necessary). + 2. Verifying the repository according to the configured policies. + 3. Creating executable links using create_ink(). + 4. Running a sequence of modular installer components that handle + specific technologies or manifests (pkgmgr.yml, PKGBUILD, Nix, + Ansible requirements, Python, Makefile). + +The goal is to keep this file thin and delegate most logic to small, +focused installer classes. 
+""" + import os -import subprocess -import sys -import tempfile -import shutil -import yaml +from typing import List, Dict, Any, Tuple from pkgmgr.get_repo_identifier import get_repo_identifier from pkgmgr.get_repo_dir import get_repo_dir from pkgmgr.create_ink import create_ink -from pkgmgr.run_command import run_command from pkgmgr.verify import verify_repository from pkgmgr.clone_repos import clone_repos +from pkgmgr.context import RepoContext -def _extract_pkgbuild_array(repo_dir: str, var_name: str) -> list: - """ - Extract a Bash array (depends/makedepends) from PKGBUILD using bash itself. - Returns a list of package names or an empty list on error. - """ - pkgbuild_path = os.path.join(repo_dir, "PKGBUILD") - if not os.path.exists(pkgbuild_path): - return [] +# Installer implementations +from pkgmgr.installers.pkgmgr_manifest import PkgmgrManifestInstaller +from pkgmgr.installers.pkgbuild import PkgbuildInstaller +from pkgmgr.installers.nix_flake import NixFlakeInstaller +from pkgmgr.installers.ansible_requirements import AnsibleRequirementsInstaller +from pkgmgr.installers.python import PythonInstaller +from pkgmgr.installers.makefile import MakefileInstaller +from pkgmgr.installers.aur import AurInstaller - script = f'source PKGBUILD >/dev/null 2>&1; printf "%s\\n" "${{{var_name}[@]}}"' - try: - output = subprocess.check_output( - ["bash", "-lc", script], - cwd=repo_dir, - text=True, + +# Ordered list of installers to apply to each repository +INSTALLERS = [ + PkgmgrManifestInstaller(), + PkgbuildInstaller(), + NixFlakeInstaller(), + AnsibleRequirementsInstaller(), + PythonInstaller(), + MakefileInstaller(), + AurInstaller(), +] + + +def _ensure_repo_dir( + repo: Dict[str, Any], + repositories_base_dir: str, + all_repos: List[Dict[str, Any]], + preview: bool, + no_verification: bool, + clone_mode: str, + identifier: str, +) -> str: + """ + Ensure the repository directory exists. If not, attempt to clone it. + + Returns the repository directory path or an empty string if cloning failed. + """ + repo_dir = get_repo_dir(repositories_base_dir, repo) + + if not os.path.exists(repo_dir): + print(f"Repository directory '{repo_dir}' does not exist. Cloning it now...") + clone_repos( + [repo], + repositories_base_dir, + all_repos, + preview, + no_verification, + clone_mode, ) - except Exception: - return [] + if not os.path.exists(repo_dir): + print(f"Cloning failed for repository {identifier}. Skipping installation.") + return "" - return [line.strip() for line in output.splitlines() if line.strip()] + return repo_dir -def _install_arch_dependencies_from_pkgbuild(repo_dir: str, preview: bool) -> None: - """ - If PKGBUILD exists and pacman is available, install depends + makedepends - via pacman. - """ - if shutil.which("pacman") is None: - return - - pkgbuild_path = os.path.join(repo_dir, "PKGBUILD") - if not os.path.exists(pkgbuild_path): - return - - depends = _extract_pkgbuild_array(repo_dir, "depends") - makedepends = _extract_pkgbuild_array(repo_dir, "makedepends") - all_pkgs = depends + makedepends - - if not all_pkgs: - return - - cmd = "sudo pacman -S --noconfirm " + " ".join(all_pkgs) - run_command(cmd, preview=preview) - - -def _install_nix_flake_profile(repo_dir: str, preview: bool) -> None: - """ - If flake.nix exists and 'nix' is available, try to install a profile - from the flake. Convention: try .#pkgmgr, then .#default. 
- """ - flake_path = os.path.join(repo_dir, "flake.nix") - if not os.path.exists(flake_path): - return - if shutil.which("nix") is None: - print("Warning: flake.nix found but 'nix' command not available. Skipping flake setup.") - return - - print("Nix flake detected, attempting to install profile output...") - for output in ("pkgmgr", "default"): - cmd = f"nix profile install {repo_dir}#{output}" - try: - run_command(cmd, preview=preview) - print(f"Nix flake output '{output}' successfully installed.") - break - except SystemExit as e: - print(f"[Warning] Failed to install Nix flake output '{output}': {e}") - - -def _install_pkgmgr_dependencies_from_manifest( +def _verify_repo( + repo: Dict[str, Any], repo_dir: str, no_verification: bool, - update_dependencies: bool, - clone_mode: str, + identifier: str, +) -> bool: + """ + Verify the repository using verify_repository(). + + Returns True if installation should proceed, False if it should be skipped. + """ + verified_info = repo.get("verified") + verified_ok, errors, commit_hash, signing_key = verify_repository( + repo, + repo_dir, + mode="local", + no_verification=no_verification, + ) + + if not no_verification and verified_info and not verified_ok: + print(f"Warning: Verification failed for {identifier}:") + for err in errors: + print(f" - {err}") + choice = input("Proceed with installation? (y/N): ").strip().lower() + if choice != "y": + print(f"Skipping installation for {identifier}.") + return False + + return True + + +def _create_context( + repo: Dict[str, Any], + identifier: str, + repo_dir: str, + repositories_base_dir: str, + bin_dir: str, + all_repos: List[Dict[str, Any]], + no_verification: bool, preview: bool, -) -> None: + quiet: bool, + clone_mode: str, + update_dependencies: bool, +) -> RepoContext: """ - Read pkgmgr.yml (if present) and install referenced pkgmgr repository - dependencies. - - Expected format: - - version: 1 - author: "..." - url: "..." - description: "..." - dependencies: - - repository: github:user/repo - version: main - reason: "Optional description" + Build a RepoContext for the given repository and parameters. 
""" - manifest_path = os.path.join(repo_dir, "pkgmgr.yml") - if not os.path.exists(manifest_path): - return - - try: - with open(manifest_path, "r", encoding="utf-8") as f: - manifest = yaml.safe_load(f) or {} - except Exception as e: - print(f"Error loading pkgmgr.yml in '{repo_dir}': {e}") - return - - dependencies = manifest.get("dependencies", []) or [] - if not isinstance(dependencies, list) or not dependencies: - return - - # Optional: show basic metadata (author/url/description) if present - author = manifest.get("author") - url = manifest.get("url") - description = manifest.get("description") - - if not preview: - print("pkgmgr manifest detected:") - if author: - print(f" author: {author}") - if url: - print(f" url: {url}") - if description: - print(f" description: {description}") - - dep_repo_ids = [] - for dep in dependencies: - if not isinstance(dep, dict): - continue - repo_id = dep.get("repository") - if repo_id: - dep_repo_ids.append(str(repo_id)) - - # Optionally: update (pull) dependencies before installing - if update_dependencies and dep_repo_ids: - cmd_pull = "pkgmgr pull " + " ".join(dep_repo_ids) - try: - run_command(cmd_pull, preview=preview) - except SystemExit as e: - print(f"Warning: 'pkgmgr pull' for dependencies failed (exit code {e}).") - - # Install dependencies one by one - for dep in dependencies: - if not isinstance(dep, dict): - continue - - repo_id = dep.get("repository") - if not repo_id: - continue - - version = dep.get("version") - reason = dep.get("reason") - - if reason and not preview: - print(f"Installing dependency {repo_id}: {reason}") - else: - print(f"Installing dependency {repo_id}...") - - cmd = f"pkgmgr install {repo_id}" - - if version: - cmd += f" --version {version}" - - if no_verification: - cmd += " --no-verification" - - if update_dependencies: - cmd += " --dependencies" - - if clone_mode: - cmd += f" --clone-mode {clone_mode}" - - try: - run_command(cmd, preview=preview) - except SystemExit as e: - print(f"[Warning] Failed to install dependency '{repo_id}': {e}") + return RepoContext( + repo=repo, + identifier=identifier, + repo_dir=repo_dir, + repositories_base_dir=repositories_base_dir, + bin_dir=bin_dir, + all_repos=all_repos, + no_verification=no_verification, + preview=preview, + quiet=quiet, + clone_mode=clone_mode, + update_dependencies=update_dependencies, + ) def install_repos( - selected_repos, - repositories_base_dir, - bin_dir, - all_repos, - no_verification, - preview, - quiet, + selected_repos: List[Dict[str, Any]], + repositories_base_dir: str, + bin_dir: str, + all_repos: List[Dict[str, Any]], + no_verification: bool, + preview: bool, + quiet: bool, clone_mode: str, update_dependencies: bool, -): +) -> None: """ Install repositories by creating symbolic links and processing standard manifest files (pkgmgr.yml, PKGBUILD, flake.nix, Ansible requirements, - Python manifests, Makefile). + Python manifests, Makefile) via dedicated installer components. """ for repo in selected_repos: - repo_identifier = get_repo_identifier(repo, all_repos) - repo_dir = get_repo_dir(repositories_base_dir, repo) - - if not os.path.exists(repo_dir): - print(f"Repository directory '{repo_dir}' does not exist. Cloning it now...") - # Pass the clone_mode parameter to clone_repos - clone_repos( - [repo], - repositories_base_dir, - all_repos, - preview, - no_verification, - clone_mode, - ) - if not os.path.exists(repo_dir): - print(f"Cloning failed for repository {repo_identifier}. 
Skipping installation.") - continue - - verified_info = repo.get("verified") - verified_ok, errors, commit_hash, signing_key = verify_repository( - repo, - repo_dir, - mode="local", + identifier = get_repo_identifier(repo, all_repos) + repo_dir = _ensure_repo_dir( + repo=repo, + repositories_base_dir=repositories_base_dir, + all_repos=all_repos, + preview=preview, no_verification=no_verification, + clone_mode=clone_mode, + identifier=identifier, + ) + if not repo_dir: + continue + + if not _verify_repo( + repo=repo, + repo_dir=repo_dir, + no_verification=no_verification, + identifier=identifier, + ): + continue + + ctx = _create_context( + repo=repo, + identifier=identifier, + repo_dir=repo_dir, + repositories_base_dir=repositories_base_dir, + bin_dir=bin_dir, + all_repos=all_repos, + no_verification=no_verification, + preview=preview, + quiet=quiet, + clone_mode=clone_mode, + update_dependencies=update_dependencies, ) - if not no_verification and verified_info and not verified_ok: - print(f"Warning: Verification failed for {repo_identifier}:") - for err in errors: - print(f" - {err}") - choice = input("Proceed with installation? (y/N): ").strip().lower() - if choice != "y": - print(f"Skipping installation for {repo_identifier}.") - continue - - # Create the symlink using create_ink. + # Create the symlink using create_ink before running installers. create_ink( repo, repositories_base_dir, @@ -247,77 +207,7 @@ def install_repos( preview=preview, ) - # 1) pkgmgr.yml (pkgmgr-internal manifest for other repositories) - _install_pkgmgr_dependencies_from_manifest( - repo_dir=repo_dir, - no_verification=no_verification, - update_dependencies=update_dependencies, - clone_mode=clone_mode, - preview=preview, - ) - - # 2) Arch: PKGBUILD (depends/makedepends) - _install_arch_dependencies_from_pkgbuild(repo_dir, preview=preview) - - # 3) Nix: flake.nix - _install_nix_flake_profile(repo_dir, preview=preview) - - # 4) Ansible: requirements.yml (only collections/roles) - req_file = os.path.join(repo_dir, "requirements.yml") - if os.path.exists(req_file): - try: - with open(req_file, "r", encoding="utf-8") as f: - requirements = yaml.safe_load(f) or {} - except Exception as e: - print(f"Error loading requirements.yml in {repo_identifier}: {e}") - requirements = None - - if requirements and isinstance(requirements, dict): - if "collections" in requirements or "roles" in requirements: - print(f"Ansible dependencies found in {repo_identifier}, installing...") - - ansible_requirements = {} - if "collections" in requirements: - ansible_requirements["collections"] = requirements["collections"] - if "roles" in requirements: - ansible_requirements["roles"] = requirements["roles"] - - with tempfile.NamedTemporaryFile( - mode="w", - suffix=".yml", - delete=False, - ) as tmp: - yaml.dump(ansible_requirements, tmp, default_flow_style=False) - tmp_filename = tmp.name - - if "collections" in ansible_requirements: - print(f"Ansible collections found in {repo_identifier}, installing...") - cmd = f"ansible-galaxy collection install -r {tmp_filename}" - run_command(cmd, cwd=repo_dir, preview=preview) - - if "roles" in ansible_requirements: - print(f"Ansible roles found in {repo_identifier}, installing...") - cmd = f"ansible-galaxy role install -r {tmp_filename}" - run_command(cmd, cwd=repo_dir, preview=preview) - - # 5) Python: pyproject.toml (modern) / requirements.txt (classic) - pyproject_path = os.path.join(repo_dir, "pyproject.toml") - if os.path.exists(pyproject_path): - print(f"pyproject.toml found in 
{repo_identifier}, installing Python project...") - cmd = "~/.venvs/pkgmgr/bin/pip install ." - run_command(cmd, cwd=repo_dir, preview=preview) - - req_txt_file = os.path.join(repo_dir, "requirements.txt") - if os.path.exists(req_txt_file): - print(f"requirements.txt found in {repo_identifier}, installing Python dependencies...") - cmd = "~/.venvs/pkgmgr/bin/pip install -r requirements.txt" - run_command(cmd, cwd=repo_dir, preview=preview) - - # 6) Makefile: make install (if present) - makefile_path = os.path.join(repo_dir, "Makefile") - if os.path.exists(makefile_path): - cmd = "make install" - try: - run_command(cmd, cwd=repo_dir, preview=preview) - except SystemExit as e: - print(f"[Warning] Failed to run '{cmd}' for {repo_identifier}: {e}") + # Run all installers that support this repository. + for installer in INSTALLERS: + if installer.supports(ctx): + installer.run(ctx) diff --git a/pkgmgr/installers/__init__.py b/pkgmgr/installers/__init__.py new file mode 100644 index 0000000..e1d84df --- /dev/null +++ b/pkgmgr/installers/__init__.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Installer package for pkgmgr. + +Each installer implements a small, focused step in the repository +installation pipeline (e.g. PKGBUILD dependencies, Nix flakes, Python, etc.). +""" diff --git a/pkgmgr/installers/ansible_requirements.py b/pkgmgr/installers/ansible_requirements.py new file mode 100644 index 0000000..71e2f91 --- /dev/null +++ b/pkgmgr/installers/ansible_requirements.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Installer for Ansible dependencies defined in requirements.yml. + +This installer installs collections and roles via ansible-galaxy when found. +""" + +import os +import tempfile +from typing import Any, Dict + +import yaml + +from pkgmgr.context import RepoContext +from pkgmgr.installers.base import BaseInstaller +from pkgmgr.run_command import run_command + + +class AnsibleRequirementsInstaller(BaseInstaller): + """Install Ansible collections and roles from requirements.yml.""" + + REQUIREMENTS_FILE = "requirements.yml" + + def supports(self, ctx: RepoContext) -> bool: + req_file = os.path.join(ctx.repo_dir, self.REQUIREMENTS_FILE) + return os.path.exists(req_file) + + def _load_requirements(self, req_path: str, identifier: str) -> Dict[str, Any]: + try: + with open(req_path, "r", encoding="utf-8") as f: + return yaml.safe_load(f) or {} + except Exception as exc: + print(f"Error loading {self.REQUIREMENTS_FILE} in {identifier}: {exc}") + return {} + + def run(self, ctx: RepoContext) -> None: + req_file = os.path.join(ctx.repo_dir, self.REQUIREMENTS_FILE) + requirements = self._load_requirements(req_file, ctx.identifier) + if not requirements or not isinstance(requirements, dict): + return + + if "collections" not in requirements and "roles" not in requirements: + return + + print(f"Ansible dependencies found in {ctx.identifier}, installing...") + + ansible_requirements: Dict[str, Any] = {} + if "collections" in requirements: + ansible_requirements["collections"] = requirements["collections"] + if "roles" in requirements: + ansible_requirements["roles"] = requirements["roles"] + + with tempfile.NamedTemporaryFile( + mode="w", + suffix=".yml", + delete=False, + ) as tmp: + yaml.dump(ansible_requirements, tmp, default_flow_style=False) + tmp_filename = tmp.name + + if "collections" in ansible_requirements: + print(f"Ansible collections found in {ctx.identifier}, installing...") + cmd = f"ansible-galaxy collection install -r 
{tmp_filename}" + run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview) + + if "roles" in ansible_requirements: + print(f"Ansible roles found in {ctx.identifier}, installing...") + cmd = f"ansible-galaxy role install -r {tmp_filename}" + run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview) diff --git a/pkgmgr/installers/aur.py b/pkgmgr/installers/aur.py new file mode 100644 index 0000000..e52f977 --- /dev/null +++ b/pkgmgr/installers/aur.py @@ -0,0 +1,131 @@ +# pkgmgr/installers/aur.py + +import os +import shutil +import yaml +from typing import List + +from pkgmgr.installers.base import BaseInstaller +from pkgmgr.context import RepoContext +from pkgmgr.run_command import run_command + + +AUR_CONFIG_FILENAME = "aur.yml" + + +class AurInstaller(BaseInstaller): + """ + Installer for Arch AUR dependencies declared in an `aur.yml` file. + + This installer is: + - Arch-only (requires `pacman`) + - optional helper-driven (yay/paru/..) + - safe to ignore on non-Arch systems + """ + + def _is_arch_like(self) -> bool: + return shutil.which("pacman") is not None + + def _config_path(self, ctx: RepoContext) -> str: + return os.path.join(ctx.repo_dir, AUR_CONFIG_FILENAME) + + def _load_config(self, ctx: RepoContext) -> dict: + path = self._config_path(ctx) + if not os.path.exists(path): + return {} + + try: + with open(path, "r", encoding="utf-8") as f: + data = yaml.safe_load(f) or {} + except Exception as exc: + print(f"[Warning] Failed to load AUR config from '{path}': {exc}") + return {} + + if not isinstance(data, dict): + print(f"[Warning] AUR config '{path}' is not a mapping. Ignoring.") + return {} + + return data + + def _get_helper(self, cfg: dict) -> str: + # Priority: config.helper > $AUR_HELPER > "yay" + helper = cfg.get("helper") + if isinstance(helper, str) and helper.strip(): + return helper.strip() + + env_helper = os.environ.get("AUR_HELPER") + if env_helper: + return env_helper.strip() + + return "yay" + + def _get_packages(self, cfg: dict) -> List[str]: + raw = cfg.get("packages", []) + if not isinstance(raw, list): + return [] + + names: List[str] = [] + for entry in raw: + if isinstance(entry, str): + name = entry.strip() + if name: + names.append(name) + elif isinstance(entry, dict): + name = str(entry.get("name", "")).strip() + if name: + names.append(name) + + return names + + # --- BaseInstaller API ------------------------------------------------- + + def supports(self, ctx: RepoContext) -> bool: + """ + This installer is supported if: + - We are on an Arch-like system (pacman available), + - An aur.yml exists, + - That aur.yml declares at least one package. + """ + if not self._is_arch_like(): + return False + + cfg = self._load_config(ctx) + if not cfg: + return False + + packages = self._get_packages(cfg) + return len(packages) > 0 + + def run(self, ctx: RepoContext) -> None: + """ + Install AUR packages using the configured helper (default: yay). + """ + if not self._is_arch_like(): + print("AUR installer skipped: not an Arch-like system.") + return + + cfg = self._load_config(ctx) + if not cfg: + print("AUR installer: no valid aur.yml found; skipping.") + return + + packages = self._get_packages(cfg) + if not packages: + print("AUR installer: no AUR packages defined; skipping.") + return + + helper = self._get_helper(cfg) + if shutil.which(helper) is None: + print( + f"[Warning] AUR helper '{helper}' is not available on PATH. " + f"Please install it (e.g. via your aur_builder setup). " + f"Skipping AUR installation." 
+ ) + return + + pkg_list_str = " ".join(packages) + print(f"Installing AUR packages via '{helper}': {pkg_list_str}") + + cmd = f"{helper} -S --noconfirm {pkg_list_str}" + # We respect preview mode to allow dry runs. + run_command(cmd, preview=ctx.preview) diff --git a/pkgmgr/installers/base.py b/pkgmgr/installers/base.py new file mode 100644 index 0000000..e0c83f8 --- /dev/null +++ b/pkgmgr/installers/base.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Base interface for all installer components in the pkgmgr installation pipeline. +""" + +from abc import ABC, abstractmethod +from pkgmgr.context import RepoContext + + +class BaseInstaller(ABC): + """ + A single step in the installation pipeline for a repository. + + Implementations should be small and focused on one technology or manifest + type (e.g. PKGBUILD, Nix, Python, Ansible). + """ + + @abstractmethod + def supports(self, ctx: RepoContext) -> bool: + """ + Return True if this installer should run for the given repository + context. This is typically based on file existence or platform checks. + """ + raise NotImplementedError + + @abstractmethod + def run(self, ctx: RepoContext) -> None: + """ + Execute the installer logic for the given repository context. + Implementations may raise SystemExit via run_command() on errors. + """ + raise NotImplementedError diff --git a/pkgmgr/installers/makefile.py b/pkgmgr/installers/makefile.py new file mode 100644 index 0000000..a389d6d --- /dev/null +++ b/pkgmgr/installers/makefile.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Installer that triggers `make install` if a Makefile is present. + +This is useful for repositories that expose a standard Makefile-based +installation step. +""" + +import os + +from pkgmgr.context import RepoContext +from pkgmgr.installers.base import BaseInstaller +from pkgmgr.run_command import run_command + + +class MakefileInstaller(BaseInstaller): + """Run `make install` if a Makefile exists in the repository.""" + + MAKEFILE_NAME = "Makefile" + + def supports(self, ctx: RepoContext) -> bool: + makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME) + return os.path.exists(makefile_path) + + def run(self, ctx: RepoContext) -> None: + cmd = "make install" + try: + run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview) + except SystemExit as exc: + print(f"[Warning] Failed to run '{cmd}' for {ctx.identifier}: {exc}") diff --git a/pkgmgr/installers/nix_flake.py b/pkgmgr/installers/nix_flake.py new file mode 100644 index 0000000..318659f --- /dev/null +++ b/pkgmgr/installers/nix_flake.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Installer for Nix flakes. + +If a repository contains flake.nix and the 'nix' command is available, this +installer will try to install a profile output from the flake. 
+""" + +import os +import shutil + +from pkgmgr.context import RepoContext +from pkgmgr.installers.base import BaseInstaller +from pkgmgr.run_command import run_command + + +class NixFlakeInstaller(BaseInstaller): + """Install Nix flake profiles for repositories that define flake.nix.""" + + FLAKE_FILE = "flake.nix" + + def supports(self, ctx: RepoContext) -> bool: + if shutil.which("nix") is None: + return False + flake_path = os.path.join(ctx.repo_dir, self.FLAKE_FILE) + return os.path.exists(flake_path) + + def run(self, ctx: RepoContext) -> None: + flake_path = os.path.join(ctx.repo_dir, self.FLAKE_FILE) + if not os.path.exists(flake_path): + return + + if shutil.which("nix") is None: + print("Warning: flake.nix found but 'nix' command not available. Skipping flake setup.") + return + + print("Nix flake detected, attempting to install profile output...") + for output in ("pkgmgr", "default"): + cmd = f"nix profile install {ctx.repo_dir}#{output}" + try: + run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview) + print(f"Nix flake output '{output}' successfully installed.") + except SystemExit as e: + print(f"[Warning] Failed to install Nix flake output '{output}': {e}") + diff --git a/pkgmgr/installers/pkgbuild.py b/pkgmgr/installers/pkgbuild.py new file mode 100644 index 0000000..8978999 --- /dev/null +++ b/pkgmgr/installers/pkgbuild.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Installer for Arch Linux dependencies defined in PKGBUILD files. + +This installer extracts depends/makedepends from PKGBUILD and installs them +via pacman on Arch-based systems. +""" + +import os +import shutil +import subprocess +from typing import List + +from pkgmgr.context import RepoContext +from pkgmgr.installers.base import BaseInstaller +from pkgmgr.run_command import run_command + + +class PkgbuildInstaller(BaseInstaller): + """Install Arch dependencies (depends/makedepends) from PKGBUILD.""" + + PKGBUILD_NAME = "PKGBUILD" + + def supports(self, ctx: RepoContext) -> bool: + if shutil.which("pacman") is None: + return False + pkgbuild_path = os.path.join(ctx.repo_dir, self.PKGBUILD_NAME) + return os.path.exists(pkgbuild_path) + + def _extract_pkgbuild_array(self, ctx: RepoContext, var_name: str) -> List[str]: + """ + Extract a Bash array (depends/makedepends) from PKGBUILD using bash itself. + Returns a list of package names or an empty list on error. + + Uses a minimal shell environment (no profile/rc) to avoid noise from MOTD + or interactive shell banners polluting the output. 
+ """ + pkgbuild_path = os.path.join(ctx.repo_dir, self.PKGBUILD_NAME) + if not os.path.exists(pkgbuild_path): + return [] + + script = f'source {self.PKGBUILD_NAME} >/dev/null 2>&1; printf "%s\\n" "${{{var_name}[@]}}"' + try: + output = subprocess.check_output( + ["bash", "--noprofile", "--norc", "-c", script], + cwd=ctx.repo_dir, + text=True, + ) + except Exception: + return [] + + packages: List[str] = [] + for line in output.splitlines(): + line = line.strip() + if not line: + continue + packages.append(line) + return packages + + def run(self, ctx: RepoContext) -> None: + depends = self._extract_pkgbuild_array(ctx, "depends") + makedepends = self._extract_pkgbuild_array(ctx, "makedepends") + all_pkgs = depends + makedepends + + if not all_pkgs: + return + + cmd = "sudo pacman -S --noconfirm " + " ".join(all_pkgs) + run_command(cmd, preview=ctx.preview) diff --git a/pkgmgr/installers/pkgmgr_manifest.py b/pkgmgr/installers/pkgmgr_manifest.py new file mode 100644 index 0000000..2c49bd5 --- /dev/null +++ b/pkgmgr/installers/pkgmgr_manifest.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Installer for pkgmgr.yml manifest dependencies. + +This installer reads pkgmgr.yml (if present) and installs referenced pkgmgr +repository dependencies via pkgmgr itself. +""" + +import os +from typing import Any, Dict, List + +import yaml + +from pkgmgr.context import RepoContext +from pkgmgr.installers.base import BaseInstaller +from pkgmgr.run_command import run_command + + +class PkgmgrManifestInstaller(BaseInstaller): + """Install pkgmgr-defined repository dependencies from pkgmgr.yml.""" + + MANIFEST_NAME = "pkgmgr.yml" + + def supports(self, ctx: RepoContext) -> bool: + manifest_path = os.path.join(ctx.repo_dir, self.MANIFEST_NAME) + return os.path.exists(manifest_path) + + def _load_manifest(self, manifest_path: str) -> Dict[str, Any]: + try: + with open(manifest_path, "r", encoding="utf-8") as f: + return yaml.safe_load(f) or {} + except Exception as exc: + print(f"Error loading {self.MANIFEST_NAME} in '{manifest_path}': {exc}") + return {} + + def _collect_dependency_ids(self, dependencies: List[Dict[str, Any]]) -> List[str]: + ids: List[str] = [] + for dep in dependencies: + if not isinstance(dep, dict): + continue + repo_id = dep.get("repository") + if repo_id: + ids.append(str(repo_id)) + return ids + + def run(self, ctx: RepoContext) -> None: + manifest_path = os.path.join(ctx.repo_dir, self.MANIFEST_NAME) + manifest = self._load_manifest(manifest_path) + if not manifest: + return + + dependencies = manifest.get("dependencies", []) or [] + if not isinstance(dependencies, list) or not dependencies: + return + + author = manifest.get("author") + url = manifest.get("url") + description = manifest.get("description") + + if not ctx.preview: + print("pkgmgr manifest detected:") + if author: + print(f" author: {author}") + if url: + print(f" url: {url}") + if description: + print(f" description: {description}") + + dep_repo_ids = self._collect_dependency_ids(dependencies) + + if ctx.update_dependencies and dep_repo_ids: + cmd_pull = "pkgmgr pull " + " ".join(dep_repo_ids) + try: + run_command(cmd_pull, preview=ctx.preview) + except SystemExit as exc: + print(f"Warning: 'pkgmgr pull' for dependencies failed (exit code {exc}).") + + # Install dependencies one by one + for dep in dependencies: + if not isinstance(dep, dict): + continue + + repo_id = dep.get("repository") + if not repo_id: + continue + + version = dep.get("version") + reason = dep.get("reason") + + if 
reason and not ctx.preview: + print(f"Installing dependency {repo_id}: {reason}") + else: + print(f"Installing dependency {repo_id}...") + + cmd = f"pkgmgr install {repo_id}" + + if version: + cmd += f" --version {version}" + + if ctx.no_verification: + cmd += " --no-verification" + + if ctx.update_dependencies: + cmd += " --dependencies" + + if ctx.clone_mode: + cmd += f" --clone-mode {ctx.clone_mode}" + + try: + run_command(cmd, preview=ctx.preview) + except SystemExit as exc: + print(f"[Warning] Failed to install dependency '{repo_id}': {exc}") diff --git a/pkgmgr/installers/python.py b/pkgmgr/installers/python.py new file mode 100644 index 0000000..e1cb324 --- /dev/null +++ b/pkgmgr/installers/python.py @@ -0,0 +1,89 @@ +import os +import sys + +from .base import BaseInstaller +from pkgmgr.run_command import run_command + + +class PythonInstaller(BaseInstaller): + """ + Install Python projects based on pyproject.toml and/or requirements.txt. + + Strategy: + - Determine a pip command in this order: + 1. $PKGMGR_PIP (explicit override, e.g. ~/.venvs/pkgmgr/bin/pip) + 2. sys.executable -m pip (current interpreter) + 3. "pip" from PATH as last resort + - If pyproject.toml exists: pip install . + - If requirements.txt exists: pip install -r requirements.txt + """ + + name = "python" + + def supports(self, ctx) -> bool: + """ + Return True if this installer should handle the given repository. + + ctx must provide: + - repo_dir: filesystem path to the repository + """ + repo_dir = ctx.repo_dir + return ( + os.path.exists(os.path.join(repo_dir, "pyproject.toml")) + or os.path.exists(os.path.join(repo_dir, "requirements.txt")) + ) + + def _pip_cmd(self) -> str: + """ + Resolve the pip command to use. + """ + # 1) Explicit override via environment variable + explicit = os.environ.get("PKGMGR_PIP", "").strip() + if explicit: + return explicit + + # 2) Current Python interpreter (works well in Nix/dev shells) + if sys.executable: + return f"{sys.executable} -m pip" + + # 3) Fallback to plain pip + return "pip" + + def run(self, ctx) -> None: + """ + ctx must provide: + - repo_dir: path to repository + - identifier: human readable name + - preview: bool + """ + pip_cmd = self._pip_cmd() + + pyproject = os.path.join(ctx.repo_dir, "pyproject.toml") + if os.path.exists(pyproject): + print( + f"pyproject.toml found in {ctx.identifier}, " + f"installing Python project..." + ) + cmd = f"{pip_cmd} install ." + try: + run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview) + except SystemExit as exc: + print( + f"[Warning] Failed to install Python project in {ctx.identifier}: {exc}" + ) + + req_txt = os.path.join(ctx.repo_dir, "requirements.txt") + if os.path.exists(req_txt): + print( + f"requirements.txt found in {ctx.identifier}, " + f"installing Python dependencies..." 
+ ) + cmd = f"{pip_cmd} install -r requirements.txt" + try: + run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview) + except SystemExit as exc: + print( + f"[Warning] Failed to install Python dependencies in {ctx.identifier}: {exc}" + ) + + diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_integration_install_all_shallow.py b/tests/integration/deactivated_test_integration_install_all_shallow.py similarity index 96% rename from tests/test_integration_install_all_shallow.py rename to tests/integration/deactivated_test_integration_install_all_shallow.py index 326d736..48771b0 100644 --- a/tests/test_integration_install_all_shallow.py +++ b/tests/integration/deactivated_test_integration_install_all_shallow.py @@ -1,4 +1,3 @@ -# tests/test_integration_install_all_shallow.py """ Integration test: install all configured repositories using --clone-mode shallow (HTTPS shallow clone) and --no-verification. diff --git a/tests/test_install_repos.py b/tests/test_install_repos.py deleted file mode 100644 index 3c17b52..0000000 --- a/tests/test_install_repos.py +++ /dev/null @@ -1,129 +0,0 @@ -# tests/test_install_repos.py -import os -import unittest -from unittest.mock import patch, MagicMock, mock_open - -from pkgmgr.install_repos import install_repos - - -class TestInstallRepos(unittest.TestCase): - def setUp(self): - self.repo = { - "provider": "github.com", - "account": "user", - "repository": "repo", - } - self.selected = [self.repo] - self.base_dir = "/tmp/repos" - self.bin_dir = "/tmp/bin" - self.all_repos = self.selected - - @patch("pkgmgr.install_repos.clone_repos") - @patch("pkgmgr.install_repos.os.path.exists") - @patch("pkgmgr.install_repos.get_repo_dir") - @patch("pkgmgr.install_repos.get_repo_identifier") - def test_calls_clone_repos_with_clone_mode( - self, - mock_get_repo_identifier, - mock_get_repo_dir, - mock_exists, - mock_clone_repos, - ): - mock_get_repo_identifier.return_value = "github.com/user/repo" - mock_get_repo_dir.return_value = "/tmp/repos/user/repo" - # Repo-Verzeichnis existiert nicht -> soll geklont werden - mock_exists.return_value = False - - install_repos( - self.selected, - self.base_dir, - self.bin_dir, - self.all_repos, - no_verification=True, - preview=False, - quiet=True, - clone_mode="shallow", - update_dependencies=False, - ) - - mock_clone_repos.assert_called_once() - args, kwargs = mock_clone_repos.call_args - # clone_mode ist letztes Argument - self.assertEqual(args[-1], "shallow") - - @patch("pkgmgr.install_repos.run_command") - @patch("pkgmgr.install_repos.open", new_callable=mock_open, create=True) - @patch("pkgmgr.install_repos.yaml.safe_load") - @patch("pkgmgr.install_repos.os.path.exists") - @patch("pkgmgr.install_repos.create_ink") - @patch("pkgmgr.install_repos.verify_repository") - @patch("pkgmgr.install_repos.get_repo_dir") - @patch("pkgmgr.install_repos.get_repo_identifier") - def test_pkgmgr_requirements_propagate_clone_mode( - self, - mock_get_repo_identifier, - mock_get_repo_dir, - mock_verify, - mock_create_ink, - mock_exists, - mock_safe_load, - mock_open_file, - mock_run_command, - ): - mock_get_repo_identifier.return_value = "github.com/user/repo" - repo_dir = "/tmp/repos/user/repo" - mock_get_repo_dir.return_value = repo_dir - - # exists() muss True für repo_dir & requirements.yml liefern, - # sonst werden die Anforderungen nie verarbeitet. 
- def exists_side_effect(path): - if path == repo_dir: - return True - if path == os.path.join(repo_dir, "requirements.yml"): - return True - # requirements.txt und Makefile sollen "nicht existieren" - return False - - mock_exists.side_effect = exists_side_effect - - mock_verify.return_value = (True, [], "hash", "key") - - # requirements.yml enthält pkgmgr-Dependencies - mock_safe_load.return_value = { - "pkgmgr": ["github.com/other/account/dep"], - } - - commands = [] - - def run_command_side_effect(cmd, cwd=None, preview=False): - commands.append((cmd, cwd, preview)) - - mock_run_command.side_effect = run_command_side_effect - - install_repos( - self.selected, - self.base_dir, - self.bin_dir, - self.all_repos, - no_verification=False, - preview=False, - quiet=True, - clone_mode="shallow", - update_dependencies=False, - ) - - # Prüfen, dass ein pkgmgr install Befehl mit --clone-mode shallow gebaut wurde - pkgmgr_install_cmds = [ - c for (c, cwd, preview) in commands if "pkgmgr install" in c - ] - self.assertTrue( - pkgmgr_install_cmds, - f"No pkgmgr install command was executed. Commands seen: {commands}", - ) - - cmd = pkgmgr_install_cmds[0] - self.assertIn("--clone-mode shallow", cmd) - - -if __name__ == "__main__": - unittest.main() diff --git a/tests/test_main.py b/tests/test_main.py deleted file mode 100644 index fc63fdf..0000000 --- a/tests/test_main.py +++ /dev/null @@ -1,19 +0,0 @@ -# tests/test_main.py -import unittest -import main - - -class TestMainModule(unittest.TestCase): - def test_proxy_commands_defined(self): - """ - Basic sanity check: main.py should define PROXY_COMMANDS - with git/docker/docker compose entries. - """ - self.assertTrue(hasattr(main, "PROXY_COMMANDS")) - self.assertIn("git", main.PROXY_COMMANDS) - self.assertIn("docker", main.PROXY_COMMANDS) - self.assertIn("docker compose", main.PROXY_COMMANDS) - - -if __name__ == "__main__": - unittest.main() diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/pkgmgr/__init__.py b/tests/unit/pkgmgr/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/pkgmgr/installers/__init__.py b/tests/unit/pkgmgr/installers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/pkgmgr/installers/test_ansible_requirements.py b/tests/unit/pkgmgr/installers/test_ansible_requirements.py new file mode 100644 index 0000000..cf087d9 --- /dev/null +++ b/tests/unit/pkgmgr/installers/test_ansible_requirements.py @@ -0,0 +1,71 @@ +# tests/unit/pkgmgr/installers/test_ansible_requirements.py + +import os +import unittest +from unittest.mock import patch, mock_open + +from pkgmgr.context import RepoContext +from pkgmgr.installers.ansible_requirements import AnsibleRequirementsInstaller + + +class TestAnsibleRequirementsInstaller(unittest.TestCase): + def setUp(self): + self.repo = {"name": "test-repo"} + self.ctx = RepoContext( + repo=self.repo, + identifier="test-id", + repo_dir="/tmp/repo", + repositories_base_dir="/tmp", + bin_dir="/bin", + all_repos=[self.repo], + no_verification=False, + preview=False, + quiet=False, + clone_mode="ssh", + update_dependencies=False, + ) + self.installer = AnsibleRequirementsInstaller() + + @patch("os.path.exists", return_value=True) + def test_supports_true_when_requirements_exist(self, mock_exists): + self.assertTrue(self.installer.supports(self.ctx)) + mock_exists.assert_called_with(os.path.join(self.ctx.repo_dir, "requirements.yml")) + + @patch("os.path.exists", 
return_value=False) + def test_supports_false_when_requirements_missing(self, mock_exists): + self.assertFalse(self.installer.supports(self.ctx)) + + @patch("pkgmgr.installers.ansible_requirements.run_command") + @patch("tempfile.NamedTemporaryFile") + @patch( + "builtins.open", + new_callable=mock_open, + read_data=""" +collections: + - name: community.docker +roles: + - src: geerlingguy.docker +""", + ) + @patch("os.path.exists", return_value=True) + def test_run_installs_collections_and_roles( + self, mock_exists, mock_file, mock_tmp, mock_run_command + ): + # Fake temp file name + mock_tmp().__enter__().name = "/tmp/req.yml" + + self.installer.run(self.ctx) + + cmds = [call[0][0] for call in mock_run_command.call_args_list] + self.assertIn( + "ansible-galaxy collection install -r /tmp/req.yml", + cmds, + ) + self.assertIn( + "ansible-galaxy role install -r /tmp/req.yml", + cmds, + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/pkgmgr/installers/test_aur.py b/tests/unit/pkgmgr/installers/test_aur.py new file mode 100644 index 0000000..87a0a1b --- /dev/null +++ b/tests/unit/pkgmgr/installers/test_aur.py @@ -0,0 +1,97 @@ +# tests/unit/pkgmgr/installers/test_aur.py + +import os +import unittest +from unittest.mock import patch, mock_open + +from pkgmgr.context import RepoContext +from pkgmgr.installers.aur import AurInstaller, AUR_CONFIG_FILENAME + + +class TestAurInstaller(unittest.TestCase): + def setUp(self): + self.repo = {"name": "test-repo"} + self.ctx = RepoContext( + repo=self.repo, + identifier="test-id", + repo_dir="/tmp/repo", + repositories_base_dir="/tmp", + bin_dir="/bin", + all_repos=[self.repo], + no_verification=False, + preview=False, + quiet=False, + clone_mode="ssh", + update_dependencies=False, + ) + self.installer = AurInstaller() + + @patch("shutil.which", return_value="/usr/bin/pacman") + @patch("os.path.exists", return_value=True) + @patch( + "builtins.open", + new_callable=mock_open, + read_data=""" +helper: yay +packages: + - aurutils + - name: some-aur-only-tool + reason: "Test tool" +""", + ) + def test_supports_true_when_arch_and_aur_config_present( + self, mock_file, mock_exists, mock_which + ): + self.assertTrue(self.installer.supports(self.ctx)) + mock_which.assert_called_with("pacman") + mock_exists.assert_called_with(os.path.join(self.ctx.repo_dir, AUR_CONFIG_FILENAME)) + + @patch("shutil.which", return_value=None) + def test_supports_false_when_not_arch(self, mock_which): + self.assertFalse(self.installer.supports(self.ctx)) + + @patch("shutil.which", return_value="/usr/bin/pacman") + @patch("os.path.exists", return_value=False) + def test_supports_false_when_no_config(self, mock_exists, mock_which): + self.assertFalse(self.installer.supports(self.ctx)) + + @patch("shutil.which", side_effect=lambda name: "/usr/bin/pacman" if name == "pacman" else "/usr/bin/yay") + @patch("pkgmgr.installers.aur.run_command") + @patch( + "builtins.open", + new_callable=mock_open, + read_data=""" +helper: yay +packages: + - aurutils + - some-aur-only-tool +""", + ) + @patch("os.path.exists", return_value=True) + def test_run_installs_packages_with_helper( + self, mock_exists, mock_file, mock_run_command, mock_which + ): + self.installer.run(self.ctx) + + cmd = mock_run_command.call_args[0][0] + self.assertTrue(cmd.startswith("yay -S --noconfirm ")) + self.assertIn("aurutils", cmd) + self.assertIn("some-aur-only-tool", cmd) + + @patch("shutil.which", return_value="/usr/bin/pacman") + @patch( + "builtins.open", + new_callable=mock_open, + 
read_data="packages: []", + ) + @patch("os.path.exists", return_value=True) + def test_run_skips_when_no_packages( + self, mock_exists, mock_file, mock_which + ): + with patch("pkgmgr.installers.aur.run_command") as mock_run_command: + self.installer.run(self.ctx) + mock_run_command.assert_not_called() + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/pkgmgr/installers/test_base.py b/tests/unit/pkgmgr/installers/test_base.py new file mode 100644 index 0000000..99e0083 --- /dev/null +++ b/tests/unit/pkgmgr/installers/test_base.py @@ -0,0 +1,43 @@ +# tests/unit/pkgmgr/installers/test_base.py + +import unittest +from pkgmgr.installers.base import BaseInstaller +from pkgmgr.context import RepoContext + + +class DummyInstaller(BaseInstaller): + def __init__(self, supports_value: bool = True): + self._supports_value = supports_value + self.ran_with = None + + def supports(self, ctx: RepoContext) -> bool: + return self._supports_value + + def run(self, ctx: RepoContext) -> None: + self.ran_with = ctx + + +class TestBaseInstaller(unittest.TestCase): + def test_dummy_installer_supports_and_run(self): + ctx = RepoContext( + repo={}, + identifier="id", + repo_dir="/tmp/repo", + repositories_base_dir="/tmp", + bin_dir="/bin", + all_repos=[], + no_verification=False, + preview=False, + quiet=False, + clone_mode="ssh", + update_dependencies=False, + ) + inst = DummyInstaller(supports_value=True) + self.assertTrue(inst.supports(ctx)) + self.assertIsNone(inst.ran_with) + inst.run(ctx) + self.assertIs(inst.ran_with, ctx) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/pkgmgr/installers/test_makefile_installer.py b/tests/unit/pkgmgr/installers/test_makefile_installer.py new file mode 100644 index 0000000..fbf47fa --- /dev/null +++ b/tests/unit/pkgmgr/installers/test_makefile_installer.py @@ -0,0 +1,51 @@ +# tests/unit/pkgmgr/installers/test_makefile_installer.py + +import os +import unittest +from unittest.mock import patch + +from pkgmgr.context import RepoContext +from pkgmgr.installers.makefile import MakefileInstaller + + +class TestMakefileInstaller(unittest.TestCase): + def setUp(self): + self.repo = {"name": "test-repo"} + self.ctx = RepoContext( + repo=self.repo, + identifier="test-id", + repo_dir="/tmp/repo", + repositories_base_dir="/tmp", + bin_dir="/bin", + all_repos=[self.repo], + no_verification=False, + preview=False, + quiet=False, + clone_mode="ssh", + update_dependencies=False, + ) + self.installer = MakefileInstaller() + + @patch("os.path.exists", return_value=True) + def test_supports_true_when_makefile_exists(self, mock_exists): + self.assertTrue(self.installer.supports(self.ctx)) + mock_exists.assert_called_with(os.path.join(self.ctx.repo_dir, "Makefile")) + + @patch("os.path.exists", return_value=False) + def test_supports_false_when_makefile_missing(self, mock_exists): + self.assertFalse(self.installer.supports(self.ctx)) + + @patch("pkgmgr.installers.makefile.run_command") + @patch("os.path.exists", return_value=True) + def test_run_executes_make_install(self, mock_exists, mock_run_command): + self.installer.run(self.ctx) + cmd = mock_run_command.call_args[0][0] + self.assertEqual(cmd, "make install") + self.assertEqual( + mock_run_command.call_args[1].get("cwd"), + self.ctx.repo_dir, + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/pkgmgr/installers/test_nix_flake.py b/tests/unit/pkgmgr/installers/test_nix_flake.py new file mode 100644 index 0000000..ec47e23 --- /dev/null +++ 
b/tests/unit/pkgmgr/installers/test_nix_flake.py @@ -0,0 +1,64 @@ +import os +import unittest +from unittest import mock +from unittest.mock import patch + +from pkgmgr.context import RepoContext +from pkgmgr.installers.nix_flake import NixFlakeInstaller + + +class TestNixFlakeInstaller(unittest.TestCase): + def setUp(self): + self.repo = {"name": "test-repo"} + self.ctx = RepoContext( + repo=self.repo, + identifier="test-id", + repo_dir="/tmp/repo", + repositories_base_dir="/tmp", + bin_dir="/bin", + all_repos=[self.repo], + no_verification=False, + preview=False, + quiet=False, + clone_mode="ssh", + update_dependencies=False, + ) + self.installer = NixFlakeInstaller() + + @patch("shutil.which", return_value="/usr/bin/nix") + @patch("os.path.exists", return_value=True) + def test_supports_true_when_nix_and_flake_exist(self, mock_exists, mock_which): + self.assertTrue(self.installer.supports(self.ctx)) + mock_which.assert_called_with("nix") + mock_exists.assert_called_with(os.path.join(self.ctx.repo_dir, "flake.nix")) + + @patch("shutil.which", return_value=None) + @patch("os.path.exists", return_value=True) + def test_supports_false_when_nix_missing(self, mock_exists, mock_which): + self.assertFalse(self.installer.supports(self.ctx)) + + @patch("os.path.exists", return_value=True) + @patch("shutil.which", return_value="/usr/bin/nix") + @mock.patch("pkgmgr.installers.nix_flake.run_command") + def test_run_tries_pkgmgr_then_default(self, mock_run_command, mock_which, mock_exists): + cmds = [] + + def side_effect(cmd, cwd=None, preview=False, *args, **kwargs): + cmds.append(cmd) + return None + + mock_run_command.side_effect = side_effect + + self.installer.run(self.ctx) + + self.assertIn( + f"nix profile install {self.ctx.repo_dir}#pkgmgr", + cmds, + ) + self.assertIn( + f"nix profile install {self.ctx.repo_dir}#default", + cmds, + ) + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/pkgmgr/installers/test_pkgbuild.py b/tests/unit/pkgmgr/installers/test_pkgbuild.py new file mode 100644 index 0000000..30cb13d --- /dev/null +++ b/tests/unit/pkgmgr/installers/test_pkgbuild.py @@ -0,0 +1,65 @@ +# tests/unit/pkgmgr/installers/test_pkgbuild.py + +import os +import unittest +from unittest.mock import patch + +from pkgmgr.context import RepoContext +from pkgmgr.installers.pkgbuild import PkgbuildInstaller + + +class TestPkgbuildInstaller(unittest.TestCase): + def setUp(self): + self.repo = {"name": "test-repo"} + self.ctx = RepoContext( + repo=self.repo, + identifier="test-id", + repo_dir="/tmp/repo", + repositories_base_dir="/tmp", + bin_dir="/bin", + all_repos=[self.repo], + no_verification=False, + preview=False, + quiet=False, + clone_mode="ssh", + update_dependencies=False, + ) + self.installer = PkgbuildInstaller() + + @patch("os.path.exists", return_value=True) + @patch("shutil.which", return_value="/usr/bin/pacman") + def test_supports_true_when_pacman_and_pkgbuild_exist(self, mock_which, mock_exists): + self.assertTrue(self.installer.supports(self.ctx)) + mock_which.assert_called_with("pacman") + mock_exists.assert_called_with(os.path.join(self.ctx.repo_dir, "PKGBUILD")) + + @patch("os.path.exists", return_value=False) + @patch("shutil.which", return_value="/usr/bin/pacman") + def test_supports_false_when_pkgbuild_missing(self, mock_which, mock_exists): + self.assertFalse(self.installer.supports(self.ctx)) + + @patch("pkgmgr.installers.pkgbuild.run_command") + @patch("subprocess.check_output", return_value="python\ngit\n") + @patch("os.path.exists", return_value=True) 
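+    # Note: @patch decorators are applied bottom-up, so the mock arguments of the
+    # test below arrive in the reverse order of the decorator stack.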
+ @patch("shutil.which", return_value="/usr/bin/pacman") + def test_run_installs_all_packages_and_uses_clean_bash( + self, mock_which, mock_exists, mock_check_output, mock_run_command + ): + self.installer.run(self.ctx) + + # Check subprocess.check_output arguments (clean shell) + args, kwargs = mock_check_output.call_args + cmd_list = args[0] + self.assertEqual(cmd_list[0], "bash") + self.assertIn("--noprofile", cmd_list) + self.assertIn("--norc", cmd_list) + + # Check that pacman is called with the extracted packages + cmd = mock_run_command.call_args[0][0] + self.assertTrue(cmd.startswith("sudo pacman -S --noconfirm ")) + self.assertIn("python", cmd) + self.assertIn("git", cmd) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/pkgmgr/installers/test_pkgmgr_manifest.py b/tests/unit/pkgmgr/installers/test_pkgmgr_manifest.py new file mode 100644 index 0000000..6980f50 --- /dev/null +++ b/tests/unit/pkgmgr/installers/test_pkgmgr_manifest.py @@ -0,0 +1,87 @@ +# tests/unit/pkgmgr/installers/test_pkgmgr_manifest.py + +import os +import unittest +from unittest.mock import patch, mock_open + +from pkgmgr.context import RepoContext +from pkgmgr.installers.pkgmgr_manifest import PkgmgrManifestInstaller + + +class TestPkgmgrManifestInstaller(unittest.TestCase): + def setUp(self): + self.repo = {"name": "test-repo"} + self.ctx = RepoContext( + repo=self.repo, + identifier="test-id", + repo_dir="/tmp/repo", + repositories_base_dir="/tmp", + bin_dir="/bin", + all_repos=[self.repo], + no_verification=False, + preview=False, + quiet=False, + clone_mode="ssh", + update_dependencies=True, + ) + self.installer = PkgmgrManifestInstaller() + + @patch("os.path.exists", return_value=True) + def test_supports_true_when_manifest_exists(self, mock_exists): + self.assertTrue(self.installer.supports(self.ctx)) + manifest_path = os.path.join(self.ctx.repo_dir, "pkgmgr.yml") + mock_exists.assert_called_with(manifest_path) + + @patch("os.path.exists", return_value=False) + def test_supports_false_when_manifest_missing(self, mock_exists): + self.assertFalse(self.installer.supports(self.ctx)) + + @patch("pkgmgr.installers.pkgmgr_manifest.run_command") + @patch("builtins.open", new_callable=mock_open, read_data=""" +version: 1 +author: "Kevin" +url: "https://example.com" +description: "Test repo" +dependencies: + - repository: github:user/repo1 + version: main + reason: "Core dependency" + - repository: github:user/repo2 +""") + @patch("os.path.exists", return_value=True) + def test_run_installs_dependencies_and_pulls_when_update_enabled( + self, mock_exists, mock_file, mock_run_command + ): + self.installer.run(self.ctx) + + # First call: pkgmgr pull github:user/repo1 github:user/repo2 + # Then calls to pkgmgr install ... 
+ cmds = [call_args[0][0] for call_args in mock_run_command.call_args_list] + + self.assertIn( + "pkgmgr pull github:user/repo1 github:user/repo2", + cmds, + ) + self.assertIn( + "pkgmgr install github:user/repo1 --version main --dependencies --clone-mode ssh", + cmds, + ) + # For repo2: no version but dependencies + clone_mode + self.assertIn( + "pkgmgr install github:user/repo2 --dependencies --clone-mode ssh", + cmds, + ) + + @patch("pkgmgr.installers.pkgmgr_manifest.run_command") + @patch("builtins.open", new_callable=mock_open, read_data="{}") + @patch("os.path.exists", return_value=True) + def test_run_no_dependencies_no_command_called( + self, mock_exists, mock_file, mock_run_command + ): + self.ctx.update_dependencies = True + self.installer.run(self.ctx) + mock_run_command.assert_not_called() + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/pkgmgr/installers/test_python_installer.py b/tests/unit/pkgmgr/installers/test_python_installer.py new file mode 100644 index 0000000..a0f05d9 --- /dev/null +++ b/tests/unit/pkgmgr/installers/test_python_installer.py @@ -0,0 +1,71 @@ +# tests/unit/pkgmgr/installers/test_python_installer.py + +import os +import unittest +from unittest.mock import patch + +from pkgmgr.context import RepoContext +from pkgmgr.installers.python import PythonInstaller + + +class TestPythonInstaller(unittest.TestCase): + def setUp(self): + self.repo = {"name": "test-repo"} + self.ctx = RepoContext( + repo=self.repo, + identifier="test-id", + repo_dir="/tmp/repo", + repositories_base_dir="/tmp", + bin_dir="/bin", + all_repos=[self.repo], + no_verification=False, + preview=False, + quiet=False, + clone_mode="ssh", + update_dependencies=False, + ) + self.installer = PythonInstaller() + + @patch("os.path.exists", side_effect=lambda path: path.endswith("pyproject.toml")) + def test_supports_true_when_pyproject_exists(self, mock_exists): + self.assertTrue(self.installer.supports(self.ctx)) + + @patch("os.path.exists", side_effect=lambda path: path.endswith("requirements.txt")) + def test_supports_true_when_requirements_exists(self, mock_exists): + self.assertTrue(self.installer.supports(self.ctx)) + + @patch("os.path.exists", return_value=False) + def test_supports_false_when_no_python_files(self, mock_exists): + self.assertFalse(self.installer.supports(self.ctx)) + + @patch("pkgmgr.installers.python.run_command") + @patch( + "os.path.exists", + side_effect=lambda path: path.endswith("pyproject.toml") + ) + def test_run_installs_project_from_pyproject(self, mock_exists, mock_run_command): + self.installer.run(self.ctx) + cmd = mock_run_command.call_args[0][0] + self.assertIn("pip install .", cmd) + self.assertEqual( + mock_run_command.call_args[1].get("cwd"), + self.ctx.repo_dir, + ) + + @patch("pkgmgr.installers.python.run_command") + @patch( + "os.path.exists", + side_effect=lambda path: path.endswith("requirements.txt") + ) + def test_run_installs_dependencies_from_requirements(self, mock_exists, mock_run_command): + self.installer.run(self.ctx) + cmd = mock_run_command.call_args[0][0] + self.assertIn("pip install -r requirements.txt", cmd) + self.assertEqual( + mock_run_command.call_args[1].get("cwd"), + self.ctx.repo_dir, + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_clone_repos.py b/tests/unit/pkgmgr/test_clone_repos.py similarity index 100% rename from tests/test_clone_repos.py rename to tests/unit/pkgmgr/test_clone_repos.py diff --git a/tests/unit/pkgmgr/test_context.py b/tests/unit/pkgmgr/test_context.py new 
file mode 100644 index 0000000..8512f3f --- /dev/null +++ b/tests/unit/pkgmgr/test_context.py @@ -0,0 +1,36 @@ +import unittest +from pkgmgr.context import RepoContext + + +class TestRepoContext(unittest.TestCase): + def test_repo_context_fields_are_stored(self): + repo = {"name": "test-repo"} + ctx = RepoContext( + repo=repo, + identifier="test-id", + repo_dir="/tmp/test", + repositories_base_dir="/tmp", + bin_dir="/usr/local/bin", + all_repos=[repo], + no_verification=True, + preview=False, + quiet=True, + clone_mode="ssh", + update_dependencies=True, + ) + + self.assertEqual(ctx.repo, repo) + self.assertEqual(ctx.identifier, "test-id") + self.assertEqual(ctx.repo_dir, "/tmp/test") + self.assertEqual(ctx.repositories_base_dir, "/tmp") + self.assertEqual(ctx.bin_dir, "/usr/local/bin") + self.assertEqual(ctx.all_repos, [repo]) + self.assertTrue(ctx.no_verification) + self.assertFalse(ctx.preview) + self.assertTrue(ctx.quiet) + self.assertEqual(ctx.clone_mode, "ssh") + self.assertTrue(ctx.update_dependencies) + + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/tests/unit/pkgmgr/test_install_repos.py b/tests/unit/pkgmgr/test_install_repos.py new file mode 100644 index 0000000..245544a --- /dev/null +++ b/tests/unit/pkgmgr/test_install_repos.py @@ -0,0 +1,122 @@ +from pkgmgr.run_command import run_command +import unittest +from unittest.mock import patch, MagicMock + +from pkgmgr.context import RepoContext +import pkgmgr.install_repos as install_module + + +class DummyInstaller: + """Simple installer for testing orchestration.""" + def __init__(self): + self.calls = [] + + def supports(self, ctx: RepoContext) -> bool: + # Always support to verify that the pipeline runs + return True + + def run(self, ctx: RepoContext) -> None: + self.calls.append(ctx.identifier) + + +class TestInstallReposOrchestration(unittest.TestCase): + @patch("pkgmgr.install_repos.create_ink") + @patch("pkgmgr.install_repos.verify_repository") + @patch("pkgmgr.install_repos.get_repo_dir") + @patch("pkgmgr.install_repos.get_repo_identifier") + @patch("pkgmgr.install_repos.clone_repos") + def test_install_repos_runs_pipeline_for_each_repo( + self, + mock_clone_repos, + mock_get_repo_identifier, + mock_get_repo_dir, + mock_verify_repository, + mock_create_ink, + ): + repo1 = {"name": "repo1"} + repo2 = {"name": "repo2"} + selected_repos = [repo1, repo2] + all_repos = selected_repos + + # Return identifiers and directories + mock_get_repo_identifier.side_effect = ["id1", "id2"] + mock_get_repo_dir.side_effect = ["/tmp/repo1", "/tmp/repo2"] + + # Simulate verification success: (ok, errors, commit, key) + mock_verify_repository.return_value = (True, [], "commit", "key") + + # Ensure directories exist (no cloning) + with patch("os.path.exists", return_value=True): + dummy_installer = DummyInstaller() + # Monkeypatch INSTALLERS for this test + old_installers = install_module.INSTALLERS + install_module.INSTALLERS = [dummy_installer] + try: + install_module.install_repos( + selected_repos=selected_repos, + repositories_base_dir="/tmp", + bin_dir="/bin", + all_repos=all_repos, + no_verification=False, + preview=False, + quiet=False, + clone_mode="ssh", + update_dependencies=False, + ) + finally: + install_module.INSTALLERS = old_installers + + # Check that installers ran with both identifiers + self.assertEqual(dummy_installer.calls, ["id1", "id2"]) + self.assertEqual(mock_create_ink.call_count, 2) + self.assertEqual(mock_verify_repository.call_count, 2) + + 
@patch("pkgmgr.install_repos.verify_repository") + @patch("pkgmgr.install_repos.get_repo_dir") + @patch("pkgmgr.install_repos.get_repo_identifier") + @patch("pkgmgr.install_repos.clone_repos") + def test_install_repos_skips_on_failed_verification( + self, + mock_clone_repos, + mock_get_repo_identifier, + mock_get_repo_dir, + mock_verify_repository, + ): + repo = {"name": "repo1", "verified": True} + selected_repos = [repo] + all_repos = selected_repos + + mock_get_repo_identifier.return_value = "id1" + mock_get_repo_dir.return_value = "/tmp/repo1" + + # Verification fails: ok=False, with error list + mock_verify_repository.return_value = (False, ["sig error"], None, None) + + dummy_installer = DummyInstaller() + with patch("os.path.exists", return_value=True), \ + patch("pkgmgr.install_repos.create_ink") as mock_create_ink, \ + patch("builtins.input", return_value="n"): + old_installers = install_module.INSTALLERS + install_module.INSTALLERS = [dummy_installer] + try: + install_module.install_repos( + selected_repos=selected_repos, + repositories_base_dir="/tmp", + bin_dir="/bin", + all_repos=all_repos, + no_verification=False, + preview=False, + quiet=False, + clone_mode="ssh", + update_dependencies=False, + ) + finally: + install_module.INSTALLERS = old_installers + + # No installer run and no create_ink when user declines + self.assertEqual(dummy_installer.calls, []) + mock_create_ink.assert_not_called() + + +if __name__ == "__main__": + unittest.main()