Add cross-distribution OS package installers (Arch PKGBUILD, Debian control, RPM spec) and restructure tests.
Remove deprecated AUR and Ansible requirements installers. Introduce Nix init + wrapper scripts and full packaging (Arch/DEB/RPM). Associated conversation: https://chatgpt.com/share/693476a8-b9f0-800f-8e0c-ea5151295ce2
@@ -30,23 +30,30 @@ from pkgmgr.context import RepoContext

# Installer implementations
from pkgmgr.installers.pkgmgr_manifest import PkgmgrManifestInstaller
from pkgmgr.installers.pkgbuild import PkgbuildInstaller
from pkgmgr.installers.os_packages import (
    ArchPkgbuildInstaller,
    DebianControlInstaller,
    RpmSpecInstaller,
)
from pkgmgr.installers.nix_flake import NixFlakeInstaller
from pkgmgr.installers.ansible_requirements import AnsibleRequirementsInstaller
from pkgmgr.installers.python import PythonInstaller
from pkgmgr.installers.makefile import MakefileInstaller
from pkgmgr.installers.aur import AurInstaller


# Ordered list of installers to apply to each repository.
# Layering:
# 1) pkgmgr.yml (high-level repo dependencies)
# 2) OS packages: PKGBUILD / debian/control / RPM spec
# 3) Nix flakes (flake.nix)
# 4) Python (pyproject / requirements)
# 5) Makefile fallback
INSTALLERS = [
    PkgmgrManifestInstaller(),
    PkgbuildInstaller(),
    NixFlakeInstaller(),
    AnsibleRequirementsInstaller(),
    PythonInstaller(),
    MakefileInstaller(),
    AurInstaller(),
    PkgmgrManifestInstaller(),  # meta/pkgmgr.yml deps
    ArchPkgbuildInstaller(),  # Arch
    DebianControlInstaller(),  # Debian/Ubuntu
    RpmSpecInstaller(),  # Fedora/RHEL/CentOS
    NixFlakeInstaller(),  # 2) flake.nix (Nix layer)
    PythonInstaller(),  # 3) pyproject / requirements (fallback if no flake+nix)
    MakefileInstaller(),  # generic 'make install'
]
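For orientation, the surrounding pipeline presumably walks INSTALLERS in order and lets each entry decide whether it applies via the supports()/run() contract shown in the installer classes below. A minimal sketch of such a driver loop; the function name and the loop itself are assumptions, not part of this diff:

# Hypothetical driver loop: apply each installer that claims support for the repo.
# supports() and run() are the BaseInstaller API used throughout this commit.
from pkgmgr.context import RepoContext

def install_repo(ctx: RepoContext) -> None:
    for installer in INSTALLERS:
        if installer.supports(ctx):   # e.g. PKGBUILD, debian/control or *.spec present
            installer.run(ctx)        # may raise SystemExit on fatal config errors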
@@ -4,16 +4,17 @@

"""
Installer package for pkgmgr.

Each installer implements a small, focused step in the repository
installation pipeline (e.g. PKGBUILD dependencies, Nix flakes, Python,
Ansible requirements, pkgmgr.yml, Makefile, AUR).

This exposes all installer classes so users can import them directly from
pkgmgr.installers.
"""

from pkgmgr.installers.base import BaseInstaller  # noqa: F401
from pkgmgr.installers.pkgmgr_manifest import PkgmgrManifestInstaller  # noqa: F401
from pkgmgr.installers.pkgbuild import PkgbuildInstaller  # noqa: F401
from pkgmgr.installers.nix_flake import NixFlakeInstaller  # noqa: F401
from pkgmgr.installers.ansible_requirements import AnsibleRequirementsInstaller  # noqa: F401
from pkgmgr.installers.python import PythonInstaller  # noqa: F401
from pkgmgr.installers.makefile import MakefileInstaller  # noqa: F401
from pkgmgr.installers.aur import AurInstaller  # noqa: F401

# OS-specific installers
from pkgmgr.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller  # noqa: F401
from pkgmgr.installers.os_packages.debian_control import DebianControlInstaller  # noqa: F401
from pkgmgr.installers.os_packages.rpm_spec import RpmSpecInstaller  # noqa: F401
@@ -1,175 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installer for Ansible dependencies defined in requirements.yml.

This installer installs collections and roles via ansible-galaxy when found.
"""

import os
import shutil
import tempfile
from typing import Any, Dict, List

import yaml

from pkgmgr.context import RepoContext
from pkgmgr.installers.base import BaseInstaller
from pkgmgr.run_command import run_command


class AnsibleRequirementsInstaller(BaseInstaller):
    """Install Ansible collections and roles from requirements.yml."""

    REQUIREMENTS_FILE = "requirements.yml"

    def supports(self, ctx: RepoContext) -> bool:
        req_file = os.path.join(ctx.repo_dir, self.REQUIREMENTS_FILE)
        return os.path.exists(req_file)

    def _get_ansible_galaxy_cmd(self) -> str:
        """
        Resolve how to call ansible-galaxy:

        1. If ansible-galaxy is on PATH, use it directly.
        2. Else, if nix is available, run it via Nix:
           nix --extra-experimental-features 'nix-command flakes' \
               run nixpkgs#ansible-core -- ansible-galaxy
        3. If neither is available, return an empty string.
        """
        if shutil.which("ansible-galaxy"):
            return "ansible-galaxy"

        if shutil.which("nix"):
            # Use Nix as the preferred provider
            return (
                "nix --extra-experimental-features 'nix-command flakes' "
                "run nixpkgs#ansible-core -- ansible-galaxy"
            )

        return ""

    def _load_requirements(self, req_path: str, identifier: str) -> Dict[str, Any]:
        """
        Load requirements.yml.

        Any parsing error is treated as fatal (SystemExit).
        """
        try:
            with open(req_path, "r", encoding="utf-8") as f:
                return yaml.safe_load(f) or {}
        except Exception as exc:
            print(f"Error loading {self.REQUIREMENTS_FILE} in {identifier}: {exc}")
            raise SystemExit(
                f"{self.REQUIREMENTS_FILE} parsing failed for {identifier}: {exc}"
            )

    def _validate_requirements(self, requirements: Dict[str, Any], identifier: str) -> None:
        """
        Validate the requirements.yml structure.

        Raises SystemExit on any validation error.
        """
        errors: List[str] = []

        if not isinstance(requirements, dict):
            errors.append("Top-level structure must be a mapping.")
        else:
            allowed_keys = {"collections", "roles"}
            unknown_keys = set(requirements.keys()) - allowed_keys
            if unknown_keys:
                print(
                    f"Warning: requirements.yml in {identifier} contains unknown keys: "
                    f"{', '.join(sorted(unknown_keys))}"
                )

        for section in ("collections", "roles"):
            if section not in requirements:
                continue

            value = requirements[section]
            if not isinstance(value, list):
                errors.append(f"'{section}' must be a list.")
                continue

            for idx, entry in enumerate(value):
                if isinstance(entry, str):
                    # Short form "community.docker", etc.
                    continue

                if isinstance(entry, dict):
                    if section == "collections":
                        # Collections require 'name'
                        if not entry.get("name"):
                            errors.append(
                                f"Entry #{idx} in '{section}' is a mapping "
                                f"but has no 'name' key."
                            )
                    else:
                        # Roles: 'name' OR 'src' are acceptable.
                        if not (entry.get("name") or entry.get("src")):
                            errors.append(
                                f"Entry #{idx} in '{section}' is a mapping but "
                                f"has neither 'name' nor 'src' key."
                            )
                    continue

                errors.append(
                    f"Entry #{idx} in '{section}' has invalid type "
                    f"{type(entry).__name__}; expected string or mapping."
                )

        if errors:
            print(f"Invalid requirements.yml in {identifier}:")
            for err in errors:
                print(f" - {err}")
            raise SystemExit(
                f"requirements.yml validation failed for {identifier}."
            )

    def run(self, ctx: RepoContext) -> None:
        req_file = os.path.join(ctx.repo_dir, self.REQUIREMENTS_FILE)
        requirements = self._load_requirements(req_file, ctx.identifier)
        if not requirements:
            return

        # Validate structure before doing anything dangerous.
        self._validate_requirements(requirements, ctx.identifier)

        if "collections" not in requirements and "roles" not in requirements:
            return

        print(f"Ansible dependencies found in {ctx.identifier}, installing...")

        ansible_requirements: Dict[str, Any] = {}
        if "collections" in requirements:
            ansible_requirements["collections"] = requirements["collections"]
        if "roles" in requirements:
            ansible_requirements["roles"] = requirements["roles"]

        with tempfile.NamedTemporaryFile(
            mode="w",
            suffix=".yml",
            delete=False,
        ) as tmp:
            yaml.dump(ansible_requirements, tmp, default_flow_style=False)
            tmp_filename = tmp.name

        galaxy_cmd = self._get_ansible_galaxy_cmd()
        if not galaxy_cmd:
            print(
                "Warning: ansible-galaxy is not available and 'nix' is missing. "
                "Skipping Ansible requirements installation."
            )
            return

        if "collections" in ansible_requirements:
            print(f"Ansible collections found in {ctx.identifier}, installing...")
            cmd = f"{galaxy_cmd} collection install -r {tmp_filename}"
            run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)

        if "roles" in ansible_requirements:
            print(f"Ansible roles found in {ctx.identifier}, installing...")
            cmd = f"{galaxy_cmd} role install -r {tmp_filename}"
            run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
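For reference, the removed installer accepted requirements.yml data of roughly this shape after yaml.safe_load(); the entries below are illustrative, only "community.docker" is taken from the code comment above:

# Illustrative only: data the removed AnsibleRequirementsInstaller validated.
example_requirements = {
    "collections": [
        "community.docker",                          # short string form
        {"name": "community.general"},               # mapping form must carry 'name'
    ],
    "roles": [
        {"name": "example.role"},                    # roles accept 'name' ...
        {"src": "https://example.com/role.tar.gz"},  # ... or 'src'
    ],
}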
@@ -1,150 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installer for Arch AUR dependencies declared in an `aur.yml` file.

This installer is:
- Arch-only (requires `pacman`)
- helper-driven (yay/paru/..)
- safe to ignore on non-Arch systems

Config parsing errors are treated as fatal to avoid silently ignoring
broken configuration.
"""

import os
import shutil
from typing import List

import yaml

from pkgmgr.installers.base import BaseInstaller
from pkgmgr.context import RepoContext
from pkgmgr.run_command import run_command


AUR_CONFIG_FILENAME = "aur.yml"


class AurInstaller(BaseInstaller):
    """
    Installer for Arch AUR dependencies declared in an `aur.yml` file.
    """

    def _is_arch_like(self) -> bool:
        return shutil.which("pacman") is not None

    def _config_path(self, ctx: RepoContext) -> str:
        return os.path.join(ctx.repo_dir, AUR_CONFIG_FILENAME)

    def _load_config(self, ctx: RepoContext) -> dict:
        """
        Load and validate aur.yml.

        Any parsing error or invalid top-level structure is treated as fatal
        (SystemExit).
        """
        path = self._config_path(ctx)
        if not os.path.exists(path):
            return {}

        try:
            with open(path, "r", encoding="utf-8") as f:
                data = yaml.safe_load(f) or {}
        except Exception as exc:
            print(f"[Error] Failed to load AUR config from '{path}': {exc}")
            raise SystemExit(f"AUR config '{path}' could not be parsed: {exc}")

        if not isinstance(data, dict):
            print(f"[Error] AUR config '{path}' is not a mapping.")
            raise SystemExit(f"AUR config '{path}' must be a mapping at top level.")

        return data

    def _get_helper(self, cfg: dict) -> str:
        # Priority: config.helper > $AUR_HELPER > "yay"
        helper = cfg.get("helper")
        if isinstance(helper, str) and helper.strip():
            return helper.strip()

        env_helper = os.environ.get("AUR_HELPER")
        if env_helper:
            return env_helper.strip()

        return "yay"

    def _get_packages(self, cfg: dict) -> List[str]:
        raw = cfg.get("packages", [])
        if not isinstance(raw, list):
            return []

        names: List[str] = []
        for entry in raw:
            if isinstance(entry, str):
                name = entry.strip()
                if name:
                    names.append(name)
            elif isinstance(entry, dict):
                name = str(entry.get("name", "")).strip()
                if name:
                    names.append(name)

        return names

    # --- BaseInstaller API -------------------------------------------------

    def supports(self, ctx: RepoContext) -> bool:
        """
        This installer is supported if:
        - We are on an Arch-like system (pacman available),
        - An aur.yml exists,
        - That aur.yml declares at least one package.

        An invalid aur.yml will raise SystemExit during config loading.
        """
        if not self._is_arch_like():
            return False

        cfg = self._load_config(ctx)
        if not cfg:
            return False

        packages = self._get_packages(cfg)
        return len(packages) > 0

    def run(self, ctx: RepoContext) -> None:
        """
        Install AUR packages using the configured helper (default: yay).

        Missing helper is treated as non-fatal (warning), everything else
        that fails in run_command() is fatal.
        """
        if not self._is_arch_like():
            print("AUR installer skipped: not an Arch-like system.")
            return

        cfg = self._load_config(ctx)
        if not cfg:
            print("AUR installer: no valid aur.yml found; skipping.")
            return

        packages = self._get_packages(cfg)
        if not packages:
            print("AUR installer: no AUR packages defined; skipping.")
            return

        helper = self._get_helper(cfg)
        if shutil.which(helper) is None:
            print(
                f"[Warning] AUR helper '{helper}' is not available on PATH. "
                f"Please install it (e.g. via your aur_builder setup). "
                f"Skipping AUR installation."
            )
            return

        pkg_list_str = " ".join(packages)
        print(f"Installing AUR packages via '{helper}': {pkg_list_str}")

        cmd = f"{helper} -S --noconfirm {pkg_list_str}"
        run_command(cmd, preview=ctx.preview)
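Similarly, the removed AurInstaller read an aur.yml whose parsed form looked roughly like this; the package names are invented, and the helper key is optional (it falls back to $AUR_HELPER and then "yay"):

# Illustrative only: data the removed AurInstaller expected from aur.yml.
example_aur_config = {
    "helper": "paru",                  # optional; overrides $AUR_HELPER and the 'yay' default
    "packages": [
        "example-aur-package",         # short string form
        {"name": "another-aur-tool"},  # mapping form; only 'name' is used
    ],
}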
pkgmgr/installers/os_packages/__init__.py (new file, 9 lines)
@@ -0,0 +1,9 @@
from .arch_pkgbuild import ArchPkgbuildInstaller
from .debian_control import DebianControlInstaller
from .rpm_spec import RpmSpecInstaller

__all__ = [
    "ArchPkgbuildInstaller",
    "DebianControlInstaller",
    "RpmSpecInstaller",
]
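After this commit the OS-specific installers are reachable both through the pkgmgr.installers re-exports (see the __init__.py hunk above) and directly from the new subpackage:

# Equivalent import paths for the new OS-specific installers.
from pkgmgr.installers import RpmSpecInstaller               # via the package re-export
from pkgmgr.installers.os_packages import RpmSpecInstaller   # via the new subpackage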
@@ -18,12 +18,17 @@ from pkgmgr.installers.base import BaseInstaller
from pkgmgr.run_command import run_command


class PkgbuildInstaller(BaseInstaller):
class ArchPkgbuildInstaller(BaseInstaller):
    """Install Arch dependencies (depends/makedepends) from PKGBUILD."""

    PKGBUILD_NAME = "PKGBUILD"

    def supports(self, ctx: RepoContext) -> bool:
        """
        This installer is supported if:
        - pacman is available, and
        - a PKGBUILD file exists in the repository root.
        """
        if shutil.which("pacman") is None:
            return False
        pkgbuild_path = os.path.join(ctx.repo_dir, self.PKGBUILD_NAME)
@@ -39,7 +44,10 @@ class PkgbuildInstaller(BaseInstaller):
        if not os.path.exists(pkgbuild_path):
            return []

        script = f'source {self.PKGBUILD_NAME} >/dev/null 2>&1; printf "%s\\n" "${{{var_name}[@]}}"'
        script = (
            f'source {self.PKGBUILD_NAME} >/dev/null 2>&1; '
            f'printf "%s\\n" "${{{var_name}[@]}}"'
        )
        try:
            output = subprocess.check_output(
                ["bash", "--noprofile", "--norc", "-c", script],
@@ -64,6 +72,9 @@ class PkgbuildInstaller(BaseInstaller):
        return packages

    def run(self, ctx: RepoContext) -> None:
        """
        Install all packages from depends + makedepends via pacman.
        """
        depends = self._extract_pkgbuild_array(ctx, "depends")
        makedepends = self._extract_pkgbuild_array(ctx, "makedepends")
        all_pkgs = depends + makedepends
@@ -72,4 +83,4 @@ class PkgbuildInstaller(BaseInstaller):
            return

        cmd = "sudo pacman -S --noconfirm " + " ".join(all_pkgs)
        run_command(cmd, preview=ctx.preview)
        run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
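The extraction trick above, sourcing the PKGBUILD in a throwaway non-interactive bash and printing one array, can be reproduced standalone. A minimal sketch, assuming bash is on PATH and using an invented PKGBUILD:

import os
import subprocess
import tempfile

# Made-up PKGBUILD content for demonstration purposes only.
pkgbuild = 'pkgname=demo\npkgver=1.0\ndepends=(python git)\nmakedepends=(gcc)\n'

with tempfile.TemporaryDirectory() as tmp:
    with open(os.path.join(tmp, "PKGBUILD"), "w", encoding="utf-8") as f:
        f.write(pkgbuild)
    # Same pattern as _extract_pkgbuild_array: source, then print the array.
    script = 'source PKGBUILD >/dev/null 2>&1; printf "%s\\n" "${depends[@]}"'
    out = subprocess.check_output(
        ["bash", "--noprofile", "--norc", "-c", script],
        cwd=tmp,
        text=True,
    )
    print(out.split())  # ['python', 'git']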
pkgmgr/installers/os_packages/debian_control.py (new file, 141 lines)
@@ -0,0 +1,141 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installer for Debian/Ubuntu system dependencies defined in debian/control.

This installer parses the debian/control file and installs packages from
Build-Depends / Build-Depends-Indep / Depends via apt-get on Debian-based
systems.
"""

import os
import shutil
from typing import List

from pkgmgr.context import RepoContext
from pkgmgr.installers.base import BaseInstaller
from pkgmgr.run_command import run_command


class DebianControlInstaller(BaseInstaller):
    """Install Debian/Ubuntu system packages from debian/control."""

    CONTROL_DIR = "debian"
    CONTROL_FILE = "control"

    def _is_debian_like(self) -> bool:
        return shutil.which("apt-get") is not None

    def _control_path(self, ctx: RepoContext) -> str:
        return os.path.join(ctx.repo_dir, self.CONTROL_DIR, self.CONTROL_FILE)

    def supports(self, ctx: RepoContext) -> bool:
        """
        This installer is supported if:
        - we are on a Debian-like system (apt-get available), and
        - debian/control exists.
        """
        if not self._is_debian_like():
            return False

        return os.path.exists(self._control_path(ctx))

    def _parse_control_dependencies(self, control_path: str) -> List[str]:
        """
        Parse Build-Depends, Build-Depends-Indep and Depends fields
        from debian/control.

        This is a best-effort parser that:
        - joins continuation lines starting with space,
        - splits fields by comma,
        - strips version constraints and alternatives (x | y → x),
        - filters out variable placeholders like ${misc:Depends}.
        """
        if not os.path.exists(control_path):
            return []

        with open(control_path, "r", encoding="utf-8") as f:
            lines = f.readlines()

        deps: List[str] = []
        current_key = None
        current_val_lines: List[str] = []

        target_keys = {
            "Build-Depends",
            "Build-Depends-Indep",
            "Depends",
        }

        def flush_current():
            nonlocal current_key, current_val_lines, deps
            if not current_key or not current_val_lines:
                return
            value = " ".join(l.strip() for l in current_val_lines)
            # Split by comma into individual dependency expressions
            for part in value.split(","):
                part = part.strip()
                if not part:
                    continue
                # Take the first alternative: "foo | bar" → "foo"
                if "|" in part:
                    part = part.split("|", 1)[0].strip()
                # Strip version constraints: "pkg (>= 1.0)" → "pkg"
                if " " in part:
                    part = part.split(" ", 1)[0].strip()
                # Skip variable placeholders
                if part.startswith("${") and part.endswith("}"):
                    continue
                if part:
                    deps.append(part)
            current_key = None
            current_val_lines = []

        for line in lines:
            if line.startswith(" ") or line.startswith("\t"):
                # Continuation of previous field
                if current_key in target_keys:
                    current_val_lines.append(line)
                continue

            # New field
            flush_current()

            if ":" not in line:
                continue
            key, val = line.split(":", 1)
            key = key.strip()
            val = val.strip()

            if key in target_keys:
                current_key = key
                current_val_lines = [val]

        # Flush last field
        flush_current()

        # De-duplicate while preserving order
        seen = set()
        unique_deps: List[str] = []
        for pkg in deps:
            if pkg not in seen:
                seen.add(pkg)
                unique_deps.append(pkg)

        return unique_deps

    def run(self, ctx: RepoContext) -> None:
        """
        Install Debian/Ubuntu system packages via apt-get.
        """
        control_path = self._control_path(ctx)
        packages = self._parse_control_dependencies(control_path)
        if not packages:
            return

        # Update and install in two separate commands for clarity.
        run_command("sudo apt-get update", cwd=ctx.repo_dir, preview=ctx.preview)

        cmd = "sudo apt-get install -y " + " ".join(packages)
        run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
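To see the parsing rules in action, a small hedged sketch that feeds an invented debian/control to _parse_control_dependencies(); it assumes the pkgmgr package is importable in the current environment:

import os
import tempfile
from pkgmgr.installers.os_packages.debian_control import DebianControlInstaller

# Made-up control content: continuation line, alternatives, versions, placeholders.
control = (
    "Source: demo\n"
    "Build-Depends: debhelper-compat (= 13), python3 | python3-all,\n"
    " dh-python, ${misc:Depends}\n"
    "\n"
    "Package: demo\n"
    "Depends: python3-yaml (>= 5.0), ${shlibs:Depends}\n"
)

with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "control")
    with open(path, "w", encoding="utf-8") as f:
        f.write(control)
    print(DebianControlInstaller()._parse_control_dependencies(path))
    # Expected: ['debhelper-compat', 'python3', 'dh-python', 'python3-yaml']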
pkgmgr/installers/os_packages/rpm_spec.py (new file, 152 lines)
@@ -0,0 +1,152 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Installer for RPM-based system dependencies defined in *.spec files.

This installer parses the first *.spec file it finds in the repository
and installs packages from BuildRequires / Requires via dnf or yum on
RPM-based systems (Fedora / RHEL / CentOS / Rocky / Alma, etc.).
"""

import glob
import os
import shutil
from typing import List, Optional

from pkgmgr.context import RepoContext
from pkgmgr.installers.base import BaseInstaller
from pkgmgr.run_command import run_command


class RpmSpecInstaller(BaseInstaller):
    """Install RPM-based system packages from *.spec files."""

    def _is_rpm_like(self) -> bool:
        return shutil.which("dnf") is not None or shutil.which("yum") is not None

    def _spec_path(self, ctx: RepoContext) -> Optional[str]:
        pattern = os.path.join(ctx.repo_dir, "*.spec")
        matches = glob.glob(pattern)
        if not matches:
            return None
        # Take the first match deterministically (sorted)
        return sorted(matches)[0]

    def supports(self, ctx: RepoContext) -> bool:
        """
        This installer is supported if:
        - we are on an RPM-based system (dnf or yum available), and
        - a *.spec file exists in the repository root.
        """
        if not self._is_rpm_like():
            return False

        return self._spec_path(ctx) is not None

    def _parse_spec_dependencies(self, spec_path: str) -> List[str]:
        """
        Parse BuildRequires and Requires from a .spec file.

        Best-effort parser that:
        - joins continuation lines starting with space or tab,
        - splits fields by comma,
        - takes the first token of each entry as the package name,
        - ignores macros and empty entries.
        """
        if not os.path.exists(spec_path):
            return []

        with open(spec_path, "r", encoding="utf-8") as f:
            lines = f.readlines()

        deps: List[str] = []
        current_key = None
        current_val_lines: List[str] = []

        target_keys = {
            "BuildRequires",
            "Requires",
        }

        def flush_current():
            nonlocal current_key, current_val_lines, deps
            if not current_key or not current_val_lines:
                return
            value = " ".join(l.strip() for l in current_val_lines)
            # Split by comma into individual dependency expressions
            for part in value.split(","):
                part = part.strip()
                if not part:
                    continue
                # Take first token as package name: "pkg >= 1.0" → "pkg"
                token = part.split()[0].strip()
                if not token:
                    continue
                # Ignore macros like %{?something}
                if token.startswith("%"):
                    continue
                deps.append(token)
            current_key = None
            current_val_lines = []

        for line in lines:
            stripped = line.lstrip()
            if stripped.startswith("#"):
                # Comment
                continue

            if line.startswith(" ") or line.startswith("\t"):
                # Continuation of previous field
                if current_key in target_keys:
                    current_val_lines.append(line)
                continue

            # New field
            flush_current()

            if ":" not in line:
                continue
            key, val = line.split(":", 1)
            key = key.strip()
            val = val.strip()

            if key in target_keys:
                current_key = key
                current_val_lines = [val]

        # Flush last field
        flush_current()

        # De-duplicate while preserving order
        seen = set()
        unique_deps: List[str] = []
        for pkg in deps:
            if pkg not in seen:
                seen.add(pkg)
                unique_deps.append(pkg)

        return unique_deps

    def run(self, ctx: RepoContext) -> None:
        """
        Install RPM-based system packages via dnf or yum.
        """
        spec_path = self._spec_path(ctx)
        if not spec_path:
            return

        packages = self._parse_spec_dependencies(spec_path)
        if not packages:
            return

        pkg_mgr = shutil.which("dnf") or shutil.which("yum")
        if not pkg_mgr:
            print(
                "[Warning] No suitable RPM package manager (dnf/yum) found on PATH. "
                "Skipping RPM dependency installation."
            )
            return

        cmd = f"sudo {pkg_mgr} install -y " + " ".join(packages)
        run_command(cmd, cwd=ctx.repo_dir, preview=ctx.preview)
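Analogously, a small hedged check for the spec parser (again assuming pkgmgr is importable); the spec content is invented:

import os
import tempfile
from pkgmgr.installers.os_packages.rpm_spec import RpmSpecInstaller

# Made-up spec content: version constraints and a macro entry.
spec = (
    "Name: demo\n"
    "BuildRequires: gcc, make >= 4.0\n"
    "Requires: python3-pyyaml\n"
    "Requires: %{?something}\n"
)

with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "demo.spec")
    with open(path, "w", encoding="utf-8") as f:
        f.write(spec)
    print(RpmSpecInstaller()._parse_spec_dependencies(path))
    # Expected: ['gcc', 'make', 'python3-pyyaml']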