From 6e8fd9f622b088812281fe8b25d05f5a1f0e02ec Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Fri, 26 Aug 2022 03:07:34 +0200 Subject: [PATCH 01/44] packages/pkgbuild: cache parsed pkgbuilds by path, add get_pkgbuild_by_path(), Pkgbuild.update(pkgb) --- packages/device.py | 5 ++- packages/pkgbuild.py | 76 ++++++++++++++++++++++++++++++++--------- packages/test_device.py | 2 +- 3 files changed, 63 insertions(+), 20 deletions(-) diff --git a/packages/device.py b/packages/device.py index f9f4568..a800f7e 100644 --- a/packages/device.py +++ b/packages/device.py @@ -6,7 +6,7 @@ from typing import Optional from config import config from constants import Arch, ARCHES from config.scheme import DataClass, munchclass -from .pkgbuild import discover_pkgbuilds, _pkgbuilds_cache, Pkgbuild, parse_pkgbuild +from .pkgbuild import discover_pkgbuilds, get_pkgbuild_by_path, _pkgbuilds_cache, Pkgbuild DEVICE_DEPRECATIONS = { "oneplus-enchilada": "sdm845-oneplus-enchilada", @@ -105,8 +105,7 @@ def get_device(name: str, pkgbuilds: Optional[dict[str, Pkgbuild]] = None, lazy: else: relative_path = os.path.join('device', pkgname) assert os.path.exists(os.path.join(config.get_path('pkgbuilds'), relative_path)) - pkgbuild = [p for p in parse_pkgbuild(relative_path, _config=config) if p.name == pkgname][0] - _pkgbuilds_cache[pkgname] = pkgbuild + pkgbuild = [p for p in get_pkgbuild_by_path(relative_path, lazy=lazy, _config=config) if p.name == pkgname][0] device = parse_device_pkg(pkgbuild) if lazy: _device_cache[name] = device diff --git a/packages/pkgbuild.py b/packages/pkgbuild.py index 9e5c340..562cdb5 100644 --- a/packages/pkgbuild.py +++ b/packages/pkgbuild.py @@ -6,11 +6,11 @@ import multiprocessing import os import subprocess -from constants import REPOSITORIES from joblib import Parallel, delayed from typing import Optional, Sequence from config import config, ConfigStateHolder +from constants import REPOSITORIES from exec.cmd import run_cmd from constants import Arch, MAKEPKG_CMD from distro.package import PackageInfo @@ -102,6 +102,19 @@ class Pkgbuild(PackageInfo): """updates `self.version` from `self.pkgver` and `self.pkgrel`""" self.version = f'{self.pkgver}-{self.pkgrel}' + def update(self, pkg: Pkgbuild): + self.version = pkg.version + self.arches = list(pkg.arches) + self.depends = list(pkg.depends) + self.provides = list(pkg.provides) + self.replaces = list(pkg.replaces) + self.local_depends = list(pkg.local_depends) + self.repo = pkg.repo + self.mode = pkg.mode + self.path = pkg.path + self.pkgver = pkg.pkgver + self.pkgrel = pkg.pkgrel + self.update_version() class Pkgbase(Pkgbuild): subpackages: Sequence[SubPkgbuild] @@ -110,6 +123,21 @@ class Pkgbase(Pkgbuild): self.subpackages = list(subpackages) super().__init__(relative_path, **args) + def update(self, pkg: Pkgbuild): + if not isinstance(pkg, Pkgbase): + raise Exception(f"Tried to update pkgbase {self.name} with non-base pkg {pkg}") + Pkgbuild.update(self, pkg) + sub_dict = {p.name: p for p in self.subpackages} + self.subpackages.clear() + for new_pkg in pkg.subpackages: + name = new_pkg.name + if name not in sub_dict: + sub_dict[name] = new_pkg + else: + sub_dict[name].update(new_pkg) + updated = sub_dict[name] + self.subpackages.append(updated) + class SubPkgbuild(Pkgbuild): pkgbase: Pkgbase @@ -119,18 +147,10 @@ class SubPkgbuild(Pkgbuild): self.name = name self.pkgbase = pkgbase - self.version = pkgbase.version - self.arches = pkgbase.arches - self.depends = list(pkgbase.depends) + self.update(pkgbase) + self.provides = [] self.replaces 
= [] - self.local_depends = list(pkgbase.local_depends) - self.repo = pkgbase.repo - self.mode = pkgbase.mode - self.path = pkgbase.path - self.pkgver = pkgbase.pkgver - self.pkgrel = pkgbase.pkgrel - self.update_version() def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] = None) -> Sequence[Pkgbuild]: @@ -214,9 +234,21 @@ def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] = _pkgbuilds_cache = dict[str, Pkgbuild]() +_pkgbuilds_paths = dict[str, list[Pkgbuild]]() _pkgbuilds_scanned: bool = False +def get_pkgbuild_by_path(relative_path: str, lazy: bool = True, _config: Optional[config] = None) -> list[Pkgbuild]: + global _pkgbuilds_cache, _pkgbuilds_paths + if lazy and relative_path in _pkgbuilds_paths: + return _pkgbuilds_paths[relative_path] + parsed = parse_pkgbuild(relative_path, _config=_config) + _pkgbuilds_paths[relative_path] = parsed + for pkg in parsed: + _pkgbuilds_cache[pkg.name] = pkg + return parsed + + def discover_pkgbuilds(parallel: bool = True, lazy: bool = True) -> dict[str, Pkgbuild]: global _pkgbuilds_cache, _pkgbuilds_scanned if lazy and _pkgbuilds_scanned: @@ -230,17 +262,29 @@ def discover_pkgbuilds(parallel: bool = True, lazy: bool = True) -> dict[str, Pk for dir in os.listdir(os.path.join(pkgbuilds_dir, repo)): paths.append(os.path.join(repo, dir)) - results = [] - logging.info("Parsing PKGBUILDs") - logging.debug(f"About to parse pkgbuilds. verbosity: {config.runtime['verbose']}") + results = [] if parallel: - chunks = (Parallel(n_jobs=multiprocessing.cpu_count() * 4)(delayed(parse_pkgbuild)(path, config) for path in paths)) + paths_filtered = paths + if lazy: + # filter out cached packages as the caches don't cross process boundaries + paths_filtered = [] + for p in paths: + if p in _pkgbuilds_paths: + # use cache + results += _pkgbuilds_paths[p] + else: + paths_filtered += [p] + chunks = (Parallel(n_jobs=multiprocessing.cpu_count() * 4)( + delayed(get_pkgbuild_by_path)(path, lazy=lazy, _config=config) for path in paths_filtered)) else: - chunks = (parse_pkgbuild(path) for path in paths) + chunks = (get_pkgbuild_by_path(path, lazy=lazy) for path in paths) + _pkgbuilds_paths.clear() + # one list of packages per path for pkglist in chunks: + _pkgbuilds_paths[pkglist[0].path] = pkglist results += pkglist logging.debug('Building package dictionary!') diff --git a/packages/test_device.py b/packages/test_device.py index e5085af..4d9d546 100644 --- a/packages/test_device.py +++ b/packages/test_device.py @@ -42,7 +42,7 @@ ONEPLUS_ENCHILADA_PKG = f'device-{ONEPLUS_ENCHILADA}' def enchilada_pkgbuild(initialise_pkgbuilds_dir: ConfigStateHolder): config = initialise_pkgbuilds_dir config.try_load_file() - return parse_pkgbuild(os.path.join('device', ONEPLUS_ENCHILADA_PKG), config)[0] + return parse_pkgbuild(os.path.join('device', ONEPLUS_ENCHILADA_PKG), _config=config)[0] def validate_oneplus_enchilada(d: Device): From 08fc10bf11dedee109895e0b22def0ac01596090 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Fri, 26 Aug 2022 03:28:17 +0200 Subject: [PATCH 02/44] Pkgbuild: add refresh_sources() --- packages/pkgbuild.py | 57 ++++++++++++++++++++++++++++++++++---------- 1 file changed, 45 insertions(+), 12 deletions(-) diff --git a/packages/pkgbuild.py b/packages/pkgbuild.py index 562cdb5..0fc0a8a 100644 --- a/packages/pkgbuild.py +++ b/packages/pkgbuild.py @@ -7,7 +7,7 @@ import os import subprocess from joblib import Parallel, delayed -from typing import Optional, Sequence +from typing import Optional from config import 
config, ConfigStateHolder from constants import REPOSITORIES @@ -65,6 +65,7 @@ class Pkgbuild(PackageInfo): path: str pkgver: str pkgrel: str + sources_refreshed: bool def __init__( self, @@ -74,6 +75,7 @@ class Pkgbuild(PackageInfo): provides: list[str] = [], replaces: list[str] = [], repo: Optional[str] = None, + sources_refreshed: bool = False, ) -> None: """ Create new Pkgbuild representation for file located at `{relative_path}/PKGBUILD`. @@ -91,9 +93,15 @@ class Pkgbuild(PackageInfo): self.path = relative_path self.pkgver = '' self.pkgrel = '' + self.sources_refreshed = sources_refreshed def __repr__(self): - return f'Pkgbuild({self.name},{repr(self.path)},{self.version},{self.mode})' + return ','.join([ + 'Pkgbuild(' + self.name, + repr(self.path), + self.version + ("🔄" if self.sources_refreshed else ""), + self.mode + ')', + ]) def names(self): return list(set([self.name] + self.provides + self.replaces)) @@ -114,12 +122,17 @@ class Pkgbuild(PackageInfo): self.path = pkg.path self.pkgver = pkg.pkgver self.pkgrel = pkg.pkgrel + self.sources_refreshed = self.sources_refreshed or pkg.sources_refreshed self.update_version() -class Pkgbase(Pkgbuild): - subpackages: Sequence[SubPkgbuild] + def refresh_sources(self): + raise NotImplementedError() - def __init__(self, relative_path: str, subpackages: Sequence[SubPkgbuild] = [], **args): + +class Pkgbase(Pkgbuild): + subpackages: list[SubPkgbuild] + + def __init__(self, relative_path: str, subpackages: list[SubPkgbuild] = [], **args): self.subpackages = list(subpackages) super().__init__(relative_path, **args) @@ -136,8 +149,23 @@ class Pkgbase(Pkgbuild): else: sub_dict[name].update(new_pkg) updated = sub_dict[name] + updated.sources_refreshed = self.sources_refreshed self.subpackages.append(updated) + def refresh_sources(self, lazy: bool = True): + ''' + Reloads the pkgbuild from disk. + Does **NOT** actually perform the makepkg action to refresh the pkgver() first! 
+ ''' + if lazy and self.sources_refreshed: + return + parsed = parse_pkgbuild(self.path, sources_refreshed=True) + basepkgs = [p for p in parsed if isinstance(p, Pkgbase)] + if not len(basepkgs) == 1: + raise Exception(f"error refreshing {self.name}: wrong number of base packages found: {basepkgs}") + self.sources_refreshed = True + self.update(basepkgs[0]) + class SubPkgbuild(Pkgbuild): pkgbase: Pkgbase @@ -147,13 +175,18 @@ class SubPkgbuild(Pkgbuild): self.name = name self.pkgbase = pkgbase + self.sources_refreshed = False self.update(pkgbase) self.provides = [] self.replaces = [] + def refresh_sources(self, lazy: bool = True): + assert self.pkgbase + self.pkgbase.refresh_sources(lazy=lazy) -def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] = None) -> Sequence[Pkgbuild]: + +def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] = None, sources_refreshed: bool = False) -> list[Pkgbuild]: """ Since function may run in a different subprocess, we need to be passed the config via parameter """ @@ -176,7 +209,7 @@ def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] = raise Exception((f'{relative_pkg_dir}/PKGBUILD has {"no" if mode is None else "an invalid"} mode configured') + (f': "{mode}"' if mode is not None else '')) - base_package = Pkgbase(relative_pkg_dir) + base_package = Pkgbase(relative_pkg_dir, sources_refreshed=sources_refreshed) base_package.mode = mode base_package.repo = relative_pkg_dir.split('/')[0] srcinfo = run_cmd( @@ -217,7 +250,7 @@ def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] = elif line.startswith('depends') or line.startswith('makedepends') or line.startswith('checkdepends') or line.startswith('optdepends'): current.depends.append(splits[1].split('=')[0].split(': ')[0]) - results: Sequence[Pkgbuild] = list(base_package.subpackages) + results: list[Pkgbuild] = list(base_package.subpackages) if len(results) > 1: logging.debug(f" Split package detected: {base_package.name}: {results}") base_package.update_version() @@ -238,7 +271,7 @@ _pkgbuilds_paths = dict[str, list[Pkgbuild]]() _pkgbuilds_scanned: bool = False -def get_pkgbuild_by_path(relative_path: str, lazy: bool = True, _config: Optional[config] = None) -> list[Pkgbuild]: +def get_pkgbuild_by_path(relative_path: str, lazy: bool = True, _config: Optional[ConfigStateHolder] = None) -> list[Pkgbuild]: global _pkgbuilds_cache, _pkgbuilds_paths if lazy and relative_path in _pkgbuilds_paths: return _pkgbuilds_paths[relative_path] @@ -299,11 +332,11 @@ def discover_pkgbuilds(parallel: bool = True, lazy: bool = True) -> dict[str, Pk package.local_depends = package.depends.copy() for dep in package.depends.copy(): found = dep in packages - for p in packages.values(): + for pkg in packages.values(): if found: break - if dep in p.names(): - logging.debug(f'Found {p.name} that provides {dep}') + if dep in pkg.names(): + logging.debug(f'Found {pkg.name} that provides {dep}') found = True break if not found: From bc31f9822a7806c8b8a1a4539a13b6d04f68370f Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Fri, 26 Aug 2022 19:20:12 +0200 Subject: [PATCH 03/44] constants.py: add armv7h support --- constants.py | 47 +++++++++++++++++++++++++++++++++-------------- 1 file changed, 33 insertions(+), 14 deletions(-) diff --git a/constants.py b/constants.py index c41996c..81b14d9 100644 --- a/constants.py +++ b/constants.py @@ -91,11 +91,20 @@ Arch: TypeAlias = str ARCHES = [ 'x86_64', 'aarch64', + 'armv7h', ] DistroArch: 
TypeAlias = Arch TargetArch: TypeAlias = Arch +ALARM_REPOS = { + 'core': 'http://mirror.archlinuxarm.org/$arch/$repo', + 'extra': 'http://mirror.archlinuxarm.org/$arch/$repo', + 'community': 'http://mirror.archlinuxarm.org/$arch/$repo', + 'alarm': 'http://mirror.archlinuxarm.org/$arch/$repo', + 'aur': 'http://mirror.archlinuxarm.org/$arch/$repo', +} + BASE_DISTROS: dict[DistroArch, dict[str, dict[str, str]]] = { 'x86_64': { 'repos': { @@ -105,42 +114,52 @@ BASE_DISTROS: dict[DistroArch, dict[str, dict[str, str]]] = { }, }, 'aarch64': { - 'repos': { - 'core': 'http://mirror.archlinuxarm.org/$arch/$repo', - 'extra': 'http://mirror.archlinuxarm.org/$arch/$repo', - 'community': 'http://mirror.archlinuxarm.org/$arch/$repo', - 'alarm': 'http://mirror.archlinuxarm.org/$arch/$repo', - 'aur': 'http://mirror.archlinuxarm.org/$arch/$repo', - }, + 'repos': ALARM_REPOS, + }, + 'armv7h': { + 'repos': ALARM_REPOS, }, } COMPILE_ARCHES: dict[Arch, str] = { 'x86_64': 'amd64', 'aarch64': 'arm64', + 'armv7h': 'arm', } GCC_HOSTSPECS: dict[DistroArch, dict[TargetArch, str]] = { 'x86_64': { 'x86_64': 'x86_64-pc-linux-gnu', 'aarch64': 'aarch64-linux-gnu', + 'armv7h': 'arm-unknown-linux-gnueabihf' }, 'aarch64': { 'aarch64': 'aarch64-unknown-linux-gnu', - } + }, + 'armv7h': { + 'armv7h': 'armv7l-unknown-linux-gnueabihf' + }, } CFLAGS_GENERAL = ['-O2', '-pipe', '-fstack-protector-strong'] +CFLAGS_ALARM = [ + ' -fno-plt', + '-fexceptions', + '-Wp,-D_FORTIFY_SOURCE=2', + '-Wformat', + '-Werror=format-security', + '-fstack-clash-protection', +] CFLAGS_ARCHES: dict[Arch, list[str]] = { 'x86_64': ['-march=x86-64', '-mtune=generic'], 'aarch64': [ '-march=armv8-a', - '-fexceptions', - '-Wp,-D_FORTIFY_SOURCE=2', - '-Wformat', - '-Werror=format-security', - '-fstack-clash-protection', - ] + ] + CFLAGS_ALARM, + 'armv7h': [ + '-march=armv7-a', + '-mfloat-abi=hard', + '-mfpu=neon', + ] + CFLAGS_ALARM, } QEMU_BINFMT_PKGS = ['qemu-user-static-bin', 'binfmt-qemu-static'] From b154f835e641e4bac92facef9dc19b4dd0913b62 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Fri, 26 Aug 2022 22:54:30 +0200 Subject: [PATCH 04/44] constants: add QEMU_ARCHES --- binfmt.py | 18 +++++++++++++----- constants.py | 6 ++++++ 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/binfmt.py b/binfmt.py index 06b93a1..3161458 100644 --- a/binfmt.py +++ b/binfmt.py @@ -3,6 +3,7 @@ import os import logging +from constants import Arch, QEMU_ARCHES from exec.cmd import run_root_cmd from utils import mount @@ -38,11 +39,15 @@ def binfmt_info(): return full -def is_registered(arch: str) -> bool: - return os.path.exists("/proc/sys/fs/binfmt_misc/qemu-" + arch) +def is_registered(arch: Arch) -> bool: + qemu_arch = QEMU_ARCHES[arch] + return os.path.exists("/proc/sys/fs/binfmt_misc/qemu-" + qemu_arch) -def register(arch): +def register(arch: Arch): + if arch not in QEMU_ARCHES: + raise Exception(f'binfmt.register(): unknown arch {arch} (not in QEMU_ARCHES)') + qemu_arch = QEMU_ARCHES[arch] if is_registered(arch): return @@ -51,7 +56,7 @@ def register(arch): # Build registration string # https://en.wikipedia.org/wiki/Binfmt_misc # :name:type:offset:magic:mask:interpreter:flags - info = lines[arch] + info = lines[qemu_arch] code = info['line'] binfmt = '/proc/sys/fs/binfmt_misc' register = binfmt + '/register' @@ -70,7 +75,10 @@ def register(arch): def unregister(arch): - binfmt_file = "/proc/sys/fs/binfmt_misc/qemu-" + arch + if arch not in QEMU_ARCHES: + raise Exception(f'binfmt.unregister(): unknown arch {arch} (not in QEMU_ARCHES)') + qemu_arch = 
QEMU_ARCHES[arch] + binfmt_file = "/proc/sys/fs/binfmt_misc/qemu-" + qemu_arch if not os.path.exists(binfmt_file): return logging.info(f"Unregistering qemu binfmt ({arch})") diff --git a/constants.py b/constants.py index 81b14d9..7495207 100644 --- a/constants.py +++ b/constants.py @@ -162,6 +162,12 @@ CFLAGS_ARCHES: dict[Arch, list[str]] = { ] + CFLAGS_ALARM, } +QEMU_ARCHES: dict[Arch, str] = { + 'x86_64': 'x86_64', + 'aarch64': 'aarch64', + 'armv7h': 'arm', +} + QEMU_BINFMT_PKGS = ['qemu-user-static-bin', 'binfmt-qemu-static'] CROSSDIRECT_PKGS = ['crossdirect'] + QEMU_BINFMT_PKGS From 114755888eb828941f681800d2e28235e7366201 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Fri, 26 Aug 2022 23:33:29 +0200 Subject: [PATCH 05/44] packages: circumvent git dubious ownership errors in pkgbuilds.git due to chrootery --- packages/__init__.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/packages/__init__.py b/packages/__init__.py index ab2976d..abf7cb2 100644 --- a/packages/__init__.py +++ b/packages/__init__.py @@ -42,6 +42,7 @@ def get_makepkg_env(arch: Optional[Arch] = None): 'LANG': 'C', 'CARGO_BUILD_JOBS': str(threads), 'MAKEFLAGS': f"-j{threads}", + 'HOME': '/root', } native = config.runtime.arch assert native @@ -423,6 +424,15 @@ def setup_build_chroot( return chroot +def setup_git_insecure_paths(chroot: BuildChroot): + chroot.run_cmd( + ["git", "config", "--global", "--add", "safe.directory", "'*'"], + inner_env={ + 'HOME': '/root' + }, + ).check_returncode() # type: ignore[union-attr] + + def setup_sources(package: Pkgbuild, chroot: BuildChroot, makepkg_conf_path='/etc/makepkg.conf'): makepkg_setup_args = [ '--config', @@ -434,7 +444,12 @@ def setup_sources(package: Pkgbuild, chroot: BuildChroot, makepkg_conf_path='/et ] logging.info(f'Setting up sources for {package.path} in {chroot.name}') - result = chroot.run_cmd(MAKEPKG_CMD + makepkg_setup_args, cwd=os.path.join(CHROOT_PATHS['pkgbuilds'], package.path)) + setup_git_insecure_paths(chroot) + result = chroot.run_cmd( + MAKEPKG_CMD + makepkg_setup_args, + cwd=os.path.join(CHROOT_PATHS['pkgbuilds'], package.path), + inner_env=get_makepkg_env(chroot.arch), + ) assert isinstance(result, subprocess.CompletedProcess) if result.returncode != 0: raise Exception(f'Failed to check sources for {package.path}') @@ -506,6 +521,7 @@ def build_package( if failed_deps: raise Exception(f'Dependencies failed to install: {failed_deps}') + setup_git_insecure_paths(build_root) makepkg_conf_absolute = os.path.join('/', makepkg_conf_path) setup_sources(package, build_root, makepkg_conf_path=makepkg_conf_absolute) From 57d5ed474fd4870accbc690bb0ee2692ab88a0a5 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sat, 27 Aug 2022 03:46:07 +0200 Subject: [PATCH 06/44] typecheck.sh: show error codes --- typecheck.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/typecheck.sh b/typecheck.sh index 7797918..54117aa 100755 --- a/typecheck.sh +++ b/typecheck.sh @@ -1,2 +1,2 @@ #!/bin/bash -git ls-files \*.py | sort -u | xargs mypy --pretty --install-types --ignore-missing-imports "$@" +git ls-files \*.py | sort -u | xargs mypy --pretty --show-error-codes --install-types --ignore-missing-imports "$@" From 4c77a16bbaa44e4df3c3697976b8b14ea5dd9e83 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sat, 27 Aug 2022 04:59:18 +0200 Subject: [PATCH 07/44] main: add -w to *enforce* wrapping --- main.py | 12 ++++++++---- wrapper/__init__.py | 10 +++++----- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git 
a/main.py b/main.py index e84f899..338a6a3 100755 --- a/main.py +++ b/main.py @@ -1,11 +1,13 @@ #!/usr/bin/env python3 import click -from traceback import format_exc as get_trace import subprocess +from traceback import format_exc as get_trace +from typing import Optional + from logger import logging, setup_logging, verbose_option -from wrapper import nowrapper_option +from wrapper import nowrapper_option, enforce_wrap from config import config, config_option, cmd_config from forwarding import cmd_forwarding from packages import cmd_packages @@ -23,12 +25,14 @@ from ssh import cmd_ssh @verbose_option @config_option @nowrapper_option -def cli(verbose: bool = False, config_file: str = None, no_wrapper: bool = False, error_shell: bool = False): +def cli(verbose: bool = False, config_file: str = None, wrapper_override: Optional[bool] = None, error_shell: bool = False): setup_logging(verbose) config.runtime['verbose'] = verbose - config.runtime['no_wrap'] = no_wrapper + config.runtime['no_wrap'] = wrapper_override is False config.runtime['error_shell'] = error_shell config.try_load_file(config_file) + if wrapper_override: + enforce_wrap() def main(): diff --git a/wrapper/__init__.py b/wrapper/__init__.py index 4b7a4c9..b680edd 100644 --- a/wrapper/__init__.py +++ b/wrapper/__init__.py @@ -51,10 +51,10 @@ def wrap_if_foreign_arch(arch: Arch): nowrapper_option = click.option( - '-W', - '--no-wrapper', - 'no_wrapper', + '-w/-W', + '--force-wrapper/--no-wrapper', + 'wrapper_override', is_flag=True, - default=False, - help='Disable the docker wrapper. Defaults to autodetection.', + default=None, + help='Force or disable the docker wrapper. Defaults to autodetection.', ) From 6b64989a3b4af756e47a52393715c0ab476ffd05 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sat, 27 Aug 2022 03:49:07 +0200 Subject: [PATCH 08/44] config: add config.runtime.uid --- config/scheme.py | 7 ++++--- config/state.py | 11 +++++++---- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/config/scheme.py b/config/scheme.py index 80f1970..3add208 100644 --- a/config/scheme.py +++ b/config/scheme.py @@ -200,11 +200,12 @@ class Config(DataClass): @munchclass() class RuntimeConfiguration(DataClass): verbose: bool - config_file: Optional[str] - arch: Optional[Arch] no_wrap: bool - script_source_dir: str error_shell: bool + config_file: Optional[str] + script_source_dir: Optional[str] + arch: Optional[Arch] + uid: Optional[int] class ConfigLoadState(DataClass): diff --git a/config/state.py b/config/state.py index 5e64451..cb6f318 100644 --- a/config/state.py +++ b/config/state.py @@ -53,11 +53,12 @@ CONFIG_SECTIONS = list(CONFIG_DEFAULTS.keys()) CONFIG_RUNTIME_DEFAULTS: RuntimeConfiguration = RuntimeConfiguration.fromDict({ 'verbose': False, - 'config_file': None, - 'arch': None, 'no_wrap': False, - 'script_source_dir': os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'error_shell': False, + 'config_file': None, + 'script_source_dir': None, + 'arch': None, + 'uid': None, }) @@ -194,7 +195,9 @@ class ConfigStateHolder: self.file = Config.fromDict(merge_configs(conf_new=file_conf_base, conf_base=CONFIG_DEFAULTS)) self.file_state = ConfigLoadState() self.runtime = RuntimeConfiguration.fromDict(CONFIG_RUNTIME_DEFAULTS | runtime_conf) - self.runtime['arch'] = os.uname().machine + self.runtime.arch = os.uname().machine + self.runtime.script_source_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + self.runtime.uid = os.getuid() self._profile_cache = {} if file_conf_path: 
self.try_load_file(file_conf_path) From 13ad63446ecde530ea042e371ebf1acfe697b4ff Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sat, 27 Aug 2022 05:55:19 +0200 Subject: [PATCH 09/44] DockerWrapper.wrap(): run as config.runtime.uid instead of root --- Dockerfile | 6 ++++-- local/bin/wrapper_su_helper | 1 + wrapper/docker.py | 9 +++++++-- wrapper/wrapper.py | 2 +- wrapper_su_helper.py | 37 +++++++++++++++++++++++++++++++++++++ 5 files changed, 50 insertions(+), 5 deletions(-) create mode 120000 local/bin/wrapper_su_helper create mode 100755 wrapper_su_helper.py diff --git a/Dockerfile b/Dockerfile index d3c5ffd..ec3cea4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,11 +2,11 @@ FROM archlinux:base-devel RUN pacman-key --init && \ pacman -Sy --noconfirm archlinux-keyring && \ - pacman -Su --noconfirm \ + pacman -Su --noconfirm --needed \ python python-pip \ arch-install-scripts rsync \ aarch64-linux-gnu-gcc aarch64-linux-gnu-binutils aarch64-linux-gnu-glibc aarch64-linux-gnu-linux-api-headers \ - git \ + git sudo \ android-tools openssh inetutils \ parted @@ -32,5 +32,7 @@ RUN pip install -r requirements.txt COPY . . RUN python -c "from distro import distro; distro.get_kupfer_local(arch=None,in_chroot=False).repos_config_snippet()" | tee -a /etc/pacman.conf +RUN useradd -m -g users kupfer +RUN echo "kupfer ALL=(ALL) NOPASSWD: ALL" | tee /etc/sudoers.d/kupfer WORKDIR / diff --git a/local/bin/wrapper_su_helper b/local/bin/wrapper_su_helper new file mode 120000 index 0000000..a1f5932 --- /dev/null +++ b/local/bin/wrapper_su_helper @@ -0,0 +1 @@ +../../wrapper_su_helper.py \ No newline at end of file diff --git a/wrapper/docker.py b/wrapper/docker.py index 0b101c1..94952de 100644 --- a/wrapper/docker.py +++ b/wrapper/docker.py @@ -62,10 +62,13 @@ class DockerWrapper(BaseWrapper): wrapped_config = self.generate_wrapper_config() + target_user = 'root' if config.runtime.uid == 0 else 'kupfer' + target_home = '/root' if target_user == 'root' else f'/home/{target_user}' + ssh_dir = os.path.join(pathlib.Path.home(), '.ssh') if not os.path.exists(ssh_dir): os.makedirs(ssh_dir, mode=0o700) - volumes = self.get_bind_mounts_default(wrapped_config) + volumes = self.get_bind_mounts_default(wrapped_config, ssh_dir=ssh_dir, target_home=target_home) volumes |= dict({config.get_path(vol_name): vol_dest for vol_name, vol_dest in DOCKER_PATHS.items()}) docker_cmd = [ 'docker', @@ -78,7 +81,9 @@ class DockerWrapper(BaseWrapper): '--privileged', ] + docker_volumes_args(volumes) + [tag] - kupfer_cmd = ['kupferbootstrap', '--config', '/root/.config/kupfer/kupferbootstrap.toml'] + self.filter_args_wrapper(sys.argv[1:]) + kupfer_cmd = ['kupferbootstrap', '--config', volumes[wrapped_config]] + self.filter_args_wrapper(sys.argv[1:]) + if config.runtime.uid: + kupfer_cmd = ['wrapper_su_helper', '--uid', str(config.runtime.uid), '--username', 'kupfer', '--'] + kupfer_cmd cmd = docker_cmd + kupfer_cmd logging.debug('Wrapping in docker:' + repr(cmd)) diff --git a/wrapper/wrapper.py b/wrapper/wrapper.py index 4dcc6b3..5a0adb3 100644 --- a/wrapper/wrapper.py +++ b/wrapper/wrapper.py @@ -27,7 +27,7 @@ class Wrapper(Protocol): class BaseWrapper(Wrapper): - id: str + uuid: str identifier: str type: str wrapped_config_path: str diff --git a/wrapper_su_helper.py b/wrapper_su_helper.py new file mode 100755 index 0000000..40031e5 --- /dev/null +++ b/wrapper_su_helper.py @@ -0,0 +1,37 @@ +#!/bin/python3 + +import click +import os +import pwd + +from logger import logging, setup_logging + +from exec.cmd import run_cmd +from exec.file 
import chown + + +@click.command('kupferbootstrap_su') +@click.option('--username', default='kupfer', help="The user's name. If --uid is provided, the user's uid will be changed to this in passwd") +@click.option('--uid', default=1000, type=int, help='uid to change $username to and run as') +@click.argument('cmd', type=str, nargs=-1) +def kupferbootstrap_su(cmd: list[str], uid: int = 1000, username: str = 'kupfer'): + "Changes `username`'s uid to `uid` and executes kupferbootstrap as that user" + cmd = list(cmd) + user = pwd.getpwnam(username) + home = user.pw_dir + if uid != user.pw_uid: + run_cmd(['usermod', '-u', str(uid), username]).check_returncode() # type: ignore[union-attr] + chown(home, username, recursive=False) + env = os.environ | { + 'HOME': home, + 'USER': username, + } + logging.debug(f'wrapper: running {cmd} as {repr(username)}') + result = run_cmd(cmd, attach_tty=True, switch_user=username, env=env) + assert isinstance(result, int) + exit(result) + + +if __name__ == '__main__': + setup_logging(True) + kupferbootstrap_su(prog_name='kupferbootstrap_su_helper') From bef0efc637904174c847deefed282da5637f9425 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sat, 27 Aug 2022 16:48:50 +0200 Subject: [PATCH 10/44] global: refactor to use config.{file,runtime}.$member instead of config.file["$member"] --- chroot/__init__.py | 2 +- chroot/abstract.py | 5 +++-- chroot/build.py | 3 ++- config/__init__.py | 16 ++++++++-------- config/state.py | 19 ++++++++++--------- config/test_config.py | 7 ++++--- distro/distro.py | 5 +++-- generator.py | 5 +++-- image.py | 4 ++-- main.py | 10 +++++----- packages/__init__.py | 27 ++++++++++++++++----------- packages/pkgbuild.py | 6 +++--- ssh.py | 2 +- wrapper/__init__.py | 4 ++-- wrapper/docker.py | 4 ++-- 15 files changed, 65 insertions(+), 54 deletions(-) diff --git a/chroot/__init__.py b/chroot/__init__.py index f01ce48..86925c0 100644 --- a/chroot/__init__.py +++ b/chroot/__init__.py @@ -50,7 +50,7 @@ def cmd_chroot(type: str = 'build', arch: str = None, enable_crossdirect=True): build_chroot.initialize() build_chroot.initialized = True build_chroot.mount_pkgbuilds() - if config.file['build']['crossdirect'] and enable_crossdirect: + if config.file.build.crossdirect and enable_crossdirect: build_chroot.mount_crossdirect() else: raise Exception('Really weird bug') diff --git a/chroot/abstract.py b/chroot/abstract.py index b53081f..3153221 100644 --- a/chroot/abstract.py +++ b/chroot/abstract.py @@ -228,7 +228,8 @@ class Chroot(AbstractChroot): raise Exception(f'Chroot {self.name} is inactive, not running command! 
Hint: pass `fail_inactive=False`') if outer_env is None: outer_env = {} - native = config.runtime['arch'] + native = config.runtime.arch + assert native if self.arch != native and 'QEMU_LD_PREFIX' not in outer_env: outer_env = dict(outer_env) # copy dict for modification outer_env |= {'QEMU_LD_PREFIX': f'/usr/{GCC_HOSTSPECS[native][self.arch]}'} @@ -285,7 +286,7 @@ class Chroot(AbstractChroot): user = None group = None if check_space is None: - check_space = config.file['pacman']['check_space'] + check_space = config.file.pacman.check_space if not absolute_path: path = self.get_path('/etc') root_makedir(path) diff --git a/chroot/build.py b/chroot/build.py index 4efe0ab..4118957 100644 --- a/chroot/build.py +++ b/chroot/build.py @@ -69,7 +69,8 @@ class BuildChroot(Chroot): """ target_arch = self.arch if not native_chroot: - native_chroot = get_build_chroot(config.runtime['arch']) + assert config.runtime.arch + native_chroot = get_build_chroot(config.runtime.arch) host_arch = native_chroot.arch hostspec = GCC_HOSTSPECS[host_arch][target_arch] cc = f'{hostspec}-cc' diff --git a/config/__init__.py b/config/__init__.py index 3a8c815..03ce46a 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -66,8 +66,8 @@ def prompt_profile(name: str, create: bool = True, defaults: Union[Profile, dict profile: Any = PROFILE_EMPTY | defaults # don't use get_profile() here because we need the sparse profile - if name in config.file['profiles']: - profile |= config.file['profiles'][name] + if name in config.file.profiles: + profile |= config.file.profiles[name] elif create: logging.info(f"Profile {name} doesn't exist yet, creating new profile.") else: @@ -113,7 +113,7 @@ def prompt_for_save(retry_ctx: Optional[click.Context] = None): If `retry_ctx` is passed, the context's command will be reexecuted with the same arguments if the user chooses to retry. False will still be returned as the retry is expected to either save, perform another retry or arbort. """ - if click.confirm(f'Do you want to save your changes to {config.runtime["config_file"]}?', default=True): + if click.confirm(f'Do you want to save your changes to {config.runtime.config_file}?', default=True): return True if retry_ctx: if click.confirm('Retry? 
("n" to quit without saving)', default=True): @@ -171,7 +171,7 @@ def cmd_config_init(ctx, sections: list[str] = CONFIG_SECTIONS, non_interactive: config.update(results) if 'profiles' in sections: - current_profile = 'default' if 'current' not in config.file['profiles'] else config.file['profiles']['current'] + current_profile = 'default' if 'current' not in config.file.profiles else config.file.profiles.current new_current, _ = prompt_config('profile.current', default=current_profile, field_type=str) profile, changed = prompt_profile(new_current, create=True) config.update_profile(new_current, profile) @@ -182,7 +182,7 @@ def cmd_config_init(ctx, sections: list[str] = CONFIG_SECTIONS, non_interactive: if not noop: config.write() else: - logging.info(f'--noop passed, not writing to {config.runtime["config_file"]}!') + logging.info(f'--noop passed, not writing to {config.runtime.config_file}!') @cmd_config.command(name='set') @@ -250,8 +250,8 @@ def cmd_profile(): def cmd_profile_init(ctx, name: str, non_interactive: bool = False, noop: bool = False): """Create or edit a profile""" profile = deepcopy(PROFILE_EMPTY) - if name in config.file['profiles']: - profile |= config.file['profiles'][name] + if name in config.file.profiles: + profile |= config.file.profiles[name] if not non_interactive: profile, _changed = prompt_profile(name, create=True) @@ -262,4 +262,4 @@ def cmd_profile_init(ctx, name: str, non_interactive: bool = False, noop: bool = return config.write() else: - logging.info(f'--noop passed, not writing to {config.runtime["config_file"]}!') + logging.info(f'--noop passed, not writing to {config.runtime.config_file}!') diff --git a/config/state.py b/config/state.py index cb6f318..0ba9215 100644 --- a/config/state.py +++ b/config/state.py @@ -204,7 +204,7 @@ class ConfigStateHolder: def try_load_file(self, config_file=None, base=CONFIG_DEFAULTS): config_file = config_file or CONFIG_DEFAULT_PATH - self.runtime['config_file'] = config_file + self.runtime.config_file = config_file self._profile_cache = None try: self.file = parse_file(config_file=config_file, base=base) @@ -227,8 +227,8 @@ class ConfigStateHolder: raise ex def get_profile(self, name: Optional[str] = None) -> Profile: - name = name or self.file['profiles']['current'] - self._profile_cache = resolve_profile(name=name, sparse_profiles=self.file['profiles'], resolved=self._profile_cache) + name = name or self.file.profiles.current + self._profile_cache = resolve_profile(name=name, sparse_profiles=self.file.profiles, resolved=self._profile_cache) return self._profile_cache[name] def enforce_profile_device_set(self, profile_name: Optional[str] = None, hint_or_set_arch: bool = False) -> Profile: @@ -255,7 +255,7 @@ class ConfigStateHolder: return profile def get_path(self, path_name: str) -> str: - paths = self.file['paths'] + paths = self.file.paths return resolve_path_template(paths[path_name], paths) def get_package_dir(self, arch: str): @@ -268,7 +268,8 @@ class ConfigStateHolder: def write(self, path=None): """write toml representation of `self.file` to `path`""" if path is None: - path = self.runtime['config_file'] + path = self.runtime.config_file + assert path os.makedirs(os.path.dirname(path), exist_ok=True) dump_file(path, self.file) logging.info(f'Created config file at {path}') @@ -282,18 +283,18 @@ class ConfigStateHolder: merged = merge_configs(config_fragment, conf_base=self.file, warn_missing_defaultprofile=warn_missing_defaultprofile) changed = self.file != merged self.file.update(merged) - if changed and 
'profiles' in config_fragment and self.file['profiles'] != config_fragment['profiles']: + if changed and 'profiles' in config_fragment and self.file.profiles != config_fragment['profiles']: self.invalidate_profile_cache() return changed def update_profile(self, name: str, profile: Profile, merge: bool = False, create: bool = True, prune: bool = True): new = {} - if name not in self.file['profiles']: + if name not in self.file.profiles: if not create: raise Exception(f'Unknown profile: {name}') else: if merge: - new = deepcopy(self.file['profiles'][name]) + new = deepcopy(self.file.profiles[name]) logging.debug(f'new: {new}') logging.debug(f'profile: {profile}') @@ -301,5 +302,5 @@ class ConfigStateHolder: if prune: new = {key: val for key, val in new.items() if val is not None} - self.file['profiles'][name] = new + self.file.profiles[name] = new self.invalidate_profile_cache() diff --git a/config/test_config.py b/config/test_config.py index 1755919..4856c11 100644 --- a/config/test_config.py +++ b/config/test_config.py @@ -53,7 +53,7 @@ def validate_ConfigStateHolder(c: ConfigStateHolder, should_load: Optional[bool] def test_fixture_configstate(conf_fixture: str, exists: bool, request): configstate = request.getfixturevalue(conf_fixture) assert 'config_file' in configstate.runtime - confpath = configstate.runtime['config_file'] + confpath = configstate.runtime.config_file assert isinstance(confpath, str) assert confpath assert exists == os.path.exists(confpath) @@ -124,12 +124,13 @@ def load_toml_file(path) -> dict: def get_path_from_stateholder(c: ConfigStateHolder): - return c.runtime['config_file'] + return c.runtime.config_file def test_config_save_nonexistant(configstate_nonexistant: ConfigStateHolder): c = configstate_nonexistant - confpath = c.runtime['config_file'] + confpath = c.runtime.config_file + assert confpath assert not os.path.exists(confpath) c.write() assert confpath diff --git a/distro/distro.py b/distro/distro.py index 6691252..86ca22e 100644 --- a/distro/distro.py +++ b/distro/distro.py @@ -75,7 +75,7 @@ _kupfer_local_chroots = dict[Arch, Distro]() def get_kupfer_https(arch: Arch, scan: bool = False) -> Distro: global _kupfer_https if arch not in _kupfer_https or not _kupfer_https[arch]: - _kupfer_https[arch] = get_kupfer(arch, KUPFER_HTTPS.replace('%branch%', config.file['pacman']['repo_branch']), scan) + _kupfer_https[arch] = get_kupfer(arch, KUPFER_HTTPS.replace('%branch%', config.file.pacman.repo_branch), scan) item = _kupfer_https[arch] if scan and not item.is_scanned(): item.scan() @@ -85,7 +85,8 @@ def get_kupfer_https(arch: Arch, scan: bool = False) -> Distro: def get_kupfer_local(arch: Optional[Arch] = None, in_chroot: bool = True, scan: bool = False) -> Distro: global _kupfer_local, _kupfer_local_chroots cache = _kupfer_local_chroots if in_chroot else _kupfer_local - arch = arch or config.runtime['arch'] + arch = arch or config.runtime.arch + assert arch if arch not in cache or not cache[arch]: dir = CHROOT_PATHS['packages'] if in_chroot else config.get_path('packages') cache[arch] = get_kupfer(arch, f"file://{dir}/$arch/$repo") diff --git a/generator.py b/generator.py index 05a2cde..0200583 100644 --- a/generator.py +++ b/generator.py @@ -7,7 +7,8 @@ def generate_makepkg_conf(arch: Arch, cross: bool = False, chroot: str = None) - Generate a makepkg.conf. For use with crosscompiling, specify `cross=True` and pass as `chroot` the relative path inside the native chroot where the foreign chroot will be mounted. 
""" - hostspec = GCC_HOSTSPECS[config.runtime['arch'] if cross else arch][arch] + assert config.runtime.arch + hostspec = GCC_HOSTSPECS[config.runtime.arch if cross else arch][arch] cflags = CFLAGS_ARCHES[arch] + CFLAGS_GENERAL if cross and not chroot: raise Exception('Cross-compile makepkg conf requested but no chroot path given: "{chroot}"') @@ -233,7 +234,7 @@ Color #NoProgressBar {'' if check_space else '#'}CheckSpace VerbosePkgLists -ParallelDownloads = {config.file['pacman']['parallel_downloads']} +ParallelDownloads = {config.file.pacman.parallel_downloads} # By default, pacman accepts packages signed by keys that its local keyring # trusts (see pacman-key and its man page), as well as unsigned packages. diff --git a/image.py b/image.py index 12fa633..6ca0391 100644 --- a/image.py +++ b/image.py @@ -399,7 +399,7 @@ def cmd_build(profile_name: str = None, packages = BASE_PACKAGES + DEVICES[device] + FLAVOURS[flavour]['packages'] + profile['pkgs_include'] - if arch != config.runtime['arch']: + if arch != config.runtime.arch: build_enable_qemu_binfmt(arch) if local_repos and build_pkgs: @@ -475,7 +475,7 @@ def cmd_inspect(profile: str = None, shell: bool = False): if shell: chroot.initialized = True chroot.activate() - if arch != config.runtime['arch']: + if arch != config.runtime.arch: logging.info('Installing requisites for foreign-arch shell') build_enable_qemu_binfmt(arch) logging.info('Starting inspection shell') diff --git a/main.py b/main.py index 338a6a3..3987b07 100755 --- a/main.py +++ b/main.py @@ -27,9 +27,9 @@ from ssh import cmd_ssh @nowrapper_option def cli(verbose: bool = False, config_file: str = None, wrapper_override: Optional[bool] = None, error_shell: bool = False): setup_logging(verbose) - config.runtime['verbose'] = verbose - config.runtime['no_wrap'] = wrapper_override is False - config.runtime['error_shell'] = error_shell + config.runtime.verbose = verbose + config.runtime.no_wrap = wrapper_override is False + config.runtime.error_shell = error_shell config.try_load_file(config_file) if wrapper_override: enforce_wrap() @@ -39,11 +39,11 @@ def main(): try: return cli(prog_name='kupferbootstrap') except Exception as ex: - if config.runtime['verbose']: + if config.runtime.verbose: logging.fatal(get_trace()) else: logging.fatal(ex) - if config.runtime['error_shell']: + if config.runtime.error_shell: logging.info('Starting error shell. 
Type exit to quit.') subprocess.call('/bin/bash') exit(1) diff --git a/packages/__init__.py b/packages/__init__.py index abf7cb2..b97ff73 100644 --- a/packages/__init__.py +++ b/packages/__init__.py @@ -36,7 +36,7 @@ pacman_cmd = [ def get_makepkg_env(arch: Optional[Arch] = None): # has to be a function because calls to `config` must be done after config file was read - threads = config.file['build']['threads'] or multiprocessing.cpu_count() + threads = config.file.build.threads or multiprocessing.cpu_count() env = {key: val for key, val in os.environ.items() if not key.split('_', maxsplit=1)[0] in ['CI', 'GITLAB', 'FF']} env |= { 'LANG': 'C', @@ -333,7 +333,8 @@ def try_download_package(dest_file_path: str, package: Pkgbuild, arch: Arch) -> def check_package_version_built(package: Pkgbuild, arch: Arch, try_download: bool = False) -> bool: enforce_wrap() - native_chroot = setup_build_chroot(config.runtime['arch']) + assert config.runtime.arch + native_chroot = setup_build_chroot(config.runtime.arch) config_path = '/' + native_chroot.write_makepkg_conf( target_arch=arch, cross_chroot_relative=os.path.join('chroot', arch), @@ -407,7 +408,8 @@ def setup_build_chroot( add_kupfer_repos: bool = True, clean_chroot: bool = False, ) -> BuildChroot: - if arch != config.runtime['arch']: + assert config.runtime.arch + if arch != config.runtime.arch: wrap_if_foreign_arch(arch) build_enable_qemu_binfmt(arch) init_prebuilts(arch) @@ -467,15 +469,16 @@ def build_package( makepkg_compile_opts = ['--holdver'] makepkg_conf_path = 'etc/makepkg.conf' repo_dir = repo_dir if repo_dir else config.get_path('pkgbuilds') - foreign_arch = config.runtime['arch'] != arch + foreign_arch = config.runtime.arch != arch deps = (list(set(package.depends) - set(package.names()))) target_chroot = setup_build_chroot( arch=arch, extra_packages=deps, clean_chroot=clean_chroot, ) + assert config.runtime.arch native_chroot = target_chroot if not foreign_arch else setup_build_chroot( - arch=config.runtime['arch'], + arch=config.runtime.arch, extra_packages=['base-devel'] + CROSSDIRECT_PKGS, clean_chroot=clean_chroot, ) @@ -635,7 +638,8 @@ def build_packages_by_paths( if isinstance(paths, str): paths = [paths] - for _arch in set([arch, config.runtime['arch']]): + assert config.runtime.arch + for _arch in set([arch, config.runtime.arch]): init_prebuilts(_arch) packages = filter_packages(paths, repo=repo, allow_empty_results=False) return build_packages( @@ -661,7 +665,8 @@ def build_enable_qemu_binfmt(arch: Arch, repo: Optional[dict[str, Pkgbuild]] = N logging.info('Installing qemu-user (building if necessary)') if lazy and _qemu_enabled[arch]: return - native = config.runtime['arch'] + native = config.runtime.arch + assert native if arch == native: return wrap_if_foreign_arch(arch) @@ -735,10 +740,10 @@ def build( force=force, rebuild_dependants=rebuild_dependants, try_download=try_download, - enable_crosscompile=config.file['build']['crosscompile'], - enable_crossdirect=config.file['build']['crossdirect'], - enable_ccache=config.file['build']['ccache'], - clean_chroot=config.file['build']['clean_mode'], + enable_crosscompile=config.file.build.crosscompile, + enable_crossdirect=config.file.build.crossdirect, + enable_ccache=config.file.build.ccache, + clean_chroot=config.file.build.clean_mode, ) diff --git a/packages/pkgbuild.py b/packages/pkgbuild.py index 0fc0a8a..658ee73 100644 --- a/packages/pkgbuild.py +++ b/packages/pkgbuild.py @@ -47,8 +47,8 @@ def clone_pkbuilds(pkgbuilds_dir: str, repo_url: str, branch: str, interactive=F 
def init_pkgbuilds(interactive=False): pkgbuilds_dir = config.get_path('pkgbuilds') - repo_url = config.file['pkgbuilds']['git_repo'] - branch = config.file['pkgbuilds']['git_branch'] + repo_url = config.file.pkgbuilds.git_repo + branch = config.file.pkgbuilds.git_branch clone_pkbuilds(pkgbuilds_dir, repo_url, branch, interactive=interactive, update=False) @@ -193,7 +193,7 @@ def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] = global config if _config: config = _config - setup_logging(verbose=config.runtime['verbose'], log_setup=False) # different thread needs log setup. + setup_logging(verbose=config.runtime.verbose, log_setup=False) # different thread needs log setup. logging.info(f"Parsing PKGBUILD for {relative_pkg_dir}") pkgbuilds_dir = config.get_path('pkgbuilds') pkgdir = os.path.join(pkgbuilds_dir, relative_pkg_dir) diff --git a/ssh.py b/ssh.py index 5011f0a..5326b51 100644 --- a/ssh.py +++ b/ssh.py @@ -32,7 +32,7 @@ def run_ssh_command(cmd: list[str] = [], extra_args = [] if len(keys) > 0: extra_args += ['-i', keys[0]] - if config.runtime['verbose']: + if config.runtime.verbose: extra_args += ['-v'] if alloc_tty: extra_args += ['-t'] diff --git a/wrapper/__init__.py b/wrapper/__init__.py index b680edd..5ec4d37 100644 --- a/wrapper/__init__.py +++ b/wrapper/__init__.py @@ -15,7 +15,7 @@ wrapper_impls: dict[str, Wrapper] = { def get_wrapper_type(wrapper_type: str = None): - return wrapper_type or config.file['wrapper']['type'] + return wrapper_type or config.file.wrapper.type def get_wrapper_impl(wrapper_type: str = None) -> Wrapper: @@ -34,7 +34,7 @@ def is_wrapped(wrapper_type: str = None): def enforce_wrap(no_wrapper=False): wrapper_type = get_wrapper_type() - if wrapper_type != 'none' and not is_wrapped(wrapper_type) and not config.runtime['no_wrap'] and not no_wrapper: + if wrapper_type != 'none' and not is_wrapped(wrapper_type) and not config.runtime.no_wrap and not no_wrapper: logging.info(f'Wrapping in {wrapper_type}') wrap() diff --git a/wrapper/docker.py b/wrapper/docker.py index 94952de..d6215fb 100644 --- a/wrapper/docker.py +++ b/wrapper/docker.py @@ -22,7 +22,7 @@ class DockerWrapper(BaseWrapper): type: str = 'docker' def wrap(self): - script_path = config.runtime['script_source_dir'] + script_path = config.runtime.script_source_dir with open(os.path.join(script_path, 'version.txt')) as version_file: version = version_file.read().replace('\n', '') tag = f'registry.gitlab.com/kupfer/kupferbootstrap:{version}' @@ -34,7 +34,7 @@ class DockerWrapper(BaseWrapper): '.', '-t', tag, - ] + (['-q'] if not config.runtime['verbose'] else []) + ] + (['-q'] if not config.runtime.verbose else []) logging.debug('Running docker cmd: ' + ' '.join(cmd)) result = subprocess.run(cmd, cwd=script_path, capture_output=True) if result.returncode != 0: From ac7d16e4a781b07288678eff7802f68da2094317 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 01:19:08 +0200 Subject: [PATCH 11/44] exec.file.write_file(): fix situation where file exists but stat fails due to permissions --- exec/file.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/exec/file.py b/exec/file.py index a5882f5..b3baf6b 100644 --- a/exec/file.py +++ b/exec/file.py @@ -82,8 +82,12 @@ def write_file( fstat: os.stat_result exists = root_check_exists(path) dirname = os.path.dirname(path) + failed = False if exists: - fstat = os.stat(path) + try: + fstat = os.stat(path) + except PermissionError: + failed = True else: chown_user = chown_user or 
get_user_name(os.getuid()) chown_group = chown_group or get_group_name(os.getgid()) @@ -94,9 +98,10 @@ def write_file( if mode: if not mode.isnumeric(): raise Exception(f"Unknown file mode '{mode}' (must be numeric): {path}") - if not exists or stat.filemode(int(mode, 8)) != stat.filemode(fstat.st_mode): + if not exists or failed or stat.filemode(int(mode, 8)) != stat.filemode(fstat.st_mode): chmod_mode = mode - failed = try_native_filewrite(path, content, chmod_mode) + if not failed: + failed = try_native_filewrite(path, content, chmod_mode) is not None if exists or failed: if failed: try: From 4dc134c8f8474d9de29fc6d53651b7b04f4f9bb3 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 02:12:05 +0200 Subject: [PATCH 12/44] exec/cmd: generate_cmd_{su,elevated}: tolerate flat string as input for cmd instead of list --- exec/cmd.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/exec/cmd.py b/exec/cmd.py index 9e97c69..cd7fedb 100644 --- a/exec/cmd.py +++ b/exec/cmd.py @@ -47,15 +47,18 @@ def wrap_in_bash(cmd: Union[list[str], str], flatten_result=True) -> Union[str, return res -def generate_cmd_elevated(cmd: list[str], elevation_method: ElevationMethod): +def generate_cmd_elevated(cmd: Union[list[str], str], elevation_method: ElevationMethod): "wraps `cmd` in the necessary commands to escalate, e.g. `['sudo', '--', cmd]`." + if isinstance(cmd, str): + cmd = wrap_in_bash(cmd, flatten_result=False) + assert not isinstance(cmd, str) # typhints cmd as list[str] if elevation_method not in ELEVATION_METHODS: raise Exception(f"Unknown elevation method {elevation_method}") return ELEVATION_METHODS[elevation_method] + cmd def generate_cmd_su( - cmd: list[str], + cmd: Union[list[str], str], switch_user: str, elevation_method: Optional[ElevationMethod] = None, force_su: bool = False, From 20975feec6af8d43c1e328b00f1658cafbcdea16 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 00:29:56 +0200 Subject: [PATCH 13/44] chroot.run_cmd(): add switch_user parameter --- chroot/abstract.py | 9 +++++++-- exec/cmd.py | 1 + 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/chroot/abstract.py b/chroot/abstract.py index 3153221..66621a6 100644 --- a/chroot/abstract.py +++ b/chroot/abstract.py @@ -10,7 +10,7 @@ from uuid import uuid4 from config import config from constants import Arch, CHROOT_PATHS, GCC_HOSTSPECS from distro.distro import get_base_distro, get_kupfer_local, RepoInfo -from exec.cmd import run_root_cmd, generate_env_cmd, flatten_shell_script, wrap_in_bash +from exec.cmd import run_root_cmd, generate_env_cmd, flatten_shell_script, wrap_in_bash, generate_cmd_su from exec.file import makedir, root_makedir, root_write_file, write_file from generator import generate_makepkg_conf from utils import mount, umount, check_findmnt, log_or_exception @@ -223,6 +223,7 @@ class Chroot(AbstractChroot): cwd: Optional[str] = None, fail_inactive: bool = True, stdout: Optional[int] = None, + switch_user: Optional[str] = None, ) -> Union[int, subprocess.CompletedProcess]: if not self.active and fail_inactive: raise Exception(f'Chroot {self.name} is inactive, not running command! 
Hint: pass `fail_inactive=False`') @@ -239,7 +240,11 @@ class Chroot(AbstractChroot): script = flatten_shell_script(script, shell_quote_items=False, wrap_in_shell_quote=False) if cwd: script = f"cd {shell_quote(cwd)} && ( {script} )" - cmd = flatten_shell_script(['chroot', self.path] + env_cmd + wrap_in_bash(script, flatten_result=False), shell_quote_items=True) + if switch_user: + inner_cmd = generate_cmd_su(script, switch_user=switch_user, elevation_method='none', force_su=True) + else: + inner_cmd = wrap_in_bash(script, flatten_result=False) + cmd = flatten_shell_script(['chroot', self.path] + env_cmd + inner_cmd, shell_quote_items=True) return run_root_cmd(cmd, env=outer_env, attach_tty=attach_tty, capture_output=capture_output, stdout=stdout) diff --git a/exec/cmd.py b/exec/cmd.py index cd7fedb..e218f9d 100644 --- a/exec/cmd.py +++ b/exec/cmd.py @@ -14,6 +14,7 @@ ElevationMethod: TypeAlias = str ELEVATION_METHOD_DEFAULT = "sudo" ELEVATION_METHODS: dict[ElevationMethod, list[str]] = { + "none": [], "sudo": ['sudo', '--'], } From fc922981002830075095583169f4657f0f880af6 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 00:29:34 +0200 Subject: [PATCH 14/44] chroot.create_user(): add optional uid and non_unique parameter --- chroot/abstract.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/chroot/abstract.py b/chroot/abstract.py index 66621a6..12e542e 100644 --- a/chroot/abstract.py +++ b/chroot/abstract.py @@ -311,17 +311,21 @@ class Chroot(AbstractChroot): def create_user( self, - user='kupfer', - password='123456', - groups=['network', 'video', 'audio', 'optical', 'storage', 'input', 'scanner', 'games', 'lp', 'rfkill', 'wheel'], + user: str = 'kupfer', + password: Optional[str] = None, + groups: list[str] = ['network', 'video', 'audio', 'optical', 'storage', 'input', 'scanner', 'games', 'lp', 'rfkill', 'wheel'], + uid: Optional[int] = None, + non_unique: bool = False, ): user = user or 'kupfer' + uid_param = f'-u {uid}' if uid is not None else '' + unique_param = '--non-unique' if non_unique else '' install_script = f''' set -e if ! id -u "{user}" >/dev/null 2>&1; then - useradd -m {user} + useradd -m {unique_param} {uid_param} {user} fi - usermod -a -G {",".join(groups)} {user} + usermod -a -G {",".join(groups)} {unique_param} {uid_param} {user} chown {user}:{user} /home/{user} -R ''' if password: @@ -329,8 +333,9 @@ class Chroot(AbstractChroot): else: install_script += f'echo "Set user password:" && passwd {user}' result = self.run_cmd(install_script) + assert isinstance(result, subprocess.CompletedProcess) if result.returncode != 0: - raise Exception('Failed to setup user') + raise Exception(f'Failed to setup user {user} in self.name') def try_install_packages( self, From dcccc9bdc86b387937b16273520777f5d4e7905b Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 01:48:53 +0200 Subject: [PATCH 15/44] chroot: add chroot.add_sudo_config() --- chroot/abstract.py | 8 ++++++++ image.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/chroot/abstract.py b/chroot/abstract.py index 12e542e..84b506c 100644 --- a/chroot/abstract.py +++ b/chroot/abstract.py @@ -337,6 +337,14 @@ class Chroot(AbstractChroot): if result.returncode != 0: raise Exception(f'Failed to setup user {user} in self.name') + def add_sudo_config(self, config_name: str = 'wheel', privilegee: str = '%wheel', password_required: bool = True): + if '.' 
in config_name: + raise Exception(f"won't create sudoers.d file {config_name} since it will be ignored by sudo because it contains a dot!") + comment = ('# allow ' + (f'members of group {privilegee.strip("%")}' if privilegee.startswith('%') else f'user {privilegee}') + + 'to run any program as root' + ('' if password_required else ' without a password')) + line = privilegee + (' ALL=(ALL:ALL) ALL' if password_required else ' ALL=(ALL) NOPASSWD: ALL') + root_write_file(self.get_path(f'/etc/sudoers.d/{config_name}'), f'{comment}\n{line}') + def try_install_packages( self, packages: list[str], diff --git a/image.py b/image.py index 6ca0391..47430d2 100644 --- a/image.py +++ b/image.py @@ -319,6 +319,7 @@ def install_rootfs( user=user, password=profile['password'], ) + chroot.add_sudo_config(config_name='wheel', privilegee='%wheel', password_required=True) copy_ssh_keys( chroot.path, user=user, @@ -329,7 +330,6 @@ def install_rootfs( extra_repos=get_kupfer_https(arch).repos, in_chroot=True, ), - 'etc/sudoers.d/wheel': "# allow members of group wheel to execute any command\n%wheel ALL=(ALL:ALL) ALL\n", 'etc/hostname': profile['hostname'], } for target, content in files.items(): From d9a88e14746ee31e26baa6848e15768e8ee6238f Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 01:50:07 +0200 Subject: [PATCH 16/44] packages: use user 'kupfer' in chroots for building --- packages/__init__.py | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/packages/__init__.py b/packages/__init__.py index b97ff73..365d505 100644 --- a/packages/__init__.py +++ b/packages/__init__.py @@ -42,7 +42,6 @@ def get_makepkg_env(arch: Optional[Arch] = None): 'LANG': 'C', 'CARGO_BUILD_JOBS': str(threads), 'MAKEFLAGS': f"-j{threads}", - 'HOME': '/root', } native = config.runtime.arch assert native @@ -349,10 +348,7 @@ def check_package_version_built(package: Pkgbuild, arch: Arch, try_download: boo '--skippgpcheck', '--packagelist', ] - result: Any = native_chroot.run_cmd( - cmd, - capture_output=True, - ) + result: Any = native_chroot.run_cmd(cmd, capture_output=True, switch_user='kupfer') if result.returncode != 0: raise Exception(f'Failed to get package list for {package.path}:' + '\n' + result.stdout.decode() + '\n' + result.stderr.decode()) @@ -423,19 +419,23 @@ def setup_build_chroot( chroot.mount_pkgbuilds() if extra_packages: chroot.try_install_packages(extra_packages, allow_fail=False) + if not os.path.exists(chroot.get_path('/home/kupfer')): + assert config.runtime.uid is not None + chroot.create_user('kupfer', password='12345678', uid=config.runtime.uid, non_unique=True) + if not os.path.exists(chroot.get_path('/etc/sudoers.d/kupfer_nopw')): + chroot.add_sudo_config('kupfer_nopw', 'kupfer', password_required=False) + return chroot -def setup_git_insecure_paths(chroot: BuildChroot): +def setup_git_insecure_paths(chroot: BuildChroot, username: str = 'kupfer'): chroot.run_cmd( ["git", "config", "--global", "--add", "safe.directory", "'*'"], - inner_env={ - 'HOME': '/root' - }, + switch_user=username, ).check_returncode() # type: ignore[union-attr] -def setup_sources(package: Pkgbuild, chroot: BuildChroot, makepkg_conf_path='/etc/makepkg.conf'): +def setup_sources(package: Pkgbuild, chroot: BuildChroot, makepkg_conf_path='/etc/makepkg.conf', switch_user: str = 'kupfer'): makepkg_setup_args = [ '--config', makepkg_conf_path, @@ -451,6 +451,7 @@ def setup_sources(package: Pkgbuild, chroot: BuildChroot, makepkg_conf_path='/et MAKEPKG_CMD + makepkg_setup_args, 
cwd=os.path.join(CHROOT_PATHS['pkgbuilds'], package.path), inner_env=get_makepkg_env(chroot.arch), + switch_user=switch_user, ) assert isinstance(result, subprocess.CompletedProcess) if result.returncode != 0: @@ -530,7 +531,12 @@ def build_package( build_cmd = f'makepkg --config {makepkg_conf_absolute} --skippgpcheck --needed --noconfirm --ignorearch {" ".join(makepkg_compile_opts)}' logging.debug(f'Building: Running {build_cmd}') - result = build_root.run_cmd(build_cmd, inner_env=env, cwd=os.path.join(CHROOT_PATHS['pkgbuilds'], package.path)) + result = build_root.run_cmd( + build_cmd, + inner_env=env, + cwd=os.path.join(CHROOT_PATHS['pkgbuilds'], package.path), + switch_user='kupfer', + ) assert isinstance(result, subprocess.CompletedProcess) if result.returncode != 0: raise Exception(f'Failed to compile package {package.path}') From ea88397f1fbf0b3ea5da28091a37e180786a80ab Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 03:04:48 +0200 Subject: [PATCH 17/44] packages.filter_packages(): optionally check package arch --- packages/__init__.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/__init__.py b/packages/__init__.py index 365d505..e464c5e 100644 --- a/packages/__init__.py +++ b/packages/__init__.py @@ -80,6 +80,7 @@ def init_prebuilts(arch: Arch, dir: str = None): def filter_packages( paths: Iterable[str], repo: Optional[dict[str, Pkgbuild]] = None, + arch: Optional[Arch] = None, allow_empty_results=True, use_paths=True, use_names=True, @@ -96,7 +97,12 @@ def filter_packages( comparison.add(pkg.path) if use_names: comparison.add(pkg.name) - if comparison.intersection(paths): + matches = list(comparison.intersection(paths)) + if matches: + assert pkg.arches + if arch and not set([arch, 'any']).intersection(pkg.arches): + logging.warn(f"Pkg {pkg.name} matches query {matches[0]} but isn't available for architecture {arch}: {pkg.arches}") + continue result += [pkg] if not allow_empty_results and not result: @@ -647,7 +653,7 @@ def build_packages_by_paths( assert config.runtime.arch for _arch in set([arch, config.runtime.arch]): init_prebuilts(_arch) - packages = filter_packages(paths, repo=repo, allow_empty_results=False) + packages = filter_packages(paths, arch=arch, repo=repo, allow_empty_results=False) return build_packages( packages, arch, From 2f98ffc79d4c8e4595c8efe86ae180d673d2f997 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 04:06:36 +0200 Subject: [PATCH 18/44] pkgbuild: add get_filename(arch) --- packages/pkgbuild.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/pkgbuild.py b/packages/pkgbuild.py index 658ee73..ce6fa5b 100644 --- a/packages/pkgbuild.py +++ b/packages/pkgbuild.py @@ -128,6 +128,13 @@ class Pkgbuild(PackageInfo): def refresh_sources(self): raise NotImplementedError() + def get_filename(self, arch: Arch): + if not self.version: + self.update_version() + if self.arches[0] == 'any': + arch = 'any' + return f'{self.name}-{self.version}-{arch}.pkg.tar.zst' + class Pkgbase(Pkgbuild): subpackages: list[SubPkgbuild] From 7b05fa4fdb0191e5416a0691eff2c8352eb09622 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 04:06:53 +0200 Subject: [PATCH 19/44] packages.check_package_version_built(): use Pkgbuild.get_filename() instead of running `makepkg --packagelist` --- packages/__init__.py | 48 ++++++++++++-------------------------------- 1 file changed, 13 insertions(+), 35 deletions(-) diff --git a/packages/__init__.py b/packages/__init__.py index e464c5e..5678d70 
100644 --- a/packages/__init__.py +++ b/packages/__init__.py @@ -9,7 +9,7 @@ from glob import glob from urllib.error import HTTPError from urllib.request import urlopen from shutil import copyfileobj -from typing import Iterable, Iterator, Any, Optional +from typing import Iterable, Iterator, Optional from binfmt import register as binfmt_register from constants import REPOSITORIES, CROSSDIRECT_PKGS, QEMU_BINFMT_PKGS, GCC_HOSTSPECS, ARCHES, Arch, CHROOT_PATHS, MAKEPKG_CMD @@ -337,45 +337,21 @@ def try_download_package(dest_file_path: str, package: Pkgbuild, arch: Arch) -> def check_package_version_built(package: Pkgbuild, arch: Arch, try_download: bool = False) -> bool: - enforce_wrap() - assert config.runtime.arch - native_chroot = setup_build_chroot(config.runtime.arch) - config_path = '/' + native_chroot.write_makepkg_conf( - target_arch=arch, - cross_chroot_relative=os.path.join('chroot', arch), - cross=True, - ) - - cmd = ['cd', os.path.join(CHROOT_PATHS['pkgbuilds'], package.path), '&&'] + MAKEPKG_CMD + [ - '--config', - config_path, - '--nobuild', - '--noprepare', - '--skippgpcheck', - '--packagelist', - ] - result: Any = native_chroot.run_cmd(cmd, capture_output=True, switch_user='kupfer') - if result.returncode != 0: - raise Exception(f'Failed to get package list for {package.path}:' + '\n' + result.stdout.decode() + '\n' + result.stderr.decode()) - missing = True - for line in result.stdout.decode('utf-8').split('\n'): - if not line: - continue - basename = os.path.basename(line) - file = os.path.join(config.get_package_dir(arch), package.repo, basename) - filename_stripped = strip_compression_extension(file) - logging.debug(f'Checking if {file} is built') + filename = package.get_filename(arch) + filename_stripped = strip_compression_extension(filename) + logging.debug(f'Checking if {filename_stripped} is built') + for ext in ['xz', 'zst']: + file = os.path.join(config.get_package_dir(arch), package.repo, f'{filename_stripped}.{ext}') if not filename_stripped.endswith('.pkg.tar'): - logging.debug(f'skipping unknown file extension {basename}') - continue + raise Exception(f'stripped filename has unknown extension. 
{filename}') if os.path.exists(file) or (try_download and try_download_package(file, package, arch)): missing = False add_file_to_repo(file, repo_name=package.repo, arch=arch) # copy arch=(any) packages to all arches if filename_stripped.endswith('any.pkg.tar'): logging.debug("any-arch pkg detected") - target_repo_file = os.path.join(config.get_package_dir(arch), package.repo, basename) + target_repo_file = os.path.join(config.get_package_dir(arch), package.repo, filename) if os.path.exists(target_repo_file): missing = False else: @@ -383,7 +359,7 @@ def check_package_version_built(package: Pkgbuild, arch: Arch, try_download: boo for repo_arch in ARCHES: if repo_arch == arch: continue # we already checked that - other_repo_path = os.path.join(config.get_package_dir(repo_arch), package.repo, basename) + other_repo_path = os.path.join(config.get_package_dir(repo_arch), package.repo, filename) if os.path.exists(other_repo_path): missing = False logging.info(f"package {file} found in {repo_arch} repos, copying to {arch}") @@ -396,12 +372,14 @@ def check_package_version_built(package: Pkgbuild, arch: Arch, try_download: boo for repo_arch in ARCHES: if repo_arch == arch: continue # we already have that - copy_target = os.path.join(config.get_package_dir(repo_arch), package.repo, basename) + copy_target = os.path.join(config.get_package_dir(repo_arch), package.repo, filename) if not os.path.exists(copy_target): logging.info(f"copying to {copy_target}") shutil.copyfile(target_repo_file, copy_target) add_file_to_repo(copy_target, package.repo, repo_arch) - return not missing + if not missing: + return True + return False def setup_build_chroot( From 39b98d30ae68505332ccfad2b3f5da7aaf0ed4e4 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 05:46:46 +0200 Subject: [PATCH 20/44] chroot.create_user(): add `primary_group` parameter --- chroot/abstract.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/chroot/abstract.py b/chroot/abstract.py index 84b506c..45557b4 100644 --- a/chroot/abstract.py +++ b/chroot/abstract.py @@ -314,19 +314,21 @@ class Chroot(AbstractChroot): user: str = 'kupfer', password: Optional[str] = None, groups: list[str] = ['network', 'video', 'audio', 'optical', 'storage', 'input', 'scanner', 'games', 'lp', 'rfkill', 'wheel'], + primary_group: Optional[str] = 'users', uid: Optional[int] = None, non_unique: bool = False, ): user = user or 'kupfer' uid_param = f'-u {uid}' if uid is not None else '' unique_param = '--non-unique' if non_unique else '' + pgroup_param = f'-g {primary_group}' if primary_group else '' install_script = f''' set -e if ! 
id -u "{user}" >/dev/null 2>&1; then - useradd -m {unique_param} {uid_param} {user} + useradd -m {unique_param} {uid_param} {pgroup_param} {user} fi - usermod -a -G {",".join(groups)} {unique_param} {uid_param} {user} - chown {user}:{user} /home/{user} -R + usermod -a -G {",".join(groups)} {unique_param} {uid_param} {pgroup_param} {user} + chown {user}:{primary_group if primary_group else user} /home/{user} -R ''' if password: install_script += f'echo "{user}:{password}" | chpasswd' From b5214d9cd69b643b1e49fb77cee826391d651fce Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 05:47:46 +0200 Subject: [PATCH 21/44] packages: respect package arches before and during building --- packages/__init__.py | 19 ++++++++++++------- packages/pkgbuild.py | 2 +- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/packages/__init__.py b/packages/__init__.py index 5678d70..923ad0c 100644 --- a/packages/__init__.py +++ b/packages/__init__.py @@ -85,11 +85,13 @@ def filter_packages( use_paths=True, use_names=True, ) -> Iterable[Pkgbuild]: + if not (use_names or use_paths): + raise Exception('Error: filter_packages instructed to match neither by names nor paths; impossible!') if not allow_empty_results and not paths: raise Exception("Can't search for packages: no query given") repo = repo or discover_pkgbuilds() if 'all' in paths: - return list(repo.values()) + return [pkg for pkg in repo.values() if set([arch, 'any']).intersection(pkg.arches)] result = [] for pkg in repo.values(): comparison = set() @@ -268,7 +270,7 @@ def strip_compression_extension(filename: str): for ext in ['zst', 'xz', 'gz', 'bz2']: if filename.endswith(f'.pkg.tar.{ext}'): return filename[:-(len(ext) + 1)] - logging.warning(f"file {filename} matches no known package extension") + logging.debug(f"file {filename} matches no known package extension") return filename @@ -403,9 +405,8 @@ def setup_build_chroot( chroot.mount_pkgbuilds() if extra_packages: chroot.try_install_packages(extra_packages, allow_fail=False) - if not os.path.exists(chroot.get_path('/home/kupfer')): - assert config.runtime.uid is not None - chroot.create_user('kupfer', password='12345678', uid=config.runtime.uid, non_unique=True) + assert config.runtime.uid is not None + chroot.create_user('kupfer', password='12345678', uid=config.runtime.uid, non_unique=True) if not os.path.exists(chroot.get_path('/etc/sudoers.d/kupfer_nopw')): chroot.add_sudo_config('kupfer_nopw', 'kupfer', password_required=False) @@ -529,15 +530,19 @@ def build_package( def get_dependants( repo: dict[str, Pkgbuild], packages: Iterable[Pkgbuild], + arch: Arch, recursive: bool = True, ) -> set[Pkgbuild]: names = set([pkg.name for pkg in packages]) to_add = set[Pkgbuild]() for pkg in repo.values(): if set.intersection(names, set(pkg.depends)): + if not set([arch, 'any']).intersection(pkg.arches): + logging.warn(f'get_dependants: skipping matched pkg {pkg.name} due to wrong arch: {pkg.arches}') + continue to_add.add(pkg) if recursive and to_add: - to_add.update(get_dependants(repo, to_add)) + to_add.update(get_dependants(repo, to_add, arch=arch)) return to_add @@ -552,7 +557,7 @@ def get_unbuilt_package_levels( repo = repo or discover_pkgbuilds() dependants = set[Pkgbuild]() if rebuild_dependants: - dependants = get_dependants(repo, packages) + dependants = get_dependants(repo, packages, arch=arch) package_levels = generate_dependency_chain(repo, set(packages).union(dependants)) build_names = set[str]() build_levels = list[set[Pkgbuild]]() diff --git a/packages/pkgbuild.py 
b/packages/pkgbuild.py index ce6fa5b..b134b31 100644 --- a/packages/pkgbuild.py +++ b/packages/pkgbuild.py @@ -327,7 +327,7 @@ def discover_pkgbuilds(parallel: bool = True, lazy: bool = True) -> dict[str, Pk _pkgbuilds_paths[pkglist[0].path] = pkglist results += pkglist - logging.debug('Building package dictionary!') + logging.info('Building package dictionary') for package in results: for name in [package.name] + package.replaces: if name in packages: From 57be5367810e67211a69240773408ffbfb9c42e2 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 07:26:34 +0200 Subject: [PATCH 22/44] packages.cmd_sideload(): fix escape of `--overwrite=*` --- packages/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/__init__.py b/packages/__init__.py index 923ad0c..595a732 100644 --- a/packages/__init__.py +++ b/packages/__init__.py @@ -767,7 +767,7 @@ def cmd_sideload(paths: Iterable[str], arch: Optional[Arch] = None, no_build: bo '-U', ] + [os.path.join('/tmp', os.path.basename(file)) for file in files] + [ '--noconfirm', - '--overwrite=\\*', + "'--overwrite=\\*'", ], alloc_tty=True).check_returncode() From 9f1281f1cb7ac77dd567a4dd3680a25b21378c3d Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 07:27:04 +0200 Subject: [PATCH 23/44] wrapper_su_helper.py: use `su -P` to allocate a pseudo-TTY --- wrapper_su_helper.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/wrapper_su_helper.py b/wrapper_su_helper.py index 40031e5..b19eba0 100755 --- a/wrapper_su_helper.py +++ b/wrapper_su_helper.py @@ -1,12 +1,11 @@ #!/bin/python3 import click -import os import pwd from logger import logging, setup_logging -from exec.cmd import run_cmd +from exec.cmd import run_cmd, flatten_shell_script from exec.file import chown @@ -22,12 +21,9 @@ def kupferbootstrap_su(cmd: list[str], uid: int = 1000, username: str = 'kupfer' if uid != user.pw_uid: run_cmd(['usermod', '-u', str(uid), username]).check_returncode() # type: ignore[union-attr] chown(home, username, recursive=False) - env = os.environ | { - 'HOME': home, - 'USER': username, - } - logging.debug(f'wrapper: running {cmd} as {repr(username)}') - result = run_cmd(cmd, attach_tty=True, switch_user=username, env=env) + logging.debug(f'wrapper_su_helper: running {cmd} as {repr(username)}') + su_cmd = ['sudo', 'su', '-P', username, '-c', flatten_shell_script(cmd, wrap_in_shell_quote=True, shell_quote_items=True)] + result = run_cmd(su_cmd, attach_tty=True) assert isinstance(result, int) exit(result) From a76ad5ac4b8be5835bc52a0a5d92a7fb0c3209b4 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 17:20:35 +0200 Subject: [PATCH 24/44] packages.filter_packages(): only filter by arch if arch is not None --- packages/__init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/__init__.py b/packages/__init__.py index 595a732..79954a3 100644 --- a/packages/__init__.py +++ b/packages/__init__.py @@ -91,7 +91,10 @@ def filter_packages( raise Exception("Can't search for packages: no query given") repo = repo or discover_pkgbuilds() if 'all' in paths: - return [pkg for pkg in repo.values() if set([arch, 'any']).intersection(pkg.arches)] + all_pkgs = list(repo.values()) + if arch: + all_pkgs = [pkg for pkg in all_pkgs if set([arch, 'any']).intersection(pkg.arches)] + return all_pkgs result = [] for pkg in repo.values(): comparison = set() From 3c2e6fe2d042a14f3652421a7c865d76ba70fcec Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 
2022 17:21:16 +0200 Subject: [PATCH 25/44] packages and image: wrap more upfront on missing binaries --- image.py | 4 ++-- packages/__init__.py | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/image.py b/image.py index 47430d2..adb44dc 100644 --- a/image.py +++ b/image.py @@ -18,7 +18,7 @@ from exec.file import root_write_file, root_makedir, makedir from packages import build_enable_qemu_binfmt, build_packages_by_paths from packages.device import get_profile_device from ssh import copy_ssh_keys -from wrapper import wrap_if_foreign_arch +from wrapper import check_programs_wrap, wrap_if_foreign_arch # image files need to be slightly smaller than partitions to fit IMG_FILE_ROOT_DEFAULT_SIZE = "1800M" @@ -389,7 +389,7 @@ def cmd_build(profile_name: str = None, Unless overriden, required packages will be built or preferably downloaded from HTTPS repos. """ arch = get_profile_device(profile_name).arch - wrap_if_foreign_arch(arch) + check_programs_wrap(['makepkg', 'pacman', 'pacstrap']) profile: Profile = config.get_profile(profile_name) device, flavour = get_device_and_flavour(profile_name) size_extra_mb: int = int(profile["size_extra_mb"]) diff --git a/packages/__init__.py b/packages/__init__.py index 79954a3..667d2b6 100644 --- a/packages/__init__.py +++ b/packages/__init__.py @@ -11,7 +11,7 @@ from urllib.request import urlopen from shutil import copyfileobj from typing import Iterable, Iterator, Optional -from binfmt import register as binfmt_register +from binfmt import register as binfmt_register, QEMU_ARCHES from constants import REPOSITORIES, CROSSDIRECT_PKGS, QEMU_BINFMT_PKGS, GCC_HOSTSPECS, ARCHES, Arch, CHROOT_PATHS, MAKEPKG_CMD from config import config from exec.cmd import run_cmd, run_root_cmd @@ -591,6 +591,7 @@ def build_packages( enable_ccache: bool = True, clean_chroot: bool = False, ): + check_programs_wrap(['makepkg', 'pacman', 'pacstrap']) init_prebuilts(arch) build_levels = get_unbuilt_package_levels( packages, @@ -636,6 +637,7 @@ def build_packages_by_paths( if isinstance(paths, str): paths = [paths] + check_programs_wrap(['makepkg', 'pacman', 'pacstrap']) assert config.runtime.arch for _arch in set([arch, config.runtime.arch]): init_prebuilts(_arch) @@ -667,7 +669,7 @@ def build_enable_qemu_binfmt(arch: Arch, repo: Optional[dict[str, Pkgbuild]] = N assert native if arch == native: return - wrap_if_foreign_arch(arch) + check_programs_wrap([f'qemu-{QEMU_ARCHES[arch]}-static', 'pacman', 'makepkg']) # build qemu-user, binfmt, crossdirect build_packages_by_paths( CROSSDIRECT_PKGS, @@ -725,7 +727,6 @@ def build( rebuild_dependants: bool = False, try_download: bool = False, ): - # TODO: arch = config.get_profile()... 
arch = arch or get_profile_device(hint_or_set_arch=True).arch if arch not in ARCHES: From 8274a31068d9a0cd879490fa0f299ecd352be4e8 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 28 Aug 2022 17:26:45 +0200 Subject: [PATCH 26/44] pkgbuild.discover_pkgbuilds(): warn and skip directories that don't contain a PKGBUILD --- packages/pkgbuild.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/pkgbuild.py b/packages/pkgbuild.py index b134b31..f0e7af5 100644 --- a/packages/pkgbuild.py +++ b/packages/pkgbuild.py @@ -300,7 +300,11 @@ def discover_pkgbuilds(parallel: bool = True, lazy: bool = True) -> dict[str, Pk init_pkgbuilds(interactive=False) for repo in REPOSITORIES: for dir in os.listdir(os.path.join(pkgbuilds_dir, repo)): - paths.append(os.path.join(repo, dir)) + p = os.path.join(repo, dir) + if not os.path.exists(os.path.join(pkgbuilds_dir, p, 'PKGBUILD')): + logging.warning(f"{p} doesn't include a PKGBUILD file; skipping") + continue + paths.append(p) logging.info("Parsing PKGBUILDs") From fd1f75942980c43f7b58d35f4dc5c68d810620d0 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 29 Aug 2022 00:53:09 +0200 Subject: [PATCH 27/44] chroot: add chroot.mount_chroots() to mount /chroot and use in cmd_chroot() --- chroot/__init__.py | 5 ++++- chroot/abstract.py | 7 +++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/chroot/__init__.py b/chroot/__init__.py index 86925c0..ca54bf5 100644 --- a/chroot/__init__.py +++ b/chroot/__init__.py @@ -50,11 +50,14 @@ def cmd_chroot(type: str = 'build', arch: str = None, enable_crossdirect=True): build_chroot.initialize() build_chroot.initialized = True build_chroot.mount_pkgbuilds() - if config.file.build.crossdirect and enable_crossdirect: + build_chroot.mount_chroots() + assert arch and config.runtime.arch + if config.file.build.crossdirect and enable_crossdirect and arch != config.runtime.arch: build_chroot.mount_crossdirect() else: raise Exception('Really weird bug') + chroot.mount_packages() chroot.activate() logging.debug(f'Starting shell in {chroot.name}:') chroot.run_cmd('bash', attach_tty=True) diff --git a/chroot/abstract.py b/chroot/abstract.py index 45557b4..00b89be 100644 --- a/chroot/abstract.py +++ b/chroot/abstract.py @@ -273,6 +273,13 @@ class Chroot(AbstractChroot): fail_if_mounted=fail_if_mounted, ) + def mount_chroots(self, fail_if_mounted: bool = False) -> str: + return self.mount( + absolute_source=config.get_path('chroots'), + relative_destination=CHROOT_PATHS['chroots'].lstrip('/'), + fail_if_mounted=fail_if_mounted, + ) + def write_makepkg_conf(self, target_arch: Arch, cross_chroot_relative: Optional[str], cross: bool = True) -> str: """ Generate a `makepkg.conf` or `makepkg_cross_$arch.conf` file in /etc. 
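A minimal, standalone sketch of the discovery filter the patch above adds to discover_pkgbuilds(): only repository subdirectories that actually contain a PKGBUILD file are kept, everything else is logged and skipped. The REPOSITORIES list and the pkgbuilds_dir layout (<repo>/<package>/PKGBUILD) below are assumptions for illustration only; the real values come from constants.REPOSITORIES and the loaded config, and this is not the code the patch itself ships.

    import logging
    import os

    REPOSITORIES = ['main', 'device']  # assumed subset, for illustration only


    def scan_pkgbuild_dirs(pkgbuilds_dir: str) -> list[str]:
        """Return relative paths of package dirs that contain a PKGBUILD, skipping the rest."""
        paths = []
        for repo in REPOSITORIES:
            repo_dir = os.path.join(pkgbuilds_dir, repo)
            for entry in sorted(os.listdir(repo_dir)):
                relative = os.path.join(repo, entry)
                if not os.path.exists(os.path.join(pkgbuilds_dir, relative, 'PKGBUILD')):
                    # mirrors the warning-and-skip behaviour added to discover_pkgbuilds()
                    logging.warning(f"{relative} doesn't include a PKGBUILD file; skipping")
                    continue
                paths.append(relative)
        return paths

The point of the filter is that stray directories (editor backups, work-in-progress checkouts) in the pkgbuilds tree no longer abort parsing; they are reported and ignored instead.
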
From 4285cf734c71574d87bf021bb590aa960432e8e0 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 29 Aug 2022 04:21:37 +0200 Subject: [PATCH 28/44] config: introduce per-arch persisted ccache dir --- chroot/build.py | 10 ++++++++++ config/scheme.py | 1 + config/state.py | 1 + packages/__init__.py | 6 +++++- 4 files changed, 17 insertions(+), 1 deletion(-) diff --git a/chroot/build.py b/chroot/build.py index 4118957..0dd698f 100644 --- a/chroot/build.py +++ b/chroot/build.py @@ -132,6 +132,16 @@ class BuildChroot(Chroot): fail_if_mounted=fail_if_mounted, ) + def mount_ccache(self, user: str = 'kupfer', fail_if_mounted: bool = False): + mount_source = os.path.join(config.file.paths.ccache, self.arch) + mount_dest = os.path.join(f'/home/{user}' if user != 'root' else '/root', '.ccache') + makedir(mount_source) + return self.mount( + absolute_source=mount_source, + relative_destination=mount_dest, + fail_if_mounted=fail_if_mounted, + ) + def get_build_chroot(arch: Arch, add_kupfer_repos: bool = True, **kwargs) -> BuildChroot: name = build_chroot_name(arch) diff --git a/config/scheme.py b/config/scheme.py index 3add208..26a9b6a 100644 --- a/config/scheme.py +++ b/config/scheme.py @@ -141,6 +141,7 @@ class PathsSection(DataClass): pkgbuilds: str jumpdrive: str images: str + ccache: str class ProfilesSection(DataClass): diff --git a/config/state.py b/config/state.py index 0ba9215..0e73831 100644 --- a/config/state.py +++ b/config/state.py @@ -42,6 +42,7 @@ CONFIG_DEFAULTS_DICT = { 'pkgbuilds': os.path.join('%cache_dir%', 'pkgbuilds'), 'jumpdrive': os.path.join('%cache_dir%', 'jumpdrive'), 'images': os.path.join('%cache_dir%', 'images'), + 'ccache': os.path.join('%cache_dir%', 'ccache'), }, 'profiles': { 'current': 'default', diff --git a/packages/__init__.py b/packages/__init__.py index 667d2b6..bc0f99a 100644 --- a/packages/__init__.py +++ b/packages/__init__.py @@ -454,6 +454,7 @@ def build_package( enable_crossdirect: bool = True, enable_ccache: bool = True, clean_chroot: bool = False, + build_user: str = 'kupfer', ): makepkg_compile_opts = ['--holdver'] makepkg_conf_path = 'etc/makepkg.conf' @@ -482,6 +483,7 @@ def build_package( env = deepcopy(get_makepkg_env(arch)) if enable_ccache: env['PATH'] = f"/usr/lib/ccache:{env['PATH']}" + native_chroot.mount_ccache(user=build_user) logging.info('Setting up dependencies for cross-compilation') # include crossdirect for ccache symlinks and qemu-user results = native_chroot.try_install_packages(package.depends + CROSSDIRECT_PKGS + [f"{GCC_HOSTSPECS[native_chroot.arch][arch]}-gcc"]) @@ -513,6 +515,8 @@ def build_package( if failed_deps: raise Exception(f'Dependencies failed to install: {failed_deps}') + if enable_ccache: + build_root.mount_ccache(user=build_user) setup_git_insecure_paths(build_root) makepkg_conf_absolute = os.path.join('/', makepkg_conf_path) setup_sources(package, build_root, makepkg_conf_path=makepkg_conf_absolute) @@ -523,7 +527,7 @@ def build_package( build_cmd, inner_env=env, cwd=os.path.join(CHROOT_PATHS['pkgbuilds'], package.path), - switch_user='kupfer', + switch_user=build_user, ) assert isinstance(result, subprocess.CompletedProcess) if result.returncode != 0: From 2ef5f27c6cf388e6b9308f48eff56b9a6bee94d2 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 29 Aug 2022 04:43:12 +0200 Subject: [PATCH 29/44] config: introduce rust cache --- chroot/build.py | 14 ++++++++++++++ config/scheme.py | 1 + config/state.py | 1 + packages/__init__.py | 4 ++++ 4 files changed, 20 insertions(+) diff --git a/chroot/build.py 
b/chroot/build.py index 0dd698f..21d17a0 100644 --- a/chroot/build.py +++ b/chroot/build.py @@ -142,6 +142,20 @@ class BuildChroot(Chroot): fail_if_mounted=fail_if_mounted, ) + def mount_rust(self, user: str = 'kupfer', fail_if_mounted: bool = False) -> list[str]: + results = [] + mount_source_base = config.file.paths.rust # apparently arch-agnostic + for rust_dir in ['cargo', 'rustup']: + mount_source = os.path.join(mount_source_base, rust_dir) + mount_dest = os.path.join(f'/home/{user}' if user != 'root' else '/root', f'.{rust_dir}') + makedir(mount_source) + results.append(self.mount( + absolute_source=mount_source, + relative_destination=mount_dest, + fail_if_mounted=fail_if_mounted, + )) + return results + def get_build_chroot(arch: Arch, add_kupfer_repos: bool = True, **kwargs) -> BuildChroot: name = build_chroot_name(arch) diff --git a/config/scheme.py b/config/scheme.py index 26a9b6a..37569d9 100644 --- a/config/scheme.py +++ b/config/scheme.py @@ -142,6 +142,7 @@ class PathsSection(DataClass): jumpdrive: str images: str ccache: str + rust: str class ProfilesSection(DataClass): diff --git a/config/state.py b/config/state.py index 0e73831..52d6c8e 100644 --- a/config/state.py +++ b/config/state.py @@ -43,6 +43,7 @@ CONFIG_DEFAULTS_DICT = { 'jumpdrive': os.path.join('%cache_dir%', 'jumpdrive'), 'images': os.path.join('%cache_dir%', 'images'), 'ccache': os.path.join('%cache_dir%', 'ccache'), + 'rust': os.path.join('%cache_dir%', 'rust'), }, 'profiles': { 'current': 'default', diff --git a/packages/__init__.py b/packages/__init__.py index bc0f99a..1203493 100644 --- a/packages/__init__.py +++ b/packages/__init__.py @@ -461,6 +461,8 @@ def build_package( repo_dir = repo_dir if repo_dir else config.get_path('pkgbuilds') foreign_arch = config.runtime.arch != arch deps = (list(set(package.depends) - set(package.names()))) + needs_rust = 'rust' in deps + build_root: BuildChroot target_chroot = setup_build_chroot( arch=arch, extra_packages=deps, @@ -517,6 +519,8 @@ def build_package( if enable_ccache: build_root.mount_ccache(user=build_user) + if needs_rust: + build_root.mount_rust(user=build_user) setup_git_insecure_paths(build_root) makepkg_conf_absolute = os.path.join('/', makepkg_conf_path) setup_sources(package, build_root, makepkg_conf_path=makepkg_conf_absolute) From 58fd21256059d13cb260abff914acfa26cdf407e Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 29 Aug 2022 04:50:00 +0200 Subject: [PATCH 30/44] Dockerfile: clean up pkgconfig-aarch64 leftover --- Dockerfile | 6 ------ local/update-pacman-files.sh | 32 -------------------------------- 2 files changed, 38 deletions(-) delete mode 100755 local/update-pacman-files.sh diff --git a/Dockerfile b/Dockerfile index ec3cea4..b391514 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,12 +12,6 @@ RUN pacman-key --init && \ RUN sed -i "s/EUID == 0/EUID == -1/g" $(which makepkg) -RUN cd /tmp && \ - git clone https://aur.archlinux.org/aarch64-linux-gnu-pkg-config.git && \ - cd aarch64-linux-gnu-pkg-config && \ - makepkg -s --skippgpcheck && \ - pacman -U --noconfirm *.pkg* - RUN yes | pacman -Scc RUN sed -i "s/SigLevel.*/SigLevel = Never/g" /etc/pacman.conf diff --git a/local/update-pacman-files.sh b/local/update-pacman-files.sh deleted file mode 100755 index 28ed175..0000000 --- a/local/update-pacman-files.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/sh -set -e - -wget https://raw.githubusercontent.com/archlinuxarm/PKGBUILDs/master/core/pacman/makepkg.conf -O etc/makepkg.conf -sed -i "s/@CARCH@/aarch64/g" etc/makepkg.conf -sed -i 
"s/@CHOST@/aarch64-unknown-linux-gnu/g" etc/makepkg.conf -sed -i "s/@CARCHFLAGS@/-march=armv8-a /g" etc/makepkg.conf -sed -i "s/xz /xz -T0 /g" etc/makepkg.conf -sed -i "s/ check / !check /g" etc/makepkg.conf -chroot="/chroot/base_aarch64" -include="-I\${CROOT}/usr/include -I$chroot/usr/include" -lib_croot="\${CROOT}/lib" -lib_chroot="$chroot/usr/lib" -cat >>etc/makepkg.conf < Date: Mon, 29 Aug 2022 19:05:02 +0200 Subject: [PATCH 31/44] requirements.txt: add setuptools required by munch --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 76cf8ca..b0b0ff1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,3 +5,4 @@ toml typing_extensions coloredlogs munch +setuptools # required by munch From ba58aa1a291a0b509f0c56ce708c74705f92fa84 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 29 Aug 2022 19:52:12 +0200 Subject: [PATCH 32/44] wrapper: add WRAPPER_PATHS to point ccache and rust to predictable locations --- wrapper/docker.py | 5 ++--- wrapper/wrapper.py | 7 ++++++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/wrapper/docker.py b/wrapper/docker.py index d6215fb..1f37df2 100644 --- a/wrapper/docker.py +++ b/wrapper/docker.py @@ -5,10 +5,9 @@ import subprocess import sys from config import config -from constants import CHROOT_PATHS -from .wrapper import BaseWrapper +from .wrapper import BaseWrapper, WRAPPER_PATHS -DOCKER_PATHS = CHROOT_PATHS.copy() +DOCKER_PATHS = WRAPPER_PATHS.copy() def docker_volumes_args(volume_mappings: dict[str, str]) -> list[str]: diff --git a/wrapper/wrapper.py b/wrapper/wrapper.py index 5a0adb3..06d0d23 100644 --- a/wrapper/wrapper.py +++ b/wrapper/wrapper.py @@ -9,6 +9,11 @@ from config import config from config.state import dump_file as dump_config_file from constants import CHROOT_PATHS +WRAPPER_PATHS = CHROOT_PATHS | { + 'ccache': '/ccache', + 'rust': '/rust', +} + class Wrapper(Protocol): """Wrappers wrap kupferbootstrap in some form of isolation from the host OS, i.e. 
docker or chroots""" @@ -63,7 +68,7 @@ class BaseWrapper(Wrapper): def generate_wrapper_config( self, target_path: str = '/tmp/kupferbootstrap', - paths: dict[str, str] = CHROOT_PATHS, + paths: dict[str, str] = WRAPPER_PATHS, config_overrides: dict[str, dict] = {}, ) -> str: wrapped_config = f'{target_path.rstrip("/")}/{self.identifier}_wrapped.toml' From 6c262600015f6e7ce832722d7bd326d4a0e91d22 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 29 Aug 2022 20:12:09 +0200 Subject: [PATCH 33/44] chroot: add chroot.get_uid(user: str), use in chroot.mount_{ccache,rust} to apply correct ownership --- chroot/abstract.py | 12 ++++++++++++ chroot/build.py | 6 ++++-- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/chroot/abstract.py b/chroot/abstract.py index 00b89be..eb33346 100644 --- a/chroot/abstract.py +++ b/chroot/abstract.py @@ -346,6 +346,18 @@ class Chroot(AbstractChroot): if result.returncode != 0: raise Exception(f'Failed to setup user {user} in self.name') + def get_uid(self, user: Union[str, int]) -> int: + if isinstance(user, int): + return user + if user == 'root': + return 0 + res = self.run_cmd(['id', '-u', user], capture_output=True) + assert isinstance(res, subprocess.CompletedProcess) + if res.returncode or not res.stdout: + raise Exception(f"chroot {self.name}: Couldnt detect uid for user {user}: {repr(res.stdout)}") + uid = res.stdout.decode() + return int(uid) + def add_sudo_config(self, config_name: str = 'wheel', privilegee: str = '%wheel', password_required: bool = True): if '.' in config_name: raise Exception(f"won't create sudoers.d file {config_name} since it will be ignored by sudo because it contains a dot!") diff --git a/chroot/build.py b/chroot/build.py index 21d17a0..1ecdc2d 100644 --- a/chroot/build.py +++ b/chroot/build.py @@ -135,7 +135,8 @@ class BuildChroot(Chroot): def mount_ccache(self, user: str = 'kupfer', fail_if_mounted: bool = False): mount_source = os.path.join(config.file.paths.ccache, self.arch) mount_dest = os.path.join(f'/home/{user}' if user != 'root' else '/root', '.ccache') - makedir(mount_source) + uid = self.get_uid(user) + makedir(mount_source, user=uid) return self.mount( absolute_source=mount_source, relative_destination=mount_dest, @@ -144,11 +145,12 @@ class BuildChroot(Chroot): def mount_rust(self, user: str = 'kupfer', fail_if_mounted: bool = False) -> list[str]: results = [] + uid = self.get_uid(user) mount_source_base = config.file.paths.rust # apparently arch-agnostic for rust_dir in ['cargo', 'rustup']: mount_source = os.path.join(mount_source_base, rust_dir) mount_dest = os.path.join(f'/home/{user}' if user != 'root' else '/root', f'.{rust_dir}') - makedir(mount_source) + makedir(mount_source, user=uid) results.append(self.mount( absolute_source=mount_source, relative_destination=mount_dest, From 86b4b30685a84728eef76b93b71baf31529334aa Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 29 Aug 2022 20:18:41 +0200 Subject: [PATCH 34/44] exec: makedir() accept Union[str, int] for user and group --- exec/file.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/exec/file.py b/exec/file.py index b3baf6b..7eeadc1 100644 --- a/exec/file.py +++ b/exec/file.py @@ -144,7 +144,7 @@ def remove_file(path: str, recursive=False): raise Exception(f"Unable to remove {path}: cmd returned {rc}") -def makedir(path, user: Optional[str] = None, group: Optional[str] = None, parents: bool = True): +def makedir(path, user: Optional[Union[str, int]] = None, group: Optional[Union[str, int]] = None, parents: bool = 
True): if not root_check_exists(path): try: if parents: From cc2e24285fb2ff9fe3b6208886fea964fdbd3a0d Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 29 Aug 2022 23:30:00 +0200 Subject: [PATCH 35/44] image.py: use Device instead of the device name from config --- boot.py | 8 +++++--- flash.py | 6 ++++-- image.py | 38 +++++++++++++++++++++----------------- 3 files changed, 30 insertions(+), 22 deletions(-) diff --git a/boot.py b/boot.py index 0793b8b..593f361 100644 --- a/boot.py +++ b/boot.py @@ -6,7 +6,8 @@ from config import config from constants import BOOT_STRATEGIES, FLASH_PARTS, FASTBOOT, JUMPDRIVE, JUMPDRIVE_VERSION from exec.file import makedir from fastboot import fastboot_boot, fastboot_erase_dtbo -from image import get_device_and_flavour, losetup_rootfs_image, get_image_path, dump_aboot, dump_lk2nd +from image import get_flavour, get_device_name, losetup_rootfs_image, get_image_path, dump_aboot, dump_lk2nd +from packages.device import get_profile_device from wrapper import enforce_wrap LK2ND = FLASH_PARTS['LK2ND'] @@ -20,7 +21,8 @@ TYPES = [LK2ND, JUMPDRIVE, ABOOT] def cmd_boot(type): """Boot JumpDrive or the Kupfer aboot image. Erases Android DTBO in the process.""" enforce_wrap() - device, flavour = get_device_and_flavour() + device = get_profile_device() + flavour = get_flavour() # TODO: parse arch and sector size sector_size = 4096 image_path = get_image_path(device, flavour) @@ -28,7 +30,7 @@ def cmd_boot(type): if strategy == FASTBOOT: if type == JUMPDRIVE: - file = f'boot-{device}.img' + file = f'boot-{get_device_name(device)}.img' path = os.path.join(config.get_path('jumpdrive'), file) makedir(os.path.dirname(path)) if not os.path.exists(path): diff --git a/flash.py b/flash.py index 72a9ea2..0afd3fd 100644 --- a/flash.py +++ b/flash.py @@ -6,7 +6,8 @@ from constants import FLASH_PARTS, LOCATIONS from exec.cmd import run_root_cmd from exec.file import get_temp_dir from fastboot import fastboot_flash -from image import dd_image, partprobe, shrink_fs, losetup_rootfs_image, losetup_destroy, dump_aboot, dump_lk2nd, dump_qhypstub, get_device_and_flavour, get_image_name, get_image_path +from image import dd_image, partprobe, shrink_fs, losetup_rootfs_image, losetup_destroy, dump_aboot, dump_lk2nd, dump_qhypstub, get_flavour, get_image_name, get_image_path +from packages.device import get_profile_device from wrapper import enforce_wrap ABOOT = FLASH_PARTS['ABOOT'] @@ -21,7 +22,8 @@ ROOTFS = FLASH_PARTS['ROOTFS'] def cmd_flash(what: str, location: str): """Flash a partition onto a device. 
`location` takes either a path to a block device or one of emmc, sdcard""" enforce_wrap() - device, flavour = get_device_and_flavour() + device = get_profile_device() + flavour = get_flavour() device_image_name = get_image_name(device, flavour) device_image_path = get_image_path(device, flavour) diff --git a/image.py b/image.py index adb44dc..3e8677a 100644 --- a/image.py +++ b/image.py @@ -7,16 +7,16 @@ import click import logging from signal import pause from subprocess import CompletedProcess -from typing import Optional +from typing import Optional, Union from chroot.device import DeviceChroot, get_device_chroot -from constants import Arch, BASE_PACKAGES, DEVICES, FLAVOURS +from constants import Arch, BASE_PACKAGES, FLAVOURS from config import config, Profile from distro.distro import get_base_distro, get_kupfer_https from exec.cmd import run_root_cmd, generate_cmd_su from exec.file import root_write_file, root_makedir, makedir from packages import build_enable_qemu_binfmt, build_packages_by_paths -from packages.device import get_profile_device +from packages.device import Device, get_profile_device from ssh import copy_ssh_keys from wrapper import check_programs_wrap, wrap_if_foreign_arch @@ -131,23 +131,25 @@ def losetup_destroy(loop_device): ) -def get_device_and_flavour(profile_name: Optional[str] = None) -> tuple[str, str]: +def get_flavour(profile_name: Optional[str] = None) -> str: config.enforce_config_loaded() profile = config.get_profile(profile_name) - if not profile['device']: - raise Exception("Please set the device using 'kupferbootstrap config init ...'") if not profile['flavour']: raise Exception("Please set the flavour using 'kupferbootstrap config init ...'") - return (profile['device'], profile['flavour']) + return profile['flavour'] -def get_image_name(device, flavour, img_type='full') -> str: - return f'{device}-{flavour}-{img_type}.img' +def get_device_name(device: Union[str, Device]) -> str: + return device.name if isinstance(device, Device) else device -def get_image_path(device, flavour, img_type='full') -> str: +def get_image_name(device: Union[str, Device], flavour, img_type='full') -> str: + return f'{get_device_name(device)}-{flavour}-{img_type}.img' + + +def get_image_path(device: Union[str, Device], flavour, img_type='full') -> str: return os.path.join(config.get_path('images'), get_image_name(device, flavour, img_type)) @@ -299,7 +301,7 @@ def create_boot_fs(device: str, blocksize: int): def install_rootfs( rootfs_device: str, bootfs_device: str, - device: str, + device: Union[str, Device], flavour: str, arch: Arch, packages: list[str], @@ -308,7 +310,7 @@ def install_rootfs( ): user = profile['username'] or 'kupfer' post_cmds = FLAVOURS[flavour].get('post_cmds', []) - chroot = get_device_chroot(device=device, flavour=flavour, arch=arch, packages=packages, use_local_repos=use_local_repos) + chroot = get_device_chroot(device=get_device_name(device), flavour=flavour, arch=arch, packages=packages, use_local_repos=use_local_repos) mount_chroot(rootfs_device, bootfs_device, chroot) @@ -388,16 +390,17 @@ def cmd_build(profile_name: str = None, Unless overriden, required packages will be built or preferably downloaded from HTTPS repos. 
""" - arch = get_profile_device(profile_name).arch + device = get_profile_device(profile_name) + arch = device.arch check_programs_wrap(['makepkg', 'pacman', 'pacstrap']) profile: Profile = config.get_profile(profile_name) - device, flavour = get_device_and_flavour(profile_name) + flavour = get_flavour(profile_name) size_extra_mb: int = int(profile["size_extra_mb"]) sector_size = 4096 rootfs_size_mb = FLAVOURS[flavour].get('size', 2) * 1000 - packages = BASE_PACKAGES + DEVICES[device] + FLAVOURS[flavour]['packages'] + profile['pkgs_include'] + packages = BASE_PACKAGES + [device.package.name] + FLAVOURS[flavour]['packages'] + profile['pkgs_include'] if arch != config.runtime.arch: build_enable_qemu_binfmt(arch) @@ -459,9 +462,10 @@ def cmd_build(profile_name: str = None, @click.argument('profile', required=False) def cmd_inspect(profile: str = None, shell: bool = False): """Open a shell in a device image""" - arch = get_profile_device(profile).arch + device = get_profile_device(profile) + arch = device.arch wrap_if_foreign_arch(arch) - device, flavour = get_device_and_flavour(profile) + flavour = get_flavour(profile) # TODO: PARSE DEVICE SECTOR SIZE sector_size = 4096 chroot = get_device_chroot(device, flavour, arch) From d89ad54fc5a60db8a4f70521cd2cf8790fa3c63c Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 29 Aug 2022 23:30:22 +0200 Subject: [PATCH 36/44] constants.py: remove DEVICES array, now comes from pkgbuilds.git --- constants.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/constants.py b/constants.py index 7495207..e4c652d 100644 --- a/constants.py +++ b/constants.py @@ -23,14 +23,6 @@ BOOT_STRATEGIES: dict[str, str] = { 'bq-paella': FASTBOOT, } -DEVICES: dict[str, list[str]] = { - 'oneplus-enchilada': ['device-sdm845-oneplus-enchilada'], - 'oneplus-fajita': ['device-sdm845-oneplus-fajita'], - 'xiaomi-beryllium-ebbg': ['device-sdm845-xiaomi-beryllium-ebbg'], - 'xiaomi-beryllium-tianma': ['device-sdm845-xiaomi-beryllium-tianma'], - 'bq-paella': ['device-msm8916-bq-paella'], -} - BASE_PACKAGES: list[str] = [ 'base', 'base-kupfer', From e00160f6df504a71057b6e69aff9e8df047f5055 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Tue, 30 Aug 2022 00:20:04 +0200 Subject: [PATCH 37/44] packages: move filter_packages() to pkgbuild, rename to filter_pkgbuilds() --- packages/__init__.py | 44 +++----------------------------------------- packages/pkgbuild.py | 40 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 42 insertions(+), 42 deletions(-) diff --git a/packages/__init__.py b/packages/__init__.py index 1203493..ad46088 100644 --- a/packages/__init__.py +++ b/packages/__init__.py @@ -22,7 +22,7 @@ from ssh import run_ssh_command, scp_put_files from wrapper import enforce_wrap, check_programs_wrap, wrap_if_foreign_arch from utils import git -from .pkgbuild import discover_pkgbuilds, init_pkgbuilds, Pkgbuild +from .pkgbuild import discover_pkgbuilds, filter_pkgbuilds, init_pkgbuilds, Pkgbuild from .device import get_profile_device pacman_cmd = [ @@ -77,44 +77,6 @@ def init_prebuilts(arch: Arch, dir: str = None): raise Exception(f'Failed to create local repo {repo}') -def filter_packages( - paths: Iterable[str], - repo: Optional[dict[str, Pkgbuild]] = None, - arch: Optional[Arch] = None, - allow_empty_results=True, - use_paths=True, - use_names=True, -) -> Iterable[Pkgbuild]: - if not (use_names or use_paths): - raise Exception('Error: filter_packages instructed to match neither by names nor paths; impossible!') - if not allow_empty_results and not paths: - raise 
Exception("Can't search for packages: no query given") - repo = repo or discover_pkgbuilds() - if 'all' in paths: - all_pkgs = list(repo.values()) - if arch: - all_pkgs = [pkg for pkg in all_pkgs if set([arch, 'any']).intersection(pkg.arches)] - return all_pkgs - result = [] - for pkg in repo.values(): - comparison = set() - if use_paths: - comparison.add(pkg.path) - if use_names: - comparison.add(pkg.name) - matches = list(comparison.intersection(paths)) - if matches: - assert pkg.arches - if arch and not set([arch, 'any']).intersection(pkg.arches): - logging.warn(f"Pkg {pkg.name} matches query {matches[0]} but isn't available for architecture {arch}: {pkg.arches}") - continue - result += [pkg] - - if not allow_empty_results and not result: - raise Exception('No packages matched by paths: ' + ', '.join([f'"{p}"' for p in paths])) - return result - - def generate_dependency_chain(package_repo: dict[str, Pkgbuild], to_build: Iterable[Pkgbuild]) -> list[set[Pkgbuild]]: """ This figures out all dependencies and their sub-dependencies for the selection and adds those packages to the selection. @@ -649,7 +611,7 @@ def build_packages_by_paths( assert config.runtime.arch for _arch in set([arch, config.runtime.arch]): init_prebuilts(_arch) - packages = filter_packages(paths, arch=arch, repo=repo, allow_empty_results=False) + packages = filter_pkgbuilds(paths, arch=arch, repo=repo, allow_empty_results=False) return build_packages( packages, arch, @@ -858,7 +820,7 @@ def cmd_check(paths): return False paths = list(paths) - packages = filter_packages(paths, allow_empty_results=False) + packages = filter_pkgbuilds(paths, allow_empty_results=False) for package in packages: name = package.name diff --git a/packages/pkgbuild.py b/packages/pkgbuild.py index f0e7af5..4c02c54 100644 --- a/packages/pkgbuild.py +++ b/packages/pkgbuild.py @@ -7,7 +7,7 @@ import os import subprocess from joblib import Parallel, delayed -from typing import Optional +from typing import Iterable, Optional from config import config, ConfigStateHolder from constants import REPOSITORIES @@ -358,3 +358,41 @@ def discover_pkgbuilds(parallel: bool = True, lazy: bool = True) -> dict[str, Pk _pkgbuilds_cache.update(packages) _pkgbuilds_scanned = True return packages + + +def filter_pkgbuilds( + paths: Iterable[str], + repo: Optional[dict[str, Pkgbuild]] = None, + arch: Optional[Arch] = None, + allow_empty_results=True, + use_paths=True, + use_names=True, +) -> Iterable[Pkgbuild]: + if not (use_names or use_paths): + raise Exception('Error: filter_packages instructed to match neither by names nor paths; impossible!') + if not allow_empty_results and not paths: + raise Exception("Can't search for packages: no query given") + repo = repo or discover_pkgbuilds() + if 'all' in paths: + all_pkgs = list(repo.values()) + if arch: + all_pkgs = [pkg for pkg in all_pkgs if set([arch, 'any']).intersection(pkg.arches)] + return all_pkgs + result = [] + for pkg in repo.values(): + comparison = set() + if use_paths: + comparison.add(pkg.path) + if use_names: + comparison.add(pkg.name) + matches = list(comparison.intersection(paths)) + if matches: + assert pkg.arches + if arch and not set([arch, 'any']).intersection(pkg.arches): + logging.warn(f"Pkg {pkg.name} matches query {matches[0]} but isn't available for architecture {arch}: {pkg.arches}") + continue + result += [pkg] + + if not allow_empty_results and not result: + raise Exception('No packages matched by paths: ' + ', '.join([f'"{p}"' for p in paths])) + return result From 
b86b7c94f030096cdc7960065b5243ff7e6c4642 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Tue, 30 Aug 2022 01:29:12 +0200 Subject: [PATCH 38/44] config: DataClass.transform(): add allow_extra=False parameter --- config/scheme.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/config/scheme.py b/config/scheme.py index 37569d9..bb5ff5b 100644 --- a/config/scheme.py +++ b/config/scheme.py @@ -30,7 +30,7 @@ class DataClass(Munch): self.update(d | kwargs, validate=validate) @classmethod - def transform(cls, values: Mapping[str, Any], validate: bool = True) -> Any: + def transform(cls, values: Mapping[str, Any], validate: bool = True, allow_extra: bool = False) -> Any: results = {} values = dict(values) for key in list(values.keys()): @@ -48,7 +48,7 @@ class DataClass(Munch): if validate: if not isinstance(value, _classes): raise Exception(f'key "{key}" has value of wrong type {_classes}: {value}') - elif validate: + elif validate and not allow_extra: raise Exception(f'Unknown key "{key}"') else: if isinstance(value, dict) and not isinstance(value, Munch): @@ -150,12 +150,14 @@ class ProfilesSection(DataClass): default: SparseProfile @classmethod - def transform(cls, values: Mapping[str, Any], validate: bool = True): + def transform(cls, values: Mapping[str, Any], validate: bool = True, allow_extra: bool = True): results = {} for k, v in values.items(): if k == 'current': results[k] = v continue + if not allow_extra and k != 'default': + raise Exception(f'Unknown key {k} in profiles section (Hint: extra_keys not allowed for some reason)') if not isinstance(v, dict): raise Exception(f'profile {v} is not a dict!') results[k] = SparseProfile.fromDict(v, validate=True) @@ -178,7 +180,13 @@ class Config(DataClass): profiles: ProfilesSection @classmethod - def fromDict(cls, values: Mapping[str, Any], validate: bool = True, allow_incomplete: bool = False): + def fromDict( + cls, + values: Mapping[str, Any], + validate: bool = True, + allow_extra: bool = False, + allow_incomplete: bool = False, + ): values = dict(values) # copy for later modification _vals = {} for name, _class in cls._type_hints.items(): From 483375397593871c47c72a36bef7ca0e18fb2aa4 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Tue, 30 Aug 2022 01:37:09 +0200 Subject: [PATCH 39/44] config/scheme: move DataClass to dataclass.py --- config/scheme.py | 73 ++---------------------------------------------- dataclass.py | 72 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+), 71 deletions(-) create mode 100644 dataclass.py diff --git a/config/scheme.py b/config/scheme.py index bb5ff5b..0223208 100644 --- a/config/scheme.py +++ b/config/scheme.py @@ -1,81 +1,12 @@ from __future__ import annotations -from dataclasses import dataclass -from typing import Optional, Union, Mapping, Any, get_type_hints, get_origin, get_args, Iterable from munch import Munch +from typing import Any, Optional, Mapping, Union +from dataclass import DataClass, munchclass from constants import Arch -def munchclass(*args, init=False, **kwargs): - return dataclass(*args, init=init, slots=True, **kwargs) - - -def resolve_type_hint(hint: type): - origin = get_origin(hint) - args: Iterable[type] = get_args(hint) - if origin is Optional: - args = set(list(args) + [type(None)]) - if origin in [Union, Optional]: - results = [] - for arg in args: - results += resolve_type_hint(arg) - return results - return [origin or hint] - - -class DataClass(Munch): - - def __init__(self, d: dict = {}, validate: bool = 
True, **kwargs): - self.update(d | kwargs, validate=validate) - - @classmethod - def transform(cls, values: Mapping[str, Any], validate: bool = True, allow_extra: bool = False) -> Any: - results = {} - values = dict(values) - for key in list(values.keys()): - value = values.pop(key) - type_hints = cls._type_hints - if key in type_hints: - _classes = tuple(resolve_type_hint(type_hints[key])) - if issubclass(_classes[0], dict): - assert isinstance(value, dict) - target_class = _classes[0] - if not issubclass(_classes[0], Munch): - target_class = DataClass - if not isinstance(value, target_class): - value = target_class.fromDict(value, validate=validate) - if validate: - if not isinstance(value, _classes): - raise Exception(f'key "{key}" has value of wrong type {_classes}: {value}') - elif validate and not allow_extra: - raise Exception(f'Unknown key "{key}"') - else: - if isinstance(value, dict) and not isinstance(value, Munch): - value = Munch.fromDict(value) - results[key] = value - if values: - if validate: - raise Exception(f'values contained unknown keys: {list(values.keys())}') - results |= values - - return results - - @classmethod - def fromDict(cls, values: Mapping[str, Any], validate: bool = True): - return cls(**cls.transform(values, validate)) - - def update(self, d: Mapping[str, Any], validate: bool = True): - Munch.update(self, type(self).transform(d, validate)) - - def __init_subclass__(cls): - super().__init_subclass__() - cls._type_hints = get_type_hints(cls) - - def __repr__(self): - return f'{type(self)}{dict.__repr__(self.toDict())}' - - @munchclass() class SparseProfile(DataClass): parent: Optional[str] diff --git a/dataclass.py b/dataclass.py new file mode 100644 index 0000000..8a19d45 --- /dev/null +++ b/dataclass.py @@ -0,0 +1,72 @@ +from dataclasses import dataclass +from munch import Munch +from typing import Optional, Union, Mapping, Any, get_type_hints, get_origin, get_args, Iterable + + +def munchclass(*args, init=False, **kwargs): + return dataclass(*args, init=init, slots=True, **kwargs) + + +def resolve_type_hint(hint: type): + origin = get_origin(hint) + args: Iterable[type] = get_args(hint) + if origin is Optional: + args = set(list(args) + [type(None)]) + if origin in [Union, Optional]: + results = [] + for arg in args: + results += resolve_type_hint(arg) + return results + return [origin or hint] + + +class DataClass(Munch): + + def __init__(self, d: dict = {}, validate: bool = True, **kwargs): + self.update(d | kwargs, validate=validate) + + @classmethod + def transform(cls, values: Mapping[str, Any], validate: bool = True, allow_extra: bool = False) -> Any: + results = {} + values = dict(values) + for key in list(values.keys()): + value = values.pop(key) + type_hints = cls._type_hints + if key in type_hints: + _classes = tuple(resolve_type_hint(type_hints[key])) + if issubclass(_classes[0], dict): + assert isinstance(value, dict) + target_class = _classes[0] + if not issubclass(_classes[0], Munch): + target_class = DataClass + if not isinstance(value, target_class): + value = target_class.fromDict(value, validate=validate) + if validate: + if not isinstance(value, _classes): + raise Exception(f'key "{key}" has value of wrong type {_classes}: {value}') + elif validate and not allow_extra: + raise Exception(f'Unknown key "{key}"') + else: + if isinstance(value, dict) and not isinstance(value, Munch): + value = Munch.fromDict(value) + results[key] = value + if values: + if validate: + raise Exception(f'values contained unknown keys: {list(values.keys())}') 
+ results |= values + + return results + + @classmethod + def fromDict(cls, values: Mapping[str, Any], validate: bool = True): + return cls(**cls.transform(values, validate)) + + def update(self, d: Mapping[str, Any], validate: bool = True): + Munch.update(self, type(self).transform(d, validate)) + + def __init_subclass__(cls): + super().__init_subclass__() + cls._type_hints = get_type_hints(cls) + + def __repr__(self): + return f'{type(self)}{dict.__repr__(self.toDict())}' From 156612bf73f3f973d327393019ed7e613b9abf06 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Tue, 30 Aug 2022 03:12:39 +0200 Subject: [PATCH 40/44] dataclass.resolve_type_hint(): add conversion from str to [int,float] if str not in types --- dataclass.py | 32 ++++++++++++++++++++++++++------ 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/dataclass.py b/dataclass.py index 8a19d45..dbe0c18 100644 --- a/dataclass.py +++ b/dataclass.py @@ -7,13 +7,13 @@ def munchclass(*args, init=False, **kwargs): return dataclass(*args, init=init, slots=True, **kwargs) -def resolve_type_hint(hint: type): +def resolve_type_hint(hint: type) -> Iterable[type]: origin = get_origin(hint) args: Iterable[type] = get_args(hint) if origin is Optional: args = set(list(args) + [type(None)]) if origin in [Union, Optional]: - results = [] + results: list[type] = [] for arg in args: results += resolve_type_hint(arg) return results @@ -33,14 +33,34 @@ class DataClass(Munch): value = values.pop(key) type_hints = cls._type_hints if key in type_hints: - _classes = tuple(resolve_type_hint(type_hints[key])) + _classes = tuple[type](resolve_type_hint(type_hints[key])) if issubclass(_classes[0], dict): assert isinstance(value, dict) target_class = _classes[0] - if not issubclass(_classes[0], Munch): - target_class = DataClass + if target_class is dict: + target_class = Munch if not isinstance(value, target_class): - value = target_class.fromDict(value, validate=validate) + assert issubclass(target_class, Munch) + # despite the above assert, mypy doesn't seem to understand target_class is a Munch here + value = target_class.fromDict(value, validate=validate) # type:ignore[attr-defined] + # handle numerics + elif set(_classes).intersection([int, float]) and isinstance(value, str) and str not in _classes: + parsed_number = None + parsers: list[tuple[type, list]] = [(int, [10]), (int, [0]), (float, [])] + for _cls, args in parsers: + if _cls not in _classes: + continue + try: + parsed_number = _cls(value, *args) + break + except ValueError: + continue + if parsed_number is None: + if validate: + raise Exception(f"Couldn't parse string value {repr(value)} for key '{key}' into number formats: " + + (', '.join(list(c.__name__ for c in _classes)))) + else: + value = parsed_number if validate: if not isinstance(value, _classes): raise Exception(f'key "{key}" has value of wrong type {_classes}: {value}') From ccec875a0cd17760977b44dcbaf4020bd840c5c0 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Tue, 30 Aug 2022 15:51:55 +0200 Subject: [PATCH 41/44] wrapper/docker: fix indentation (only version needs to be pulled from `with open():`) --- wrapper/docker.py | 122 +++++++++++++++++++++++----------------------- 1 file changed, 61 insertions(+), 61 deletions(-) diff --git a/wrapper/docker.py b/wrapper/docker.py index 1f37df2..96c5576 100644 --- a/wrapper/docker.py +++ b/wrapper/docker.py @@ -24,71 +24,71 @@ class DockerWrapper(BaseWrapper): script_path = config.runtime.script_source_dir with open(os.path.join(script_path, 'version.txt')) as version_file: 
version = version_file.read().replace('\n', '') - tag = f'registry.gitlab.com/kupfer/kupferbootstrap:{version}' - if version == 'dev': - logging.info(f'Building docker image "{tag}"') - cmd = [ - 'docker', - 'build', - '.', - '-t', - tag, - ] + (['-q'] if not config.runtime.verbose else []) - logging.debug('Running docker cmd: ' + ' '.join(cmd)) - result = subprocess.run(cmd, cwd=script_path, capture_output=True) - if result.returncode != 0: - logging.fatal('Failed to build docker image:\n' + result.stderr.decode()) - exit(1) - else: - # Check if the image for the version already exists - result = subprocess.run( - [ - 'docker', - 'images', - '-q', - tag, - ], - capture_output=True, - ) - if result.stdout == b'': - logging.info(f'Pulling kupferbootstrap docker image version \'{version}\'') - subprocess.run([ - 'docker', - 'pull', - tag, - ]) - container_name = f'kupferbootstrap-{self.uuid}' - - wrapped_config = self.generate_wrapper_config() - - target_user = 'root' if config.runtime.uid == 0 else 'kupfer' - target_home = '/root' if target_user == 'root' else f'/home/{target_user}' - - ssh_dir = os.path.join(pathlib.Path.home(), '.ssh') - if not os.path.exists(ssh_dir): - os.makedirs(ssh_dir, mode=0o700) - volumes = self.get_bind_mounts_default(wrapped_config, ssh_dir=ssh_dir, target_home=target_home) - volumes |= dict({config.get_path(vol_name): vol_dest for vol_name, vol_dest in DOCKER_PATHS.items()}) - docker_cmd = [ + tag = f'registry.gitlab.com/kupfer/kupferbootstrap:{version}' + if version == 'dev': + logging.info(f'Building docker image "{tag}"') + cmd = [ 'docker', - 'run', - '--name', - container_name, - '--rm', - '--interactive', - '--tty', - '--privileged', - ] + docker_volumes_args(volumes) + [tag] + 'build', + '.', + '-t', + tag, + ] + (['-q'] if not config.runtime.verbose else []) + logging.debug('Running docker cmd: ' + ' '.join(cmd)) + result = subprocess.run(cmd, cwd=script_path, capture_output=True) + if result.returncode != 0: + logging.fatal('Failed to build docker image:\n' + result.stderr.decode()) + exit(1) + else: + # Check if the image for the version already exists + result = subprocess.run( + [ + 'docker', + 'images', + '-q', + tag, + ], + capture_output=True, + ) + if result.stdout == b'': + logging.info(f'Pulling kupferbootstrap docker image version \'{version}\'') + subprocess.run([ + 'docker', + 'pull', + tag, + ]) + container_name = f'kupferbootstrap-{self.uuid}' - kupfer_cmd = ['kupferbootstrap', '--config', volumes[wrapped_config]] + self.filter_args_wrapper(sys.argv[1:]) - if config.runtime.uid: - kupfer_cmd = ['wrapper_su_helper', '--uid', str(config.runtime.uid), '--username', 'kupfer', '--'] + kupfer_cmd + wrapped_config = self.generate_wrapper_config() - cmd = docker_cmd + kupfer_cmd - logging.debug('Wrapping in docker:' + repr(cmd)) - result = subprocess.run(cmd) + target_user = 'root' if config.runtime.uid == 0 else 'kupfer' + target_home = '/root' if target_user == 'root' else f'/home/{target_user}' - exit(result.returncode) + ssh_dir = os.path.join(pathlib.Path.home(), '.ssh') + if not os.path.exists(ssh_dir): + os.makedirs(ssh_dir, mode=0o700) + volumes = self.get_bind_mounts_default(wrapped_config, ssh_dir=ssh_dir, target_home=target_home) + volumes |= dict({config.get_path(vol_name): vol_dest for vol_name, vol_dest in DOCKER_PATHS.items()}) + docker_cmd = [ + 'docker', + 'run', + '--name', + container_name, + '--rm', + '--interactive', + '--tty', + '--privileged', + ] + docker_volumes_args(volumes) + [tag] + + kupfer_cmd = ['kupferbootstrap', 
'--config', volumes[wrapped_config]] + self.filter_args_wrapper(sys.argv[1:])
+        if config.runtime.uid:
+            kupfer_cmd = ['wrapper_su_helper', '--uid', str(config.runtime.uid), '--username', 'kupfer', '--'] + kupfer_cmd
+
+        cmd = docker_cmd + kupfer_cmd
+        logging.debug('Wrapping in docker:' + repr(cmd))
+        result = subprocess.run(cmd)
+
+        exit(result.returncode)
 
     def stop(self):
         subprocess.run(

From 4c3e264de38822ea26953c510f189bab75a487e8 Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Tue, 30 Aug 2022 16:01:05 +0200
Subject: [PATCH 42/44] wrapper/docker: create volume dirs ourselves for better
 permissions and podman compat

---
 wrapper/docker.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/wrapper/docker.py b/wrapper/docker.py
index 96c5576..02f7272 100644
--- a/wrapper/docker.py
+++ b/wrapper/docker.py
@@ -5,6 +5,8 @@ import subprocess
 import sys
 
 from config import config
+from exec.file import makedir
+
 from .wrapper import BaseWrapper, WRAPPER_PATHS
 
 DOCKER_PATHS = WRAPPER_PATHS.copy()
@@ -68,7 +70,10 @@ class DockerWrapper(BaseWrapper):
         if not os.path.exists(ssh_dir):
             os.makedirs(ssh_dir, mode=0o700)
         volumes = self.get_bind_mounts_default(wrapped_config, ssh_dir=ssh_dir, target_home=target_home)
-        volumes |= dict({config.get_path(vol_name): vol_dest for vol_name, vol_dest in DOCKER_PATHS.items()})
+        for vol_name, vol_dest in DOCKER_PATHS.items():
+            vol_src = config.get_path(vol_name)
+            makedir(vol_src)
+            volumes[vol_src] = vol_dest
         docker_cmd = [
             'docker',
             'run',

From f9cf76e9371a740706dd7db1df9371500e08b441 Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Sat, 3 Sep 2022 03:17:14 +0200
Subject: [PATCH 43/44] packages: check_package_version_built(): makedir() the
 other arch's repo dir before trying to copy our any-arch package there

---
 packages/__init__.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/packages/__init__.py b/packages/__init__.py
index ad46088..f921d23 100644
--- a/packages/__init__.py
+++ b/packages/__init__.py
@@ -339,9 +339,11 @@ def check_package_version_built(package: Pkgbuild, arch: Arch, try_download: boo
         for repo_arch in ARCHES:
             if repo_arch == arch:
                 continue  # we already have that
-            copy_target = os.path.join(config.get_package_dir(repo_arch), package.repo, filename)
+            repo_dir = os.path.join(config.get_package_dir(repo_arch), package.repo)
+            copy_target = os.path.join(repo_dir, filename)
             if not os.path.exists(copy_target):
                 logging.info(f"copying to {copy_target}")
+                makedir(repo_dir)
                 shutil.copyfile(target_repo_file, copy_target)
                 add_file_to_repo(copy_target, package.repo, repo_arch)
     if not missing:

From 0e103f5a40f4a3cddc79ae2f3a64fe715b1f8aed Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Sat, 3 Sep 2022 03:54:30 +0200
Subject: [PATCH 44/44] add_package_to_repo: create foreign arch repo dir
 before copying foreign-arch packages

---
 packages/__init__.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/packages/__init__.py b/packages/__init__.py
index f921d23..e06e912 100644
--- a/packages/__init__.py
+++ b/packages/__init__.py
@@ -259,7 +259,9 @@ def add_package_to_repo(package: Pkgbuild, arch: Arch):
             for repo_arch in ARCHES:
                 if repo_arch == arch:
                     continue
-                copy_target = os.path.join(config.get_package_dir(repo_arch), package.repo, file)
+                repo_dir = os.path.join(config.get_package_dir(repo_arch), package.repo)
+                makedir(repo_dir)
+                copy_target = os.path.join(repo_dir, file)
                 shutil.copy(repo_file, copy_target)
                 add_file_to_repo(copy_target, package.repo, repo_arch)
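
The dataclass.py introduced above derives both validation and conversion from class-level type hints: DataClass.transform() walks get_type_hints(cls), recurses into fields hinted as DataClass subclasses, and rejects unknown keys while validate is true. A minimal usage sketch of that pattern, assuming it is run from a checkout where dataclass.py is importable; the Wrapper and Settings classes and their fields are invented for illustration and are not kupferbootstrap config types:

# Illustrative only: Wrapper/Settings are made-up classes, not kupferbootstrap types.
from typing import Optional

from dataclass import DataClass, munchclass


@munchclass()
class Wrapper(DataClass):
    type: str
    uid: int


@munchclass()
class Settings(DataClass):
    name: str
    parent: Optional[str]
    wrapper: Wrapper


# Nested plain dicts are converted to the hinted DataClass subclass on the way in.
s = Settings.fromDict({
    'name': 'default',
    'parent': None,
    'wrapper': {'type': 'docker', 'uid': 1000},
})
assert isinstance(s.wrapper, Wrapper)
assert s.wrapper.uid == 1000

# Unknown keys are rejected while validate=True (the default).
try:
    Settings.fromDict({'name': 'broken', 'parent': None, 'wrapper': {'type': 'docker', 'uid': 0}, 'typo': 1})
except Exception as ex:
    print(f'rejected as expected: {ex}')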
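
The numeric fallback added in PATCH 40/44 only triggers when a field is hinted as int and/or float, str is not among the allowed types, and the incoming value is a string; it then tries int(value, 10), then int(value, 0) (which also accepts 0x/0o/0b-prefixed literals), then float(value). A standalone re-implementation of just that parser chain, for illustration; parse_number_like() is not a kupferbootstrap function:

# Mirrors the (parser, args) chain from DataClass.transform() in PATCH 40/44.
from typing import Union


def parse_number_like(value: str, classes: tuple) -> Union[int, float]:
    parsers: list[tuple[type, list]] = [(int, [10]), (int, [0]), (float, [])]
    for _cls, args in parsers:
        if _cls not in classes:
            continue
        try:
            return _cls(value, *args)
        except ValueError:
            continue
    raise ValueError(f"couldn't parse {value!r} into any of {classes}")


assert parse_number_like('42', (int,)) == 42
assert parse_number_like('0x10', (int,)) == 16        # base 0 accepts prefixed int literals
assert parse_number_like('1.5', (int, float)) == 1.5  # falls through to float()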
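
The loop added in PATCH 42/44 matters because neither the docker daemon nor rootless podman is a good owner for missing bind-mount sources: docker creates them root-owned on the host, and rootless podman refuses to mount paths that do not exist, so the wrapper now creates them itself before `docker run`. A standalone sketch of that pattern; the volume map, image tag and docker_volumes_args() below are stand-ins, not kupferbootstrap's actual helpers:

# Pre-create bind-mount sources as the invoking user before launching the container.
# Paths, image tag and docker_volumes_args() are illustrative stand-ins.
import os


def docker_volumes_args(volumes: dict) -> list:
    args = []
    for src, dest in volumes.items():
        args += ['-v', f'{src}:{dest}']
    return args


volumes = {
    os.path.expanduser('~/.cache/example-build/pacman'): '/var/cache/pacman/pkg',
    os.path.expanduser('~/.cache/example-build/packages'): '/prebuilts',
}

for vol_src in volumes:
    os.makedirs(vol_src, exist_ok=True)  # owned by the calling user, not created by the container runtime

cmd = ['docker', 'run', '--rm', '--interactive', '--tty'] + docker_volumes_args(volumes) + ['example/image:dev']
print(' '.join(cmd))  # or hand cmd to subprocess.run()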
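
PATCHES 43/44 and 44/44 share one root cause: shutil.copy() and shutil.copyfile() raise FileNotFoundError when the destination directory is missing, and a foreign arch's repo directory may never have been created when an 'any'-arch package gets mirrored into it, so both call sites now create it first. A condensed sketch of the pattern with illustrative paths; os.makedirs() stands in for kupferbootstrap's makedir() and the repo-database update is left out:

# Create the per-arch repo dir before copying; paths and names are illustrative.
import os
import shutil

ARCHES = ['x86_64', 'aarch64']


def mirror_to_foreign_repos(pkg_file: str, repo: str, native_arch: str, package_base: str = '/tmp/example-packages'):
    filename = os.path.basename(pkg_file)
    for repo_arch in ARCHES:
        if repo_arch == native_arch:
            continue  # the native repo already holds the file
        repo_dir = os.path.join(package_base, repo_arch, repo)
        copy_target = os.path.join(repo_dir, filename)
        if not os.path.exists(copy_target):
            os.makedirs(repo_dir, exist_ok=True)  # without this, copyfile() raises FileNotFoundError
            shutil.copyfile(pkg_file, copy_target)


# e.g. mirror_to_foreign_repos('/path/to/device-foo-1-1-any.pkg.tar.zst', 'device', 'aarch64')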