From 572142bf0b3d422ed64ba75b8885de27870a3be6 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 16 Apr 2023 03:28:33 +0200 Subject: [PATCH 01/82] dataclass: replace print spam with decent logging --- dataclass.py | 65 +++++++++++++++++++++++++--------------------------- 1 file changed, 31 insertions(+), 34 deletions(-) diff --git a/dataclass.py b/dataclass.py index bed0473..8ad9035 100644 --- a/dataclass.py +++ b/dataclass.py @@ -1,5 +1,6 @@ from __future__ import annotations +import logging import toml from dataclasses import dataclass @@ -41,7 +42,6 @@ def resolve_dict_hints(hints: Any) -> Generator[tuple[Any, ...], None, None]: t_origin = get_origin(hint) t_args = get_args(hint) if t_origin == dict: - print(f"Yielding {t_args=}") yield t_args continue if t_origin in [NoneType, Optional, Union, UnionType] and t_args: @@ -66,15 +66,17 @@ class DataClass(Munch): allow_extra: bool = False, type_hints: Optional[dict[str, Any]] = None, ) -> Any: - results = {} + results: dict[str, Any] = {} values = dict(values) - print(f"\ntransform function:\n{values}, {type_hints=}") for key in list(values.keys()): value = values.pop(key) type_hints = cls._type_hints if type_hints is None else type_hints if key in type_hints: _classes = tuple[type](resolve_type_hint(type_hints[key])) optional = NoneType in _classes + if optional and value is None: + results[key] = None + continue if issubclass(_classes[0], dict): assert isinstance(value, dict) or optional target_class = _classes[0] @@ -85,34 +87,37 @@ class DataClass(Munch): break if target_class is dict: dict_hints = list(resolve_dict_hints(type_hints[key])) - print(f"Got {key=} {dict_hints=}") if len(dict_hints) != 1: - print(f"Received wrong amount of type hints for key {key}: {len(dict_hints)}") + msg = f"transform(): Received wrong amount of type hints for key {key}: {len(dict_hints)}" + if validate: + raise Exception(msg) + logging.warning(msg) if len(dict_hints) == 1 and value is not None: if len(dict_hints[0]) != 2 or not all(dict_hints[0]): - print(f"Weird dict hints received: {dict_hints}") + logging.debug(f"Weird dict hints received: {dict_hints}") continue key_type, value_type = dict_hints[0] if not isinstance(value, Mapping): + msg = f"Got non-mapping {value!r} for expected dict type: {key_type} => {value_type}. Allowed classes: {_classes}" if validate: - raise Exception( - f"Got non-mapping {value!r} for expected dict type: {key_type} => {value_type}. Allowed classes: {_classes}") - print(f"Got non-mapping {value!r} for expected dict type: {key_type} => {value_type}. 
Allowed classes: {_classes}") + raise Exception(msg) + logging.warning(msg) results[key] = value continue if isinstance(key_type, type): if issubclass(key_type, str): target_class = Munch else: - print(f"{key=} DICT WRONG KEY TYPE: {key_type}") + msg = f"{key=} subdict got wrong key type hint (expected str): {key_type}" + if validate: + raise Exception(msg) + logging.warning(msg) if validate: for k in value: if not isinstance(k, tuple(flatten_hints(key_type))): raise Exception(f'Subdict "{key}": wrong type for subkey "{k}": got: {type(k)}, expected: {key_type}') dict_content_hints = {k: value_type for k in value} - print(f"tranforming: {value=} {dict_content_hints=}") value = cls.transform(value, validate=validate, allow_extra=allow_extra, type_hints=dict_content_hints) - print(f"tranformed: {value=}") if not isinstance(value, target_class): if not (optional and value is None): assert issubclass(target_class, Munch) @@ -120,7 +125,8 @@ class DataClass(Munch): kwargs = {'validate': validate} if issubclass(target_class, DataClass) else {} value = target_class(value, **kwargs) # type:ignore[attr-defined] else: - print(f"nothing to do: '{key}' was already {target_class}") + # print(f"nothing to do: '{key}' was already {target_class}) + pass # handle numerics elif set(_classes).intersection([int, float]) and isinstance(value, str) and str not in _classes: parsed_number = None @@ -145,7 +151,6 @@ class DataClass(Munch): f'{" ,".join([ c.__name__ for c in _classes])}; ' f'got: {type(value).__name__}; value: {value}') elif validate and not allow_extra: - import logging logging.debug(f"{cls}: unknown key '{key}': {value}") raise Exception(f'{cls}: Unknown key "{key}"') else: @@ -183,6 +188,7 @@ class DataClass(Munch): sparse: Optional[bool] = None, recursive: bool = True, hints: Optional[dict[str, Any]] = None, + validate: bool = True, ) -> dict[Any, Any]: # preserve original None-type args _sparse = cls._sparse if sparse is None else sparse @@ -190,64 +196,55 @@ class DataClass(Munch): hints = cls._type_hints if hints is None else hints result = dict(d) if not (_strip_hidden or _sparse or result): - print(f"shortcircuiting {d=}") return result - print(f"Stripping {result} with hints: {hints}") for k, v in d.items(): type_hint = resolve_type_hint(hints.get(k, "abc")) - print(f"Working on key {k}, type hints: {type_hint}") if not isinstance(k, str): - print(f"skipping unknown key type {k=}") + msg = f"strip_dict(): unknown key type {k=}: {type(k)=}" + if validate: + raise Exception(msg) + logging.warning(f"{msg} (skipping)") continue - if strip_hidden and k.startswith('_'): + if _strip_hidden and k.startswith('_'): result.pop(k) continue if v is None: if NoneType not in type_hint: msg = f'encountered illegal null value at key "{k}" for typehint {type_hint}' - if True: + if validate: raise Exception(msg) - print(msg) + logging.warning(msg) if _sparse: - print(f"popping empty {k}") result.pop(k) continue - print(f"encountered legal null value at {k}: {_sparse=}") if recursive and isinstance(v, dict): if not v: result[k] = {} continue if isinstance(v, DataClass): - print(f"Dataclass detected in {k=}") - result[k] = v.toDict(strip_hidden=strip_hidden, sparse=sparse) # pass None in sparse and strip_hidden + # pass None in sparse and strip_hidden + result[k] = v.toDict(strip_hidden=strip_hidden, sparse=sparse) continue if isinstance(v, Munch): - print(f"Converting munch {k=}") result[k] = v.toDict() if k not in hints: - print(f"skipping unknown {k=}") continue - print(f"STRIPPING RECURSIVELY: {k}: {v}, 
parent hints: {hints[k]}") _subhints = {} _hints = resolve_type_hint(hints[k], [dict]) hints_flat = list(flatten_hints(_hints)) - print(f"going over hints for {k}: {_hints=} {hints_flat=}") subclass = DataClass for hint in hints_flat: - print(f"working on hint: {hint}") if get_origin(hint) == dict: _valtype = get_args(hint)[1] _subhints = {n: _valtype for n in v.keys()} - print(f"generated {_subhints=} from {_valtype=}") break if isinstance(hint, type) and issubclass(hint, DataClass): subclass = hint _subhints = hint._type_hints - print(f"found subhints: {_subhints}") break else: - print(f"ignoring {hint=}") - print(f"STRIPPING SUBDICT {k=} WITH {_subhints=}") + # print(f"ignoring {hint=}") + continue result[k] = subclass.strip_dict( v, hints=_subhints, From 13aa258794c330c5181878ff2880b5acbf2c9248 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 16 Apr 2023 03:31:35 +0200 Subject: [PATCH 02/82] distro: use repo_config properly --- distro/distro.py | 21 +++++++++++++++----- distro/repo_config.py | 46 ++++++++++++++++++++++++------------------- 2 files changed, 42 insertions(+), 25 deletions(-) diff --git a/distro/distro.py b/distro/distro.py index 2c64b7a..c489b20 100644 --- a/distro/distro.py +++ b/distro/distro.py @@ -1,3 +1,5 @@ +import logging + from enum import IntFlag from typing import Generic, Mapping, Optional, TypeVar @@ -115,6 +117,7 @@ def get_kupfer_url(url: str = KUPFER_HTTPS, branch: Optional[str] = None) -> str def get_repo_config(*args, **kwargs) -> ReposConfigFile: repo_config, changed = _get_repo_config(*args, **kwargs) if changed: + logging.debug("Repo configs changed, resetting caches") reset_distro_caches() return repo_config @@ -132,6 +135,8 @@ def get_kupfer_repo_names(local) -> list[str]: def get_RepoInfo(arch: Arch, repo_config: AbstrRepoConfig, default_url: Optional[str]) -> RepoInfo: url = repo_config.remote_url or default_url if isinstance(url, dict): + if arch not in url and not default_url: + raise Exception(f"Invalid repo config: Architecture {arch} not in remote_url mapping: {url}") url = url.get(arch, default_url) assert url return RepoInfo( @@ -171,8 +176,13 @@ def get_kupfer_distro( repos = {repo: get_RepoInfo(arch, conf, default_url) for repo, conf in repo_config.repos.items() if not conf.local_only} cls = RemoteDistro elif location in [DistroLocation.CHROOT, DistroLocation.LOCAL]: - cache = _kupfer_local_chroots - pkgdir = CHROOT_PATHS['packages'] if location == DistroLocation.CHROOT else config.get_path('packages') + if location == DistroLocation.CHROOT: + cache = _kupfer_local_chroots + pkgdir = CHROOT_PATHS['packages'] + else: + assert location == DistroLocation.LOCAL + cache = _kupfer_local + pkgdir = config.get_path('packages') default_url = f"file://{pkgdir}/$arch/$repo" cls = LocalDistro repos = {} @@ -181,7 +191,7 @@ def get_kupfer_distro( repo.remote_url = default_url repos[name] = get_RepoInfo(arch, repo, default_url) else: - raise Exception(f"Unknown location {location}") + raise Exception(f"Unknown distro location {location}") if cache is None: cache = {} assert arch @@ -193,8 +203,8 @@ def get_kupfer_distro( scan=scan, ) assert isinstance(distro, (LocalDistro, RemoteDistro)) - return distro cache[arch] = distro + return distro item: Distro = cache[arch] if scan and not item.is_scanned(): item.scan() @@ -210,6 +220,7 @@ def get_kupfer_https(arch: Arch, scan: bool = False) -> RemoteDistro: def get_kupfer_local(arch: Optional[Arch] = None, in_chroot: bool = True, scan: bool = False) -> LocalDistro: arch = arch or config.runtime.arch 
assert arch - d = get_kupfer_distro(arch, location=DistroLocation.CHROOT if in_chroot else DistroLocation.LOCAL, scan=scan) + location = DistroLocation.CHROOT if in_chroot else DistroLocation.LOCAL + d = get_kupfer_distro(arch, location=location, scan=scan) assert isinstance(d, LocalDistro) return d diff --git a/distro/repo_config.py b/distro/repo_config.py index 9a5bbd0..6bcaa26 100644 --- a/distro/repo_config.py +++ b/distro/repo_config.py @@ -53,18 +53,18 @@ class ReposConfigFile(DataClass): def __init__(self, d, **kwargs): super().__init__(d=d, **kwargs) + self[REPOS_KEY] = self.get(REPOS_KEY, {}) for repo_cls, defaults, repos, remote_url in [ - (RepoConfig, REPO_DEFAULTS, self.get(REPOS_KEY, {}), d.get(REMOTEURL_KEY, None)), + (RepoConfig, REPO_DEFAULTS, self.get(REPOS_KEY), d.get(REMOTEURL_KEY, None)), *[(BaseDistroRepo, BASE_DISTRO_DEFAULTS, _distro.repos, _distro.get(REMOTEURL_KEY, None)) for _distro in self.base_distros.values()], ]: if repos is None: continue for name, repo in repos.items(): - _repo = defaults | (repo or {}) # type: ignore[operator] + _repo = dict(defaults | (repo or {})) # type: ignore[operator] if REMOTEURL_KEY not in repo and not repo.get(LOCALONLY_KEY, None): _repo[REMOTEURL_KEY] = remote_url repos[name] = repo_cls(_repo, **kwargs) - # self.repos = repos @staticmethod def parse_config(path: str) -> ReposConfigFile: @@ -103,11 +103,11 @@ BASE_DISTRO_DEFAULTS = { } REPOS_CONFIG_DEFAULT = ReposConfigFile({ - '_path': None, + '_path': '__DEFAULTS__', '_checksum': None, REMOTEURL_KEY: KUPFER_HTTPS, REPOS_KEY: { - 'local': REPO_DEFAULTS | { + 'kupfer_local': REPO_DEFAULTS | { LOCALONLY_KEY: True }, **{r: deepcopy(REPO_DEFAULTS) for r in REPOSITORIES}, @@ -135,24 +135,30 @@ def get_repo_config( repo_config_file_path = repo_config_file_default else: repo_config_file_path = repo_config_file - if not os.path.exists(repo_config_file_path): + config_exists = os.path.exists(repo_config_file_path) + if not config_exists and _current_config is None: + if initialize_pkgbuilds: + from packages.pkgbuild import init_pkgbuilds + init_pkgbuilds(update=False) + return get_repo_config(initialize_pkgbuilds=False, repo_config_file=repo_config_file) if repo_config_file is not None: raise Exception(f"Requested repo config {repo_config_file} doesn't exist") - if not initialize_pkgbuilds: - logging.warning(f"{repo_config_file_path} doesn't exist, using default Repositories") - return deepcopy(REPOS_CONFIG_DEFAULT), False - from packages.pkgbuild import init_pkgbuilds - init_pkgbuilds() - return get_repo_config(initialize_pkgbuilds=False, repo_config_file=repo_config_file_path) - conf = _current_config + logging.warning(f"{repo_config_file_path} doesn't exist, using built-in repo config defaults") + _current_config = deepcopy(REPOS_CONFIG_DEFAULT) + return _current_config, False changed = False - if (not _current_config) or _current_config._path != repo_config_file_path or _current_config._checksum != sha256sum(repo_config_file_path): - conf = ReposConfigFile.parse_config(repo_config_file_path) - if repo_config_file_path == repo_config_file_default: - _current_config = conf - changed = True - assert conf - return conf, changed + if (not _current_config) or (config_exists and _current_config._checksum != sha256sum(repo_config_file_path)): + if config_exists: + conf = ReposConfigFile.parse_config(repo_config_file_path) + else: + conf = REPOS_CONFIG_DEFAULT + changed = conf != (_current_config or {}) + if changed: + _current_config = deepcopy(conf) + else: + logging.debug("Repo config: Cache 
hit!") + assert _current_config + return _current_config, changed def get_repos(**kwargs) -> list[RepoConfig]: From dfd191060a36a9bf23815b9afa6089c67020605d Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 6 Feb 2023 01:51:35 +0100 Subject: [PATCH 03/82] config/state: remove `field_name` from missing device/flavour hint as it gets used as the profile name --- config/state.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/state.py b/config/state.py index 6919c3a..af4c743 100644 --- a/config/state.py +++ b/config/state.py @@ -257,7 +257,7 @@ class ConfigStateHolder: profile = self.get_profile(profile_name) if field not in profile or not profile[field]: m = (f'Profile "{profile_name}" has no {field.upper()} configured.\n' - f'Please run `kupferbootstrap config profile init {field}`{arch_hint}') + f'Please run `kupferbootstrap config profile init {profile_name}`{arch_hint}') raise Exception(m) return profile From e07306d5c42b8ffef0e96efdb1734bd11d2b6ff8 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 5 Mar 2023 18:33:13 +0100 Subject: [PATCH 04/82] wrapper: add needs_wrap(), typehint return values --- wrapper/__init__.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/wrapper/__init__.py b/wrapper/__init__.py index f7ce80a..f6f519f 100644 --- a/wrapper/__init__.py +++ b/wrapper/__init__.py @@ -14,7 +14,7 @@ wrapper_impls: dict[str, Wrapper] = { } -def get_wrapper_type(wrapper_type: Optional[str] = None): +def get_wrapper_type(wrapper_type: Optional[str] = None) -> str: return wrapper_type or config.file.wrapper.type @@ -28,14 +28,19 @@ def wrap(wrapper_type: Optional[str] = None): get_wrapper_impl(wrapper_type).wrap() -def is_wrapped(wrapper_type: Optional[str] = None): +def is_wrapped(wrapper_type: Optional[str] = None) -> bool: wrapper_type = get_wrapper_type(wrapper_type) return wrapper_type != 'none' and get_wrapper_impl(wrapper_type).is_wrapped() +def needs_wrap(wrapper_type: Optional[str] = None) -> bool: + wrapper_type = wrapper_type or get_wrapper_type() + return wrapper_type != 'none' and not is_wrapped(wrapper_type) and not config.runtime.no_wrap + + def enforce_wrap(no_wrapper=False): wrapper_type = get_wrapper_type() - if wrapper_type != 'none' and not is_wrapped(wrapper_type) and not config.runtime.no_wrap and not no_wrapper: + if needs_wrap(wrapper_type) and not no_wrapper: logging.info(f'Wrapping in {wrapper_type}') wrap() From 954592fc626575d6ee01928cec7bbd941db837aa Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 13 Mar 2023 01:16:41 +0100 Subject: [PATCH 05/82] config/cli: warn when saving config in container --- config/cli.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/config/cli.py b/config/cli.py index e0e6d1b..289dc59 100644 --- a/config/cli.py +++ b/config/cli.py @@ -176,7 +176,12 @@ def prompt_for_save(retry_ctx: Optional[click.Context] = None): If `retry_ctx` is passed, the context's command will be reexecuted with the same arguments if the user chooses to retry. False will still be returned as the retry is expected to either save, perform another retry or arbort. """ + from wrapper import is_wrapped if click.confirm(f'Do you want to save your changes to {config.runtime.config_file}?', default=True): + if is_wrapped(): + logging.warning("Writing to config file inside wrapper." + "This is pointless and probably a bug." + "Your host config file will not be modified.") return True if retry_ctx: if click.confirm('Retry? 
("n" to quit without saving)', default=True): @@ -333,7 +338,9 @@ def cmd_profile_init(ctx, name: Optional[str] = None, non_interactive: bool = Fa config.update_profile(name, profile) if not noop: if not prompt_for_save(ctx): + logging.info("Not saving.") return + config.write() else: logging.info(f'--noop passed, not writing to {config.runtime.config_file}!') From ec0e430c004926208dbb6268eaf1850efa6a0a13 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 13 Mar 2023 01:39:32 +0100 Subject: [PATCH 06/82] config/cli: save main config body separately from profiles to support flavour and device listing --- config/cli.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/config/cli.py b/config/cli.py index 289dc59..237e434 100644 --- a/config/cli.py +++ b/config/cli.py @@ -206,6 +206,8 @@ noninteractive_flag = click.option('-N', '--non-interactive', is_flag=True) noop_flag = click.option('--noop', '-n', help="Don't write changes to file", is_flag=True) noparse_flag = click.option('--no-parse', help="Don't search PKGBUILDs for devices and flavours", is_flag=True) +CONFIG_MSG = ("Leave fields empty to leave them at their currently displayed value.") + @cmd_config.command(name='init') @noninteractive_flag @@ -229,6 +231,7 @@ def cmd_config_init( ): """Initialize the config file""" if not non_interactive: + logging.info(CONFIG_MSG) results: dict[str, dict] = {} for section in sections: if section not in CONFIG_SECTIONS: @@ -244,7 +247,14 @@ def cmd_config_init( results[section][key] = result config.update(results) + print("Main configuration complete") + if not noop: + if prompt_for_save(ctx): + config.write() + else: + return if 'profiles' in sections: + print("Configuring profiles") current_profile = 'default' if 'current' not in config.file.profiles else config.file.profiles.current new_current, _ = prompt_config('profiles.current', default=current_profile, field_type=str) profile, changed = prompt_profile(new_current, create=True, no_parse=no_parse) @@ -271,6 +281,7 @@ def cmd_config_set(ctx, key_vals: list[str], non_interactive: bool = False, noop like `build.clean_mode=false` or alternatively just keys to get prompted if run interactively. 
""" config.enforce_config_loaded() + logging.info(CONFIG_MSG) config_copy = deepcopy(config.file) for pair in key_vals: split_pair = pair.split('=') @@ -328,6 +339,7 @@ def cmd_profile_init(ctx, name: Optional[str] = None, non_interactive: bool = Fa profile = deepcopy(PROFILE_EMPTY) if name == 'current': raise Exception("profile name 'current' not allowed") + logging.info(CONFIG_MSG) name = name or config.file.profiles.current if name in config.file.profiles: profile |= config.file.profiles[name] From d2d9cb6c7cb9481fffa6fd55306eee3b5e460a47 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 13 Mar 2023 05:32:37 +0100 Subject: [PATCH 07/82] wrapper: add Wrapper.argv_override --- wrapper/docker.py | 7 ++++++- wrapper/wrapper.py | 4 +++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/wrapper/docker.py b/wrapper/docker.py index 2112548..cac3088 100644 --- a/wrapper/docker.py +++ b/wrapper/docker.py @@ -86,7 +86,12 @@ class DockerWrapper(BaseWrapper): '--privileged', ] + docker_volumes_args(volumes) + [tag] - kupfer_cmd = ['kupferbootstrap', '--config', volumes[wrapped_config]] + self.filter_args_wrapper(sys.argv[1:]) + kupfer_cmd = [ + 'kupferbootstrap', + '--config', + volumes[wrapped_config], + ] + kupfer_cmd += self.argv_override or self.filter_args_wrapper(sys.argv[1:]) if config.runtime.uid: kupfer_cmd = ['wrapper_su_helper', '--uid', str(config.runtime.uid), '--username', 'kupfer', '--'] + kupfer_cmd diff --git a/wrapper/wrapper.py b/wrapper/wrapper.py index 619c299..1c7ba01 100644 --- a/wrapper/wrapper.py +++ b/wrapper/wrapper.py @@ -31,15 +31,17 @@ class Wrapper(Protocol): """ -class BaseWrapper(Wrapper): +class Wrapper(WrapperProtocol): uuid: str identifier: str type: str wrapped_config_path: str + argv_override: Optional[list[str]] def __init__(self, random_id: Optional[str] = None, name: Optional[str] = None): self.uuid = str(random_id or uuid.uuid4()) self.identifier = name or f'kupferbootstrap-{self.uuid}' + self.argv_override = None def filter_args_wrapper(self, args): """filter out -c/--config since it doesn't apply in wrapper""" From 389d44e776419e3f637413aadafe6b0f1824f9d7 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 13 Mar 2023 05:33:21 +0100 Subject: [PATCH 08/82] wrapper: add Wrapper.should_exit --- wrapper/docker.py | 9 +++++---- wrapper/wrapper.py | 4 +++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/wrapper/docker.py b/wrapper/docker.py index cac3088..d61a35b 100644 --- a/wrapper/docker.py +++ b/wrapper/docker.py @@ -7,7 +7,7 @@ import sys from config.state import config from exec.file import makedir -from .wrapper import BaseWrapper, WRAPPER_PATHS +from .wrapper import Wrapper, WRAPPER_PATHS DOCKER_PATHS = WRAPPER_PATHS.copy() @@ -19,7 +19,7 @@ def docker_volumes_args(volume_mappings: dict[str, str]) -> list[str]: return result -class DockerWrapper(BaseWrapper): +class DockerWrapper(Wrapper): type: str = 'docker' def wrap(self): @@ -98,8 +98,9 @@ class DockerWrapper(BaseWrapper): cmd = docker_cmd + kupfer_cmd logging.debug('Wrapping in docker:' + repr(cmd)) result = subprocess.run(cmd) - - exit(result.returncode) + if self.should_exit: + exit(result.returncode) + return result.returncode def stop(self): subprocess.run( diff --git a/wrapper/wrapper.py b/wrapper/wrapper.py index 1c7ba01..61bc197 100644 --- a/wrapper/wrapper.py +++ b/wrapper/wrapper.py @@ -15,7 +15,7 @@ WRAPPER_PATHS = CHROOT_PATHS | { } -class Wrapper(Protocol): +class WrapperProtocol(Protocol): """Wrappers wrap kupferbootstrap in some form of isolation from 
the host OS, i.e. docker or chroots""" def wrap(self): @@ -37,11 +37,13 @@ class Wrapper(WrapperProtocol): type: str wrapped_config_path: str argv_override: Optional[list[str]] + should_exit: bool def __init__(self, random_id: Optional[str] = None, name: Optional[str] = None): self.uuid = str(random_id or uuid.uuid4()) self.identifier = name or f'kupferbootstrap-{self.uuid}' self.argv_override = None + self.should_exit = True def filter_args_wrapper(self, args): """filter out -c/--config since it doesn't apply in wrapper""" From b9969d8feb9ccd0dc2a9c8e49e12e971387b68dc Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 13 Mar 2023 05:54:10 +0100 Subject: [PATCH 09/82] wrapper: add execute_without_exit() --- wrapper/__init__.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/wrapper/__init__.py b/wrapper/__init__.py index f6f519f..a86f879 100644 --- a/wrapper/__init__.py +++ b/wrapper/__init__.py @@ -56,6 +56,26 @@ def wrap_if_foreign_arch(arch: Arch): enforce_wrap() +def execute_without_exit(f, argv_override: Optional[list[str]], *args, **kwargs): + """If no wrap is needed, executes and returns f(*args, **kwargs). + If a wrap is determined to be necessary, force a wrap with argv_override applied. + If a wrap was forced, None is returned. + WARNING: No protection against f() returning None is taken.""" + if not needs_wrap(): + return f(*args, **kwargs) + assert get_wrapper_type() != 'none', "needs_wrap() should've returned False" + w = get_wrapper_impl() + w_cmd = w.argv_override + # we need to avoid throwing and catching SystemExit due to FDs getting closed otherwise + w_should_exit = w.should_exit + w.argv_override = argv_override + w.should_exit = False + w.wrap() + w.argv_override = w_cmd + w.should_exit = w_should_exit + return None + + nowrapper_option = click.option( '-w/-W', '--force-wrapper/--no-wrapper', From 91d2cd3681cbc7c6ca3cc3b2a935e4967322a7fa Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 13 Mar 2023 05:54:36 +0100 Subject: [PATCH 10/82] config/cli: use wrapper.execute_without_exit() for prompt_profile_{flavour,device}() to avoid prompting in docker --- config/cli.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/config/cli.py b/config/cli.py index 237e434..e07e50b 100644 --- a/config/cli.py +++ b/config/cli.py @@ -7,6 +7,7 @@ from typing import Any, Iterable, Optional, Union from devices.device import get_devices from flavours.flavour import get_flavours +from wrapper import execute_without_exit from .scheme import Profile from .profile import PROFILE_EMPTY, PROFILE_DEFAULTS @@ -132,16 +133,22 @@ def prompt_choice(current: Optional[Any], key: str, choices: Iterable[Any], allo def prompt_profile_device(current: Optional[str], profile_name: str) -> tuple[str, bool]: - devices = get_devices() print(click.style("Pick your device!\nThese are the available devices:", bold=True)) + devices = execute_without_exit(get_devices, ['devices']) + if devices is None: + print("(wrapper mode, input for this field will not be checked for correctness)") + return prompt_config(text=f'{profile_name}.device', default=current) for dev in sorted(devices.keys()): print(f"{devices[dev]}\n") return prompt_choice(current, f'profiles.{profile_name}.device', devices.keys()) def prompt_profile_flavour(current: Optional[str], profile_name: str) -> tuple[str, bool]: - flavours = get_flavours() print(click.style("Pick your flavour!\nThese are the available flavours:", bold=True)) + flavours = execute_without_exit(get_flavours, ['flavours']) + 
if flavours is None: + print("(wrapper mode, input for this field will not be checked for correctness)") + return prompt_config(text=f'{profile_name}.flavour', default=current) for f in sorted(flavours.keys()): print(flavours[f]) return prompt_choice(current, f'profiles.{profile_name}.flavour', flavours.keys()) From f113faa2011ef5aaacd62b5740a2760894675c93 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Tue, 14 Mar 2023 19:44:15 +0100 Subject: [PATCH 11/82] constants: add kupfer-config --user --- constants.py | 5 ++++- image/image.py | 11 ++++++----- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/constants.py b/constants.py index 0d470ec..62fb230 100644 --- a/constants.py +++ b/constants.py @@ -24,7 +24,10 @@ BASE_PACKAGES: list[str] = BASE_LOCAL_PACKAGES + [ 'vim', ] -POST_CMDS = ['kupfer-config apply'] +POST_INSTALL_CMDS = [ + 'kupfer-config apply', + 'kupfer-config --user apply', +] REPOS_CONFIG_FILE = "repos.yml" diff --git a/image/image.py b/image/image.py index e4b2c73..b8834ed 100644 --- a/image/image.py +++ b/image/image.py @@ -11,7 +11,7 @@ from typing import Optional, Union from config.state import config, Profile from chroot.device import DeviceChroot, get_device_chroot -from constants import Arch, BASE_LOCAL_PACKAGES, BASE_PACKAGES, POST_CMDS +from constants import Arch, BASE_LOCAL_PACKAGES, BASE_PACKAGES, POST_INSTALL_CMDS from distro.distro import get_base_distro, get_kupfer_https from devices.device import Device, get_profile_device from exec.cmd import run_root_cmd, generate_cmd_su @@ -318,12 +318,13 @@ def install_rootfs( } for target, content in files.items(): root_write_file(os.path.join(chroot.path, target.lstrip('/')), content) - if POST_CMDS: - logging.info("Running post-install CMDs") - result = chroot.run_cmd(' && '.join(POST_CMDS)) + + logging.info("Running post-install CMDs") + for cmd in POST_INSTALL_CMDS: + result = chroot.run_cmd(cmd) assert isinstance(result, subprocess.CompletedProcess) if result.returncode != 0: - raise Exception('Error running post_cmds') + raise Exception(f'Error running post-install cmd: {cmd}') logging.info('Preparing to unmount chroot') res = chroot.run_cmd('sync && umount /boot', attach_tty=True) From f6fb521c8af2aafbef9c1abcf6db89b22b3e17bc Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Thu, 22 Dec 2022 17:18:30 +0100 Subject: [PATCH 12/82] packages: build_enable_qemu_binfmt(): don't use is_registered() --- packages/build.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/build.py b/packages/build.py index c763327..61050db 100644 --- a/packages/build.py +++ b/packages/build.py @@ -9,7 +9,7 @@ from copy import deepcopy from urllib.error import HTTPError from typing import Iterable, Iterator, Optional -from binfmt import register as binfmt_register, binfmt_is_registered +from binfmt import register as binfmt_register from constants import CROSSDIRECT_PKGS, QEMU_BINFMT_PKGS, GCC_HOSTSPECS, ARCHES, Arch, CHROOT_PATHS, MAKEPKG_CMD from config.state import config from exec.cmd import run_cmd, run_root_cmd @@ -800,7 +800,7 @@ def build_enable_qemu_binfmt(arch: Arch, repo: Optional[dict[str, Pkgbuild]] = N if arch not in ARCHES: raise Exception(f'Unknown architecture "{arch}". 
Choices: {", ".join(ARCHES)}') logging.info('Installing qemu-user (building if necessary)') - if lazy and _qemu_enabled[arch] and binfmt_is_registered(arch): + if lazy and _qemu_enabled[arch]: _qemu_enabled[arch] = True return native = config.runtime.arch From 0353693025b9035ad5415a165d0a2393b765ee8f Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Thu, 22 Dec 2022 17:16:30 +0100 Subject: [PATCH 13/82] exec/cmd: flatten_shell_script(): specifically quote empty strings even when shell_quote is disabled --- exec/cmd.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/exec/cmd.py b/exec/cmd.py index bff9aec..be8218b 100644 --- a/exec/cmd.py +++ b/exec/cmd.py @@ -38,6 +38,8 @@ def flatten_shell_script(script: Union[list[str], str], shell_quote_items: bool cmds = script if shell_quote_items: cmds = [shell_quote(i) for i in cmds] + else: + cmds = [(i if i != '' else '""') for i in cmds] script = " ".join(cmds) if wrap_in_shell_quote: script = shell_quote(script) From 4115d6ba003ff34122d5f713d561aa94060c69eb Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Thu, 29 Dec 2022 13:13:48 +0100 Subject: [PATCH 14/82] packages/build: build_package(): source /etc/profile before building so PATH is complete --- packages/build.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/build.py b/packages/build.py index 61050db..a7a2fe9 100644 --- a/packages/build.py +++ b/packages/build.py @@ -578,7 +578,7 @@ def build_package( setup_git_insecure_paths(build_root) makepkg_conf_absolute = os.path.join('/', makepkg_conf_path) - build_cmd = MAKEPKG_CMD + ['--config', makepkg_conf_absolute, '--skippgpcheck'] + makepkg_compile_opts + build_cmd = ['source', '/etc/profile', '&&', *MAKEPKG_CMD, '--config', makepkg_conf_absolute, '--skippgpcheck', *makepkg_compile_opts] logging.debug(f'Building: Running {build_cmd}') result = build_root.run_cmd( build_cmd, From 61b14443600b8bcf56cf9fdc8c35729039ab8aee Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Thu, 29 Dec 2022 13:15:06 +0100 Subject: [PATCH 15/82] wrapper_su_helper.py: tolerate non-unique uid --- wrapper_su_helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wrapper_su_helper.py b/wrapper_su_helper.py index bc1683f..7f8426c 100755 --- a/wrapper_su_helper.py +++ b/wrapper_su_helper.py @@ -21,7 +21,7 @@ def kupferbootstrap_su(cmd: list[str], uid: int = 1000, username: str = 'kupfer' user = pwd.getpwnam(username) home = user.pw_dir if uid != user.pw_uid: - run_cmd(['usermod', '-u', str(uid), username]).check_returncode() # type: ignore[union-attr] + run_cmd(['usermod', '-o', '-u', str(uid), username]).check_returncode() # type: ignore[union-attr] chown(home, username, recursive=False) logging.debug(f'wrapper_su_helper: running {cmd} as {repr(username)}') env_inject = ['env', f'{WRAPPER_ENV_VAR}={os.environ[WRAPPER_ENV_VAR]}'] if WRAPPER_ENV_VAR in os.environ else [] From cfd65f9638cdaf7d2691294d56dd3828f9f05804 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Thu, 29 Dec 2022 13:17:23 +0100 Subject: [PATCH 16/82] gitignore: add kate swap files --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index bb5670e..68c1c72 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +*.kate-swp venv/ __pycache__/ .coverage* From d3cc5e948311ba194c6178fb1caa8a1a21ebcccd Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 1 Jan 2023 05:14:19 +0100 Subject: [PATCH 17/82] main.py: announce force-enabling wrapper --- main.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/main.py b/main.py index 04cea11..46e51c0 100755 --- a/main.py +++ b/main.py @@ -8,7 +8,7 @@ from traceback import format_exc, format_exception_only, format_tb from typing import Optional from logger import color_option, logging, quiet_option, setup_logging, verbose_option -from wrapper import nowrapper_option, enforce_wrap +from wrapper import get_wrapper_type, enforce_wrap, nowrapper_option from progressbar import progress_bars_option from config.cli import config, config_option, cmd_config @@ -49,6 +49,7 @@ def cli( if config.file_state.exception: logging.warning(f"Config file couldn't be loaded: {config.file_state.exception}") if wrapper_override: + logging.info(f'Force-wrapping in wrapper-type: "{get_wrapper_type()}"!') enforce_wrap() From 1a695adff455582c62975bdef2de7610d61eba0c Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 2 Jan 2023 02:41:59 +0100 Subject: [PATCH 18/82] wrapper/docker: don't suppress `docker build` stdout when verbose enabled --- wrapper/docker.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/wrapper/docker.py b/wrapper/docker.py index d61a35b..682b0da 100644 --- a/wrapper/docker.py +++ b/wrapper/docker.py @@ -38,7 +38,12 @@ class DockerWrapper(Wrapper): tag, ] + (['-q'] if not config.runtime.verbose else []) logging.debug('Running docker cmd: ' + ' '.join(cmd)) - result = subprocess.run(cmd, cwd=script_path, capture_output=True) + result = subprocess.run( + cmd, + cwd=script_path, + stdout=(sys.stdout if config.runtime.verbose else subprocess.PIPE), + stderr=(sys.stderr if config.runtime.verbose else subprocess.PIPE), + ) if result.returncode != 0: logging.fatal('Failed to build docker image:\n' + result.stderr.decode()) exit(1) From eb13a7d093403b8e69b9d0fa47c861b5ea3338de Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 9 Jan 2023 03:27:50 +0100 Subject: [PATCH 19/82] image/cli: improve help for cmd_inspect() --- image/image.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/image/image.py b/image/image.py index b8834ed..bdc3200 100644 --- a/image/image.py +++ b/image/image.py @@ -453,10 +453,10 @@ def cmd_build(profile_name: Optional[str] = None, @cmd_image.command(name='inspect') -@click.option('--shell', '-s', is_flag=True) +@click.option('--shell', '-s', help="Open a shell in the image's rootfs", is_flag=True) @click.argument('profile', required=False) def cmd_inspect(profile: Optional[str] = None, shell: bool = False): - """Open a shell in a device image""" + """Loop-mount the device image for inspection.""" config.enforce_profile_device_set() config.enforce_profile_flavour_set() enforce_wrap() From 6961cb7f3666a1e109add3e4e6cd4de460f781c5 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 16 Apr 2023 04:33:59 +0200 Subject: [PATCH 20/82] gitlab-ci: override docker dind mtu sigh. 
-.- --- .gitlab-ci.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 003a8ec..2327114 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -44,7 +44,9 @@ pytest: build_docker: stage: build image: docker:latest - services: ['docker:dind'] + services: + - name: docker:dind + command: ["--mtu=1100"] # very low, safe value -.- variables: DOCKER_TLS_CERTDIR: "" script: From cd1d0543fea070425d9f088eb289268afc2221a8 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 16 Apr 2023 05:22:17 +0200 Subject: [PATCH 21/82] wrapper: move at_exit handling into wrap() --- wrapper/docker.py | 1 + wrapper/wrapper.py | 13 +++++-------- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/wrapper/docker.py b/wrapper/docker.py index 682b0da..52da4c1 100644 --- a/wrapper/docker.py +++ b/wrapper/docker.py @@ -23,6 +23,7 @@ class DockerWrapper(Wrapper): type: str = 'docker' def wrap(self): + super().wrap() script_path = config.runtime.script_source_dir assert script_path with open(os.path.join(script_path, 'version.txt')) as version_file: diff --git a/wrapper/wrapper.py b/wrapper/wrapper.py index 61bc197..35a5f2a 100644 --- a/wrapper/wrapper.py +++ b/wrapper/wrapper.py @@ -77,13 +77,6 @@ class Wrapper(WrapperProtocol): ) -> str: wrapped_config = f'{target_path.rstrip("/")}/{self.identifier}_wrapped.toml' - # FIXME: these at_exit hooks should go and be called from somewhere better suited - def at_exit(): - self.stop() - os.remove(wrapped_config) - - atexit.register(at_exit) - dump_config_file( file_path=wrapped_config, config=(config.file | { @@ -93,8 +86,12 @@ class Wrapper(WrapperProtocol): self.wrapped_config_path = wrapped_config return wrapped_config + def at_exit(self): + os.remove(self.wrapped_config_path) + self.stop() + def wrap(self): - raise NotImplementedError() + atexit.register(self.at_exit) def stop(self): raise NotImplementedError() From 67590fe12b8ae15024aee917cae9cf850d84e82e Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 16 Apr 2023 05:46:13 +0200 Subject: [PATCH 22/82] config/cli: drop obsolete warning when pkgbuilds arent initialised in prompt_profile() --- config/cli.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/config/cli.py b/config/cli.py index e07e50b..1cf7375 100644 --- a/config/cli.py +++ b/config/cli.py @@ -1,6 +1,5 @@ import click import logging -import os from copy import deepcopy from typing import Any, Iterable, Optional, Union @@ -95,15 +94,6 @@ def prompt_profile( raise Exception(f'Unknown profile "{name}"') logging.info(f'Configuring profile "{name}"') changed = False - if not (no_parse or os.path.exists(os.path.join(config.get_path('pkgbuilds'), 'device'))): - logging.warning("PKGBUILDS NOT INITIALISED:\n" - "Usually we'd present you with detailed lists of choices for devices and flavours in this dialogue,\n" - "but your pkgbuilds.git seem to not have been cloned yet.\n\n" - "You can:\n1. complete the dialogue with default values for now\n" - "2. run `kupferbootstrap packages update` afterwards\n" - f"3. 
then get back to this dialogue by running `kupferbootstrap config profile init {name}`\n\n" - "You can also use `kupferbootstrap packages flavours` and `kupferbootstrap packages devices` to list them.") - no_parse = True for key, current in profile.items(): current = profile[key] text = f'{name}.{key}' From c357b0a9684b1f5c805cbbaa8b4e44469babee36 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 16 Apr 2023 05:50:54 +0200 Subject: [PATCH 23/82] wrapper: only run at_exit handler once --- wrapper/wrapper.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/wrapper/wrapper.py b/wrapper/wrapper.py index 35a5f2a..a65021a 100644 --- a/wrapper/wrapper.py +++ b/wrapper/wrapper.py @@ -38,12 +38,14 @@ class Wrapper(WrapperProtocol): wrapped_config_path: str argv_override: Optional[list[str]] should_exit: bool + atexit_registered: bool def __init__(self, random_id: Optional[str] = None, name: Optional[str] = None): self.uuid = str(random_id or uuid.uuid4()) self.identifier = name or f'kupferbootstrap-{self.uuid}' self.argv_override = None self.should_exit = True + self.atexit_registered = False def filter_args_wrapper(self, args): """filter out -c/--config since it doesn't apply in wrapper""" @@ -89,9 +91,12 @@ class Wrapper(WrapperProtocol): def at_exit(self): os.remove(self.wrapped_config_path) self.stop() + self.atexit_registered = False def wrap(self): - atexit.register(self.at_exit) + if not self.atexit_registered: + atexit.register(self.at_exit) + self.atexit_registered = True def stop(self): raise NotImplementedError() From b84d2202db6a98d101c22a1b712ce54e19541361 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 16 Apr 2023 20:48:48 +0000 Subject: [PATCH 24/82] python 3.9 compat: introduce typehelpers.py for NoneType, UnionType, TypeAlias --- config/scheme.py | 11 +---------- constants.py | 2 +- dataclass.py | 10 +++------- devices/device.py | 3 +-- distro/package.py | 6 +++--- distro/repo_config.py | 4 ++-- exec/cmd.py | 4 +++- packages/pkgbuild.py | 3 ++- typehelpers.py | 18 ++++++++++++++++++ 9 files changed, 34 insertions(+), 27 deletions(-) create mode 100644 typehelpers.py diff --git a/config/scheme.py b/config/scheme.py index a0554ad..9409661 100644 --- a/config/scheme.py +++ b/config/scheme.py @@ -3,11 +3,10 @@ from __future__ import annotations from munch import Munch from typing import Any, Optional, Mapping, Union -from dataclass import DataClass, munchclass +from dataclass import DataClass from constants import Arch -@munchclass() class SparseProfile(DataClass): parent: Optional[str] device: Optional[str] @@ -23,7 +22,6 @@ class SparseProfile(DataClass): return f'{type(self)}{dict.__repr__(self.toDict())}' -@munchclass() class Profile(SparseProfile): parent: Optional[str] device: str @@ -36,12 +34,10 @@ class Profile(SparseProfile): size_extra_mb: Union[str, int] -@munchclass() class WrapperSection(DataClass): type: str # NOTE: rename to 'wrapper_type' if this causes problems -@munchclass() class BuildSection(DataClass): ccache: bool clean_mode: bool @@ -50,20 +46,17 @@ class BuildSection(DataClass): threads: int -@munchclass() class PkgbuildsSection(DataClass): git_repo: str git_branch: str -@munchclass() class PacmanSection(DataClass): parallel_downloads: int check_space: bool repo_branch: str -@munchclass() class PathsSection(DataClass): cache_dir: str chroots: str @@ -101,7 +94,6 @@ class ProfilesSection(DataClass): return f'{type(self)}{dict.__repr__(self.toDict())}' -@munchclass() class Config(DataClass): wrapper: WrapperSection build: 
BuildSection @@ -138,7 +130,6 @@ class Config(DataClass): return Config(_vals, validate=validate) -@munchclass() class RuntimeConfiguration(DataClass): verbose: bool no_wrap: bool diff --git a/constants.py b/constants.py index 62fb230..d881b9f 100644 --- a/constants.py +++ b/constants.py @@ -1,4 +1,4 @@ -from typing_extensions import TypeAlias +from typehelpers import TypeAlias FASTBOOT = 'fastboot' FLASH_PARTS = { diff --git a/dataclass.py b/dataclass.py index 8ad9035..6bd19df 100644 --- a/dataclass.py +++ b/dataclass.py @@ -3,15 +3,11 @@ from __future__ import annotations import logging import toml -from dataclasses import dataclass from munch import Munch from toml.encoder import TomlEncoder, TomlPreserveInlineDictEncoder from typing import ClassVar, Generator, Optional, Union, Mapping, Any, get_type_hints, get_origin, get_args, Iterable -from types import UnionType, NoneType - -def munchclass(*args, init=False, **kwargs): - return dataclass(*args, init=init, slots=True, **kwargs) +from typehelpers import UnionType, NoneType def resolve_type_hint(hint: type, ignore_origins: list[type] = []) -> Iterable[type]: @@ -73,12 +69,12 @@ class DataClass(Munch): type_hints = cls._type_hints if type_hints is None else type_hints if key in type_hints: _classes = tuple[type](resolve_type_hint(type_hints[key])) - optional = NoneType in _classes + optional = bool(set([NoneType, None]).intersection(_classes)) if optional and value is None: results[key] = None continue if issubclass(_classes[0], dict): - assert isinstance(value, dict) or optional + assert isinstance(value, dict) or (optional and value is None), f'{key=} is not dict: {value!r}, {_classes=}' target_class = _classes[0] if target_class in [None, NoneType, Optional]: for target in _classes[1:]: diff --git a/devices/device.py b/devices/device.py index 05041b3..9780a86 100644 --- a/devices/device.py +++ b/devices/device.py @@ -5,7 +5,7 @@ from typing import Optional from config.state import config from constants import Arch, ARCHES -from config.scheme import DataClass, munchclass +from dataclass import DataClass from distro.distro import get_kupfer_local from distro.package import LocalPackage from packages.pkgbuild import Pkgbuild, _pkgbuilds_cache, discover_pkgbuilds, get_pkgbuild_by_path, init_pkgbuilds @@ -43,7 +43,6 @@ class DeviceSummary(DataClass): return separator.join([f"{color_str(name, bold=True, use_colors=colors)}: {value}" for name, value in fields.items()]) -@munchclass() class Device(DataClass): name: str arch: Arch diff --git a/distro/package.py b/distro/package.py index 18c48c1..4a5b5a6 100644 --- a/distro/package.py +++ b/distro/package.py @@ -2,7 +2,7 @@ import logging import os from shutil import copyfileobj -from typing import Optional +from typing import Optional, Union from urllib.request import urlopen from exec.file import get_temp_dir, makedir @@ -17,7 +17,7 @@ class BinaryPackage(PackageInfo): arch: str filename: str resolved_url: Optional[str] - _desc: Optional[dict[str, str | list[str]]] + _desc: Optional[dict[str, Union[str, list[str]]]] def __init__( self, @@ -39,7 +39,7 @@ class BinaryPackage(PackageInfo): @classmethod def parse_desc(clss, desc_str: str, resolved_repo_url=None): """Parses a desc file, returning a PackageInfo""" - desc: dict[str, str | list[str]] = {} + desc: dict[str, Union[str, list[str]]] = {} for segment in f'\n{desc_str}'.split('\n%'): if not segment.strip(): continue diff --git a/distro/repo_config.py b/distro/repo_config.py index 6bcaa26..34c55e6 100644 --- a/distro/repo_config.py +++ 
b/distro/repo_config.py @@ -6,7 +6,7 @@ import toml import yaml from copy import deepcopy -from typing import ClassVar, Optional, Mapping +from typing import ClassVar, Optional, Mapping, Union from config.state import config from constants import Arch, BASE_DISTROS, KUPFER_HTTPS, REPOS_CONFIG_FILE, REPOSITORIES @@ -33,7 +33,7 @@ class BaseDistroRepo(AbstrRepoConfig): class RepoConfig(AbstrRepoConfig): - remote_url: Optional[str | dict[Arch, str]] + remote_url: Optional[Union[str, dict[Arch, str]]] local_only: Optional[bool] diff --git a/exec/cmd.py b/exec/cmd.py index be8218b..d653d05 100644 --- a/exec/cmd.py +++ b/exec/cmd.py @@ -5,7 +5,9 @@ import subprocess from subprocess import CompletedProcess # make it easy for users of this module from shlex import quote as shell_quote -from typing import IO, Optional, Union, TypeAlias +from typing import IO, Optional, Union + +from typehelpers import TypeAlias ElevationMethod: TypeAlias = str diff --git a/packages/pkgbuild.py b/packages/pkgbuild.py index a2d13e5..b61884a 100644 --- a/packages/pkgbuild.py +++ b/packages/pkgbuild.py @@ -6,7 +6,7 @@ import multiprocessing import os from joblib import Parallel, delayed -from typing import Iterable, Optional, TypeAlias +from typing import Iterable, Optional from config.state import config, ConfigStateHolder from constants import Arch @@ -16,6 +16,7 @@ from exec.file import remove_file from logger import setup_logging from utils import git, git_get_branch from wrapper import check_programs_wrap +from typehelpers import TypeAlias from .srcinfo_cache import SrcinfoMetaFile diff --git a/typehelpers.py b/typehelpers.py new file mode 100644 index 0000000..f43d96e --- /dev/null +++ b/typehelpers.py @@ -0,0 +1,18 @@ +from typing import Union + +try: + from typing import TypeAlias # type: ignore[attr-defined] +except ImportError: + from typing_extensions import TypeAlias + +TypeAlias = TypeAlias + +try: + from types import UnionType +except ImportError: + UnionType: TypeAlias = Union # type: ignore[no-redef] + +try: + from types import NoneType +except ImportError: + NoneType: TypeAlias = type(None) # type: ignore[no-redef] From acee95a0031f50e5812f99c50253b365248053dc Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 17 Apr 2023 02:32:28 +0200 Subject: [PATCH 25/82] dictscheme: rename from dataclass as it's confusing with builtin dataclasses --- config/scheme.py | 22 +++++++++++----------- config/state.py | 4 ++-- config/test_config.py | 2 +- devices/device.py | 6 +++--- devices/deviceinfo.py | 4 ++-- dataclass.py => dictscheme.py | 10 +++++----- distro/repo_config.py | 8 ++++---- flavours/flavour.py | 8 ++++---- packages/srcinfo_cache.py | 4 ++-- 9 files changed, 34 insertions(+), 34 deletions(-) rename dataclass.py => dictscheme.py (98%) diff --git a/config/scheme.py b/config/scheme.py index 9409661..a5846ba 100644 --- a/config/scheme.py +++ b/config/scheme.py @@ -3,11 +3,11 @@ from __future__ import annotations from munch import Munch from typing import Any, Optional, Mapping, Union -from dataclass import DataClass +from dictscheme import DictScheme from constants import Arch -class SparseProfile(DataClass): +class SparseProfile(DictScheme): parent: Optional[str] device: Optional[str] flavour: Optional[str] @@ -34,11 +34,11 @@ class Profile(SparseProfile): size_extra_mb: Union[str, int] -class WrapperSection(DataClass): +class WrapperSection(DictScheme): type: str # NOTE: rename to 'wrapper_type' if this causes problems -class BuildSection(DataClass): +class BuildSection(DictScheme): ccache: bool 
clean_mode: bool crosscompile: bool @@ -46,18 +46,18 @@ class BuildSection(DataClass): threads: int -class PkgbuildsSection(DataClass): +class PkgbuildsSection(DictScheme): git_repo: str git_branch: str -class PacmanSection(DataClass): +class PacmanSection(DictScheme): parallel_downloads: int check_space: bool repo_branch: str -class PathsSection(DataClass): +class PathsSection(DictScheme): cache_dir: str chroots: str pacman: str @@ -69,7 +69,7 @@ class PathsSection(DataClass): rust: str -class ProfilesSection(DataClass): +class ProfilesSection(DictScheme): current: str default: SparseProfile @@ -94,7 +94,7 @@ class ProfilesSection(DataClass): return f'{type(self)}{dict.__repr__(self.toDict())}' -class Config(DataClass): +class Config(DictScheme): wrapper: WrapperSection build: BuildSection pkgbuilds: PkgbuildsSection @@ -130,7 +130,7 @@ class Config(DataClass): return Config(_vals, validate=validate) -class RuntimeConfiguration(DataClass): +class RuntimeConfiguration(DictScheme): verbose: bool no_wrap: bool error_shell: bool @@ -142,7 +142,7 @@ class RuntimeConfiguration(DataClass): colors: Optional[bool] -class ConfigLoadState(DataClass): +class ConfigLoadState(DictScheme): load_finished: bool exception: Optional[Exception] diff --git a/config/state.py b/config/state.py index af4c743..2d1ba42 100644 --- a/config/state.py +++ b/config/state.py @@ -7,7 +7,7 @@ from typing import Mapping, Optional from constants import DEFAULT_PACKAGE_BRANCH -from .scheme import Config, ConfigLoadState, DataClass, Profile, RuntimeConfiguration +from .scheme import Config, ConfigLoadState, DictScheme, Profile, RuntimeConfiguration from .profile import PROFILE_DEFAULTS, PROFILE_DEFAULTS_DICT, resolve_profile CONFIG_DIR = appdirs.user_config_dir('kupfer') @@ -95,7 +95,7 @@ def merge_configs(conf_new: Mapping[str, dict], conf_base={}, warn_missing_defau continue logging.debug(f'Parsing config section "{outer_name}"') # check if outer_conf is a dict - if not (isinstance(outer_conf, (dict, DataClass))): + if not (isinstance(outer_conf, (dict, DictScheme))): parsed[outer_name] = outer_conf else: # init section diff --git a/config/test_config.py b/config/test_config.py index 6854cc8..b97ac1b 100644 --- a/config/test_config.py +++ b/config/test_config.py @@ -157,7 +157,7 @@ def test_config_save_modified(configstate_emptyfile: ConfigStateHolder): def get_config_scheme(data: dict[str, Any], validate=True, allow_incomplete=False) -> Config: """ helper func to ignore a false type error. 
- for some reason, mypy argues about DataClass.fromDict() instead of Config.fromDict() here + for some reason, mypy argues about DictScheme.fromDict() instead of Config.fromDict() here """ return Config.fromDict(data, validate=validate, allow_incomplete=allow_incomplete) # type: ignore[call-arg] diff --git a/devices/device.py b/devices/device.py index 9780a86..b53adbb 100644 --- a/devices/device.py +++ b/devices/device.py @@ -5,7 +5,7 @@ from typing import Optional from config.state import config from constants import Arch, ARCHES -from dataclass import DataClass +from dictscheme import DictScheme from distro.distro import get_kupfer_local from distro.package import LocalPackage from packages.pkgbuild import Pkgbuild, _pkgbuilds_cache, discover_pkgbuilds, get_pkgbuild_by_path, init_pkgbuilds @@ -22,7 +22,7 @@ DEVICE_DEPRECATIONS = { } -class DeviceSummary(DataClass): +class DeviceSummary(DictScheme): name: str description: str arch: str @@ -43,7 +43,7 @@ class DeviceSummary(DataClass): return separator.join([f"{color_str(name, bold=True, use_colors=colors)}: {value}" for name, value in fields.items()]) -class Device(DataClass): +class Device(DictScheme): name: str arch: Arch package: Pkgbuild diff --git a/devices/deviceinfo.py b/devices/deviceinfo.py index c8df9cf..b9acf3a 100644 --- a/devices/deviceinfo.py +++ b/devices/deviceinfo.py @@ -9,14 +9,14 @@ from typing import Any, Mapping, Optional from config.state import config from constants import Arch -from dataclass import DataClass +from dictscheme import DictScheme PMOS_ARCHES_OVERRIDES: dict[str, Arch] = { "armv7": 'armv7h', } -class DeviceInfo(DataClass): +class DeviceInfo(DictScheme): arch: Arch name: str manufacturer: str diff --git a/dataclass.py b/dictscheme.py similarity index 98% rename from dataclass.py rename to dictscheme.py index 6bd19df..25578bb 100644 --- a/dataclass.py +++ b/dictscheme.py @@ -45,7 +45,7 @@ def resolve_dict_hints(hints: Any) -> Generator[tuple[Any, ...], None, None]: continue -class DataClass(Munch): +class DictScheme(Munch): _type_hints: ClassVar[dict[str, Any]] _strip_hidden: ClassVar[bool] = False @@ -118,7 +118,7 @@ class DataClass(Munch): if not (optional and value is None): assert issubclass(target_class, Munch) # despite the above assert, mypy doesn't seem to understand target_class is a Munch here - kwargs = {'validate': validate} if issubclass(target_class, DataClass) else {} + kwargs = {'validate': validate} if issubclass(target_class, DictScheme) else {} value = target_class(value, **kwargs) # type:ignore[attr-defined] else: # print(f"nothing to do: '{key}' was already {target_class}) @@ -217,7 +217,7 @@ class DataClass(Munch): if not v: result[k] = {} continue - if isinstance(v, DataClass): + if isinstance(v, DictScheme): # pass None in sparse and strip_hidden result[k] = v.toDict(strip_hidden=strip_hidden, sparse=sparse) continue @@ -228,13 +228,13 @@ class DataClass(Munch): _subhints = {} _hints = resolve_type_hint(hints[k], [dict]) hints_flat = list(flatten_hints(_hints)) - subclass = DataClass + subclass = DictScheme for hint in hints_flat: if get_origin(hint) == dict: _valtype = get_args(hint)[1] _subhints = {n: _valtype for n in v.keys()} break - if isinstance(hint, type) and issubclass(hint, DataClass): + if isinstance(hint, type) and issubclass(hint, DictScheme): subclass = hint _subhints = hint._type_hints break diff --git a/distro/repo_config.py b/distro/repo_config.py index 34c55e6..e263bcf 100644 --- a/distro/repo_config.py +++ b/distro/repo_config.py @@ -10,7 +10,7 @@ from typing 
import ClassVar, Optional, Mapping, Union from config.state import config from constants import Arch, BASE_DISTROS, KUPFER_HTTPS, REPOS_CONFIG_FILE, REPOSITORIES -from dataclass import DataClass, toml_inline_dicts, TomlPreserveInlineDictEncoder +from dictscheme import DictScheme, toml_inline_dicts, TomlPreserveInlineDictEncoder from utils import sha256sum REPOS_KEY = 'repos' @@ -22,7 +22,7 @@ BASEDISTROS_KEY = 'base_distros' _current_config: Optional[ReposConfigFile] -class AbstrRepoConfig(DataClass): +class AbstrRepoConfig(DictScheme): options: Optional[dict[str, str]] _strip_hidden: ClassVar[bool] = True _sparse: ClassVar[bool] = True @@ -37,12 +37,12 @@ class RepoConfig(AbstrRepoConfig): local_only: Optional[bool] -class BaseDistro(DataClass): +class BaseDistro(DictScheme): remote_url: Optional[str] repos: dict[str, BaseDistroRepo] -class ReposConfigFile(DataClass): +class ReposConfigFile(DictScheme): remote_url: Optional[str] repos: dict[str, RepoConfig] base_distros: dict[Arch, BaseDistro] diff --git a/flavours/flavour.py b/flavours/flavour.py index c9a41a1..4272eca 100644 --- a/flavours/flavour.py +++ b/flavours/flavour.py @@ -8,12 +8,12 @@ from typing import Optional from config.state import config from constants import FLAVOUR_DESCRIPTION_PREFIX, FLAVOUR_INFO_FILE -from dataclass import DataClass +from dictscheme import DictScheme from packages.pkgbuild import discover_pkgbuilds, get_pkgbuild_by_name, init_pkgbuilds, Pkgbuild from utils import color_str -class FlavourInfo(DataClass): +class FlavourInfo(DictScheme): rootfs_size: int # rootfs size in GB description: Optional[str] @@ -21,7 +21,7 @@ class FlavourInfo(DataClass): return f'rootfs_size: {self.rootfs_size}' -class Flavour(DataClass): +class Flavour(DictScheme): name: str pkgbuild: Pkgbuild description: str @@ -53,7 +53,7 @@ class Flavour(DataClass): def get_lines(k, v, key_prefix=''): results = [] full_k = f'{key_prefix}.{k}' if key_prefix else k - if not isinstance(v, (dict, DataClass)): + if not isinstance(v, (dict, DictScheme)): results = [f'{color_str(full_k, bold=True)}: {v}'] else: for _k, _v in v.items(): diff --git a/packages/srcinfo_cache.py b/packages/srcinfo_cache.py index 112591f..5cb2373 100644 --- a/packages/srcinfo_cache.py +++ b/packages/srcinfo_cache.py @@ -9,14 +9,14 @@ from typing import Any, ClassVar, Optional from config.state import config from constants import MAKEPKG_CMD, SRCINFO_FILE, SRCINFO_METADATA_FILE, SRCINFO_INITIALISED_FILE -from dataclass import DataClass +from dictscheme import DictScheme from exec.cmd import run_cmd from utils import sha256sum SRCINFO_CHECKSUM_FILES = ['PKGBUILD', SRCINFO_FILE] -class JsonFile(DataClass): +class JsonFile(DictScheme): _filename: ClassVar[str] _relative_path: str From 74a7aeb6683bf51c0ef6a011372c4812c2410496 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 17 Apr 2023 04:42:00 +0200 Subject: [PATCH 26/82] packages/cli: cmd_update(): add enforce_wrap() --- packages/cli.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/cli.py b/packages/cli.py index e5ff9a1..341fc44 100644 --- a/packages/cli.py +++ b/packages/cli.py @@ -158,6 +158,7 @@ def cmd_update( discard_changes: bool = False, ): """Update PKGBUILDs git repo""" + enforce_wrap() init_pkgbuilds(interactive=not non_interactive, lazy=False, update=True, switch_branch=switch_branch, discard_changes=discard_changes) if init_caches: init_pkgbuild_caches(clean_src_dirs=clean_src_dirs) From 44eaf0d767187aa2264a35287b02154017edb7e5 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 17 Apr 2023 
04:41:30 +0200 Subject: [PATCH 27/82] utils: add content-size to download_file --- utils.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/utils.py b/utils.py index 6cd613e..3fc157e 100644 --- a/utils.py +++ b/utils.py @@ -143,7 +143,13 @@ def download_file(path: str, url: str, update: bool = True): url_time = None if os.path.exists(path) and update: headers = requests.head(url).headers - if 'last-modified' in headers: + file_size = os.path.getsize(path) + missing = [i for i in ['Content-Length', 'last-modified'] if i not in headers] + if missing: + logging.debug(f"Headers not specified: {missing}") + if 'Content-Length' in headers and int(headers['Content-Length']) != file_size: + logging.debug(f"{path} size differs: local: {file_size}, http: {headers['Content-Length']}") + elif 'last-modified' in headers: url_time = parsedate(headers['last-modified']).astimezone() file_time = datetime.datetime.fromtimestamp(os.path.getmtime(path)).astimezone() if url_time == file_time: From fd2abd3805377fdf5d97b8cc6152ca101fb242cf Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 17 Apr 2023 04:43:07 +0200 Subject: [PATCH 28/82] exec/file: chmod(): add privileged=True, use False for get_temp_dir() --- exec/file.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/exec/file.py b/exec/file.py index 8471288..852ad48 100644 --- a/exec/file.py +++ b/exec/file.py @@ -8,7 +8,7 @@ from shutil import rmtree from tempfile import mkdtemp from typing import Optional, Union -from .cmd import run_root_cmd, elevation_noop, generate_cmd_su, wrap_in_bash, shell_quote +from .cmd import run_cmd, run_root_cmd, elevation_noop, generate_cmd_su, wrap_in_bash, shell_quote from utils import get_user_name, get_group_name @@ -41,7 +41,7 @@ def chown(path: str, user: Optional[Union[str, int]] = None, group: Optional[Uni raise Exception(f"Failed to change owner of '{path}' to '{owner}'") -def chmod(path, mode: Union[int, str] = 0o0755, force_sticky=True): +def chmod(path, mode: Union[int, str] = 0o0755, force_sticky=True, privileged: bool = True): if not isinstance(mode, str): octal = oct(mode)[2:] else: @@ -54,7 +54,7 @@ def chmod(path, mode: Union[int, str] = 0o0755, force_sticky=True): os.chmod(path, mode=octal) # type: ignore except: cmd = ["chmod", octal, path] - result = run_root_cmd(cmd) + result = run_cmd(cmd, switch_user='root' if privileged else None) assert isinstance(result, subprocess.CompletedProcess) if result.returncode: raise Exception(f"Failed to set mode of '{path}' to '{chmod}'") @@ -174,7 +174,7 @@ def symlink(source, target): def get_temp_dir(register_cleanup=True, mode: int = 0o0755): "create a new tempdir and sanitize ownership so root can access user files as god intended" t = mkdtemp() - chmod(t, mode) + chmod(t, mode, privileged=False) if register_cleanup: atexit.register(remove_file, t, recursive=True) return t From 7945a4756f7cf05558dd555d2ade73e32eb58148 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 17 Apr 2023 04:44:06 +0200 Subject: [PATCH 29/82] distro/repo: use persistent dir for repo db if RemoteRepo.cache_repo_db == True --- distro/distro.py | 41 +++++++++++++++++++++++++++++++---------- distro/repo.py | 22 +++++++++++++++------- 2 files changed, 46 insertions(+), 17 deletions(-) diff --git a/distro/distro.py b/distro/distro.py index c489b20..bbc6329 100644 --- a/distro/distro.py +++ b/distro/distro.py @@ -93,6 +93,11 @@ def get_kupfer(arch: str, url_template: str, scan: bool = False) -> Distro: scan=scan, ) assert isinstance(distro, 
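
A minimal standalone sketch of the freshness check this download_file patch adds, assuming requests is installed and that the server actually sends Content-Length/last-modified; the helper name and the use of email.utils.parsedate_to_datetime are illustrative choices, not the real utils.py code:

    import datetime
    import os
    from email.utils import parsedate_to_datetime

    import requests

    def is_download_needed(path: str, url: str) -> bool:
        if not os.path.exists(path):
            return True
        headers = requests.head(url).headers
        # A differing Content-Length is the cheapest possible staleness signal.
        if 'Content-Length' in headers and int(headers['Content-Length']) != os.path.getsize(path):
            return True
        # Otherwise fall back to comparing Last-Modified against the local mtime.
        if 'last-modified' in headers:
            remote = parsedate_to_datetime(headers['last-modified']).astimezone()
            local = datetime.datetime.fromtimestamp(os.path.getmtime(path)).astimezone()
            return remote != local
        # No usable headers: err on the side of re-downloading.
        return True
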
(LocalDistro, RemoteDistro)) + if remote: + assert isinstance(distro, RemoteDistro) + for repo in distro.repos.values(): + repo.cache_repo_db = True + return distro @@ -145,32 +150,42 @@ def get_RepoInfo(arch: Arch, repo_config: AbstrRepoConfig, default_url: Optional ) -def get_base_distro(arch: Arch, scan: bool = False, unsigned: bool = True) -> RemoteDistro: +def get_base_distro(arch: Arch, scan: bool = False, unsigned: bool = True, cache_db: bool = True) -> RemoteDistro: base_distros = get_repo_config().base_distros if base_distros is None or arch not in base_distros: base_distros = REPOS_CONFIG_DEFAULT.base_distros assert base_distros - distro: BaseDistro - distro = base_distros.get(arch) # type: ignore[assignment] + distro_config: BaseDistro + distro_config = base_distros.get(arch) # type: ignore[assignment] repos = {} - for repo, repo_config in distro.repos.items(): + for repo, repo_config in distro_config.repos.items(): if unsigned: repo_config['options'] = (repo_config.get('options', None) or {}) | {'SigLevel': 'Never'} - repos[repo] = get_RepoInfo(arch, repo_config, default_url=distro.remote_url) + repos[repo] = get_RepoInfo(arch, repo_config, default_url=distro_config.remote_url) - return RemoteDistro(arch=arch, repo_infos=repos, scan=scan) + distro = RemoteDistro(arch=arch, repo_infos=repos, scan=False) + if cache_db: + for r in distro.repos.values(): + assert isinstance(r, RemoteRepo) + r.cache_repo_db = True + if scan: + distro.scan() + return distro def get_kupfer_distro( arch: Arch, location: DistroLocation, scan: bool = False, + cache_db: bool = True, ) -> Distro: global _kupfer_https, _kupfer_local, _kupfer_local_chroots cls: type[Distro] cache: Mapping[str, Distro] repo_config = get_repo_config() + remote = False if location == DistroLocation.REMOTE: + remote = True cache = _kupfer_https default_url = repo_config.remote_url or KUPFER_HTTPS repos = {repo: get_RepoInfo(arch, conf, default_url) for repo, conf in repo_config.repos.items() if not conf.local_only} @@ -200,10 +215,16 @@ def get_kupfer_distro( distro = cls( arch=arch, repo_infos=repos, - scan=scan, + scan=False, ) assert isinstance(distro, (LocalDistro, RemoteDistro)) cache[arch] = distro + if remote and cache_db: + assert isinstance(distro, RemoteDistro) + for r in distro.repos.values(): + r.cache_repo_db = True + if scan: + distro.scan() return distro item: Distro = cache[arch] if scan and not item.is_scanned(): @@ -211,13 +232,13 @@ def get_kupfer_distro( return item -def get_kupfer_https(arch: Arch, scan: bool = False) -> RemoteDistro: - d = get_kupfer_distro(arch, location=DistroLocation.REMOTE, scan=scan) +def get_kupfer_https(arch: Arch, scan: bool = False, cache_db: bool = True) -> RemoteDistro: + d = get_kupfer_distro(arch, location=DistroLocation.REMOTE, scan=scan, cache_db=cache_db) assert isinstance(d, RemoteDistro) return d -def get_kupfer_local(arch: Optional[Arch] = None, in_chroot: bool = True, scan: bool = False) -> LocalDistro: +def get_kupfer_local(arch: Optional[Arch] = None, scan: bool = False, in_chroot: bool = True) -> LocalDistro: arch = arch or config.runtime.arch assert arch location = DistroLocation.CHROOT if in_chroot else DistroLocation.LOCAL diff --git a/distro/repo.py b/distro/repo.py index b63c293..703d067 100644 --- a/distro/repo.py +++ b/distro/repo.py @@ -2,11 +2,13 @@ from copy import deepcopy import logging import os import tarfile -import tempfile -import urllib.request from typing import Generic, TypeVar +from config.state import config +from exec.file import get_temp_dir 
+from utils import download_file + from .package import BinaryPackage, LocalPackage, RemotePackage BinaryPackageType = TypeVar('BinaryPackageType', bound=BinaryPackage) @@ -112,6 +114,11 @@ class LocalRepo(Repo[LocalPackage]): class RemoteRepo(Repo[RemotePackage]): + cache_repo_db: bool + + def __init__(self, *kargs, cache_repo_db: bool = False, **kwargs): + self.cache_repo_db = cache_repo_db + super().__init__(*kargs, **kwargs) def _parse_desc(self, desc_text: str) -> RemotePackage: return RemotePackage.parse_desc(desc_text, resolved_repo_url=self.resolved_url) @@ -119,8 +126,9 @@ class RemoteRepo(Repo[RemotePackage]): def acquire_db_file(self) -> str: uri = f'{self.resolved_url}/{self.name}.db' logging.info(f'Downloading repo file from {uri}') - with urllib.request.urlopen(uri) as request: - fd, path = tempfile.mkstemp() - with open(fd, 'wb') as writable: - writable.write(request.read()) - return path + assert self.arch and self.name, f"repo has incomplete information: {self.name=}, {self.arch=}" + path = get_temp_dir() if not self.cache_repo_db else os.path.join(config.get_path('pacman'), 'repo_dbs', self.arch) + os.makedirs(path, exist_ok=True) + repo_file = f'{path}/{self.name}.tar.gz' + download_file(repo_file, uri, update=True) + return repo_file From dbc512ee3ff29ebd8bea21c9f419f307c563617e Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 24 Apr 2023 01:40:10 +0200 Subject: [PATCH 30/82] packages/cli: cmd_check(): add noextract PKGBUILD field --- packages/cli.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/cli.py b/packages/cli.py index 341fc44..2e72be0 100644 --- a/packages/cli.py +++ b/packages/cli.py @@ -377,6 +377,7 @@ def cmd_check(paths): commit_key: is_git_package, source_key: False, sha256sums_key: False, + 'noextract': False, } pkgbuild_path = os.path.join(config.get_path('pkgbuilds'), package.path, 'PKGBUILD') with open(pkgbuild_path, 'r') as file: From 68154467f3c9bfbc017a15ae89f3806316359d0a Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 24 Apr 2023 15:34:02 +0200 Subject: [PATCH 31/82] distro/repo_config: reformat with yapf 0.33 --- distro/repo_config.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/distro/repo_config.py b/distro/repo_config.py index e263bcf..5f32d2b 100644 --- a/distro/repo_config.py +++ b/distro/repo_config.py @@ -110,14 +110,18 @@ REPOS_CONFIG_DEFAULT = ReposConfigFile({ 'kupfer_local': REPO_DEFAULTS | { LOCALONLY_KEY: True }, - **{r: deepcopy(REPO_DEFAULTS) for r in REPOSITORIES}, + **{ + r: deepcopy(REPO_DEFAULTS) for r in REPOSITORIES + }, }, BASEDISTROS_KEY: { arch: { REMOTEURL_KEY: None, - 'repos': {k: { - 'remote_url': v - } for k, v in arch_def['repos'].items()}, + 'repos': { + k: { + 'remote_url': v + } for k, v in arch_def['repos'].items() + }, } for arch, arch_def in BASE_DISTROS.items() }, }) From 08285a7931bcf3322a8291405887f2a2787b247b Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 24 Apr 2023 17:15:51 +0200 Subject: [PATCH 32/82] packages/pkgbuild: fix null deref in __repr__() --- packages/pkgbuild.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/pkgbuild.py b/packages/pkgbuild.py index b61884a..65722ba 100644 --- a/packages/pkgbuild.py +++ b/packages/pkgbuild.py @@ -201,8 +201,8 @@ class Pkgbuild(PackageInfo): return ','.join([ 'Pkgbuild(' + self.name, repr(self.path), - self.version + ("🔄" if self.sources_refreshed else ""), - self.mode + ')', + str(self.version) + ("🔄" if self.sources_refreshed else ""), + repr(self.mode) + ')', ]) def 
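
The __repr__ fix above matters because version and mode may legitimately still be None before a PKGBUILD has been parsed, and concatenating None to a str raises instead of printing. A tiny illustration of the failure mode and the str()/repr() coercion (hypothetical values, not the real Pkgbuild class):

    version = None  # e.g. a Pkgbuild whose sources were never parsed

    try:
        print('Pkgbuild(linux-main, ' + version + ')')
    except TypeError as err:
        print(f'old __repr__ would crash here: {err}')  # can only concatenate str (not "NoneType") to str

    # str()/repr() always yield printable text, even for None:
    print('Pkgbuild(linux-main, ' + str(version) + ')')  # Pkgbuild(linux-main, None)
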
names(self) -> list[str]: From 604f123067c76e4fd84647e006217a4a990e326d Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Fri, 6 Jan 2023 04:22:46 +0100 Subject: [PATCH 33/82] image/fastboot: flash_image(): add optional sparse_size parameter --- image/fastboot.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/image/fastboot.py b/image/fastboot.py index a2ce7a6..d8816bf 100644 --- a/image/fastboot.py +++ b/image/fastboot.py @@ -1,6 +1,8 @@ import logging import subprocess +from typing import Optional + def fastboot_erase_dtbo(): logging.info("Fastboot: Erasing DTBO") @@ -14,10 +16,11 @@ def fastboot_erase_dtbo(): ) -def fastboot_flash(partition, file): +def fastboot_flash(partition: str, file: str, sparse_size: Optional[str] = None): logging.info(f"Fastboot: Flashing {file} to {partition}") result = subprocess.run([ 'fastboot', + *(['-S', sparse_size] if sparse_size is not None else []), 'flash', partition, file, From 8a266f914956fb5231dea38db601766ff3acb6dc Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Fri, 6 Jan 2023 04:37:23 +0100 Subject: [PATCH 34/82] image/fastboot: use exec.cmd.run_cmd() for loggability --- image/fastboot.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/image/fastboot.py b/image/fastboot.py index d8816bf..937b348 100644 --- a/image/fastboot.py +++ b/image/fastboot.py @@ -1,12 +1,12 @@ import logging -import subprocess +from exec.cmd import run_cmd, CompletedProcess from typing import Optional def fastboot_erase_dtbo(): logging.info("Fastboot: Erasing DTBO") - subprocess.run( + run_cmd( [ 'fastboot', 'erase', @@ -18,23 +18,25 @@ def fastboot_erase_dtbo(): def fastboot_flash(partition: str, file: str, sparse_size: Optional[str] = None): logging.info(f"Fastboot: Flashing {file} to {partition}") - result = subprocess.run([ + result = run_cmd([ 'fastboot', *(['-S', sparse_size] if sparse_size is not None else []), 'flash', partition, file, ]) + assert isinstance(result, CompletedProcess) if result.returncode != 0: raise Exception(f'Failed to flash {file}') def fastboot_boot(file): logging.info(f"Fastboot: booting {file}") - result = subprocess.run([ + result = run_cmd([ 'fastboot', 'boot', file, ]) + assert isinstance(result, CompletedProcess) if result.returncode != 0: raise Exception(f'Failed to boot {file} using fastboot') From 69b7ea9db26831fd9b57eba2ad224f5c378df218 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Fri, 6 Jan 2023 04:38:05 +0100 Subject: [PATCH 35/82] image/flash: implement fastboot rootfs image flashing, add --no-shrink Use fastboot by default instead of jumpdrive, respecting the deviceinfo --- image/flash.py | 116 ++++++++++++++++++++++++++++++++----------------- 1 file changed, 75 insertions(+), 41 deletions(-) diff --git a/image/flash.py b/image/flash.py index 4351d3d..f68a1d8 100644 --- a/image/flash.py +++ b/image/flash.py @@ -4,7 +4,7 @@ import click from typing import Optional -from constants import FLASH_PARTS, LOCATIONS +from constants import FLASH_PARTS, LOCATIONS, FASTBOOT, JUMPDRIVE from exec.cmd import run_root_cmd from exec.file import get_temp_dir from devices.device import get_profile_device @@ -13,28 +13,76 @@ from flavours.cli import profile_option from wrapper import enforce_wrap from .fastboot import fastboot_flash -from .image import dd_image, partprobe, shrink_fs, losetup_rootfs_image, losetup_destroy, dump_aboot, dump_lk2nd, dump_qhypstub, get_image_name, get_image_path +from .image import dd_image, dump_aboot, dump_lk2nd, dump_qhypstub, get_image_path, losetup_destroy, 
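
For context on the new sparse_size parameter: fastboot's -S flag caps the size of each sparse chunk uploaded to the bootloader, which is what later makes flashing rootfs images larger than the device's maximum download size possible. A rough sketch of the argv the helper ends up building (function name and the plain print are illustrative; the real code goes through the project's command wrappers):

    from typing import Optional

    def fastboot_flash_cmd(partition: str, file: str, sparse_size: Optional[str] = None) -> list[str]:
        # '-S 100M' tells fastboot to split the image into sparse chunks of at most 100M each.
        return ['fastboot', *(['-S', sparse_size] if sparse_size else []), 'flash', partition, file]

    print(' '.join(fastboot_flash_cmd('userdata', 'rootfs.img', sparse_size='100M')))
    # -> fastboot -S 100M flash userdata rootfs.img
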
losetup_rootfs_image, partprobe, shrink_fs ABOOT = FLASH_PARTS['ABOOT'] LK2ND = FLASH_PARTS['LK2ND'] QHYPSTUB = FLASH_PARTS['QHYPSTUB'] ROOTFS = FLASH_PARTS['ROOTFS'] +DD = 'dd' + +FLASH_METHODS = [FASTBOOT, JUMPDRIVE, DD] + + +def find_jumpdrive(location: str) -> str: + if location not in LOCATIONS: + raise Exception(f'Invalid location {location}. Choose one of {", ".join(LOCATIONS)}') + dir = '/dev/disk/by-id' + for file in os.listdir(dir): + sanitized_file = file.replace('-', '').replace('_', '').lower() + if f'jumpdrive{location.split("-")[0]}' in sanitized_file: + return os.path.realpath(os.path.join(dir, file)) + raise Exception('Unable to discover Jumpdrive') + + +def test_blockdev(path: str): + partprobe(path) + result = run_root_cmd(['lsblk', path, '-o', 'SIZE'], capture_output=True) + if result.returncode != 0: + raise Exception(f'Failed to lsblk {path}') + if result.stdout == b'SIZE\n 0B\n': + raise Exception(f'Disk {path} has a size of 0B. That probably means it is not available (e.g. no' + 'microSD inserted or no microSD card slot installed in the device) or corrupt or defect') + + +def prepare_minimal_image(source_path: str, sector_size: int) -> str: + minimal_image_dir = get_temp_dir(register_cleanup=True) + minimal_image_path = os.path.join(minimal_image_dir, f'minimal-{os.path.basename(source_path)}') + + shutil.copyfile(source_path, minimal_image_path) + + loop_device = losetup_rootfs_image(minimal_image_path, sector_size) + partprobe(loop_device) + shrink_fs(loop_device, minimal_image_path, sector_size) + losetup_destroy(loop_device) + return minimal_image_path + @click.command(name='flash') @profile_option +@click.option('-m', '--method', type=click.Choice(FLASH_METHODS)) +@click.option('--split-size', help='Chunk size when splitting the image into sparse files via fastboot') +@click.option('--shrink/--no-shrink', is_flag=True, default=True, help="Don't copy and shrink the image file to minimal size") @click.argument('what', type=click.Choice(list(FLASH_PARTS.values()))) @click.argument('location', type=str, required=False) -def cmd_flash(what: str, location: str, profile: Optional[str] = None): +def cmd_flash( + what: str, + location: str, + method: Optional[str] = None, + split_size: Optional[str] = None, + profile: Optional[str] = None, + shrink: bool = True, +): """Flash a partition onto a device. 
`location` takes either a path to a block device or one of emmc, sdcard""" enforce_wrap() device = get_profile_device(profile) flavour = get_profile_flavour(profile).name - device_image_name = get_image_name(device, flavour) device_image_path = get_image_path(device, flavour) deviceinfo = device.parse_deviceinfo() sector_size = deviceinfo.flash_pagesize + method = method or deviceinfo.flash_method if not sector_size: raise Exception(f"Device {device.name} has no flash_pagesize specified") @@ -42,46 +90,32 @@ def cmd_flash(what: str, location: str, profile: Optional[str] = None): raise Exception(f'Unknown what "{what}", must be one of {", ".join(FLASH_PARTS.values())}') if what == ROOTFS: - if location is None: - raise Exception(f'You need to specify a location to flash {what} to') - path = '' - if location.startswith("/dev/"): - path = location + if method not in FLASH_METHODS: + raise Exception(f"Flash method {method} not supported!") + if not location: + raise Exception(f'You need to specify a location to flash {what} to') + path = '' + image_path = prepare_minimal_image(device_image_path, sector_size) if shrink else device_image_path + if method == FASTBOOT: + fastboot_flash( + partition=location, + file=image_path, + sparse_size='100M', # TODO: make configurable + ) + elif method in [JUMPDRIVE, DD]: + if method == DD or location.startswith("/") or (location not in LOCATIONS and os.path.exists(location)): + path = location + elif method == JUMPDRIVE: + path = find_jumpdrive(location) + test_blockdev(path) + if dd_image(input=image_path, output=path).returncode != 0: + raise Exception(f'Failed to flash {image_path} to {path}') else: - if location not in LOCATIONS: - raise Exception(f'Invalid location {location}. Choose one of {", ".join(LOCATIONS)}') - - dir = '/dev/disk/by-id' - for file in os.listdir(dir): - sanitized_file = file.replace('-', '').replace('_', '').lower() - if f'jumpdrive{location.split("-")[0]}' in sanitized_file: - path = os.path.realpath(os.path.join(dir, file)) - partprobe(path) - result = run_root_cmd(['lsblk', path, '-o', 'SIZE'], capture_output=True) - if result.returncode != 0: - raise Exception(f'Failed to lsblk {path}') - if result.stdout == b'SIZE\n 0B\n': - raise Exception(f'Disk {path} has a size of 0B. That probably means it is not available (e.g. 
no' - 'microSD inserted or no microSD card slot installed in the device) or corrupt or defect') - if path == '': - raise Exception('Unable to discover Jumpdrive') - - minimal_image_dir = get_temp_dir(register_cleanup=True) - minimal_image_path = os.path.join(minimal_image_dir, f'minimal-{device_image_name}') - - shutil.copyfile(device_image_path, minimal_image_path) - - loop_device = losetup_rootfs_image(minimal_image_path, sector_size) - partprobe(loop_device) - shrink_fs(loop_device, minimal_image_path, sector_size) - losetup_destroy(loop_device) - - result = dd_image(input=minimal_image_path, output=path) - - if result.returncode != 0: - raise Exception(f'Failed to flash {minimal_image_path} to {path}') + raise Exception(f'Unhandled flash method "{method}" for "{what}"') else: + if method and method != FASTBOOT: + raise Exception(f'Flashing "{what}" with method "{method}" not supported, try no parameter or "{FASTBOOT}"') loop_device = losetup_rootfs_image(device_image_path, sector_size) if what == ABOOT: path = dump_aboot(f'{loop_device}p1') From 6648a77822b7c7b28c06d19d7d8d773d36d76508 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 9 Jan 2023 05:47:24 +0100 Subject: [PATCH 36/82] image/cli: add --sector-size option --- image/boot.py | 5 +-- image/flash.py | 4 ++- image/image.py | 87 +++++++++++++++++++++++++++++++++----------------- 3 files changed, 64 insertions(+), 32 deletions(-) diff --git a/image/boot.py b/image/boot.py index 07a0f35..a24e2e5 100644 --- a/image/boot.py +++ b/image/boot.py @@ -24,13 +24,14 @@ TYPES = [LK2ND, JUMPDRIVE, ABOOT] @click.command(name='boot') @profile_option @click.argument('type', required=False, default=ABOOT, type=click.Choice(TYPES)) -def cmd_boot(type: str, profile: Optional[str] = None): +@click.option('-b', '--sector-size', type=int, help="Override the device's sector size", default=None) +def cmd_boot(type: str, profile: Optional[str] = None, sector_size: Optional[int] = None): """Boot JumpDrive or the Kupfer aboot image. Erases Android DTBO in the process.""" enforce_wrap() device = get_profile_device(profile) flavour = get_profile_flavour(profile).name deviceinfo = device.parse_deviceinfo() - sector_size = deviceinfo.flash_pagesize + sector_size = sector_size or deviceinfo.flash_pagesize if not sector_size: raise Exception(f"Device {device.name} has no flash_pagesize specified") image_path = get_image_path(device, flavour) diff --git a/image/flash.py b/image/flash.py index f68a1d8..591bc9e 100644 --- a/image/flash.py +++ b/image/flash.py @@ -64,6 +64,7 @@ def prepare_minimal_image(source_path: str, sector_size: int) -> str: @click.option('-m', '--method', type=click.Choice(FLASH_METHODS)) @click.option('--split-size', help='Chunk size when splitting the image into sparse files via fastboot') @click.option('--shrink/--no-shrink', is_flag=True, default=True, help="Don't copy and shrink the image file to minimal size") +@click.option('-b', '--sector-size', type=int, help="Override the device's sector size", default=None) @click.argument('what', type=click.Choice(list(FLASH_PARTS.values()))) @click.argument('location', type=str, required=False) def cmd_flash( @@ -73,6 +74,7 @@ def cmd_flash( split_size: Optional[str] = None, profile: Optional[str] = None, shrink: bool = True, + sector_size: Optional[int] = None, ): """Flash a partition onto a device. 
`location` takes either a path to a block device or one of emmc, sdcard""" enforce_wrap() @@ -81,7 +83,7 @@ def cmd_flash( device_image_path = get_image_path(device, flavour) deviceinfo = device.parse_deviceinfo() - sector_size = deviceinfo.flash_pagesize + sector_size = sector_size or deviceinfo.flash_pagesize method = method or deviceinfo.flash_method if not sector_size: raise Exception(f"Device {device.name} has no flash_pagesize specified") diff --git a/image/image.py b/image/image.py index bdc3200..84b265d 100644 --- a/image/image.py +++ b/image/image.py @@ -342,33 +342,61 @@ def cmd_image(): """Build, flash and boot device images""" +sectorsize_option = click.option( + '-b', + '--sector-size', + help="Override the device's sector size", + type=int, + default=None, +) + + @cmd_image.command(name='build') @click.argument('profile_name', required=False) -@click.option('--local-repos/--no-local-repos', - '-l/-L', - default=True, - show_default=True, - help='Whether to use local package repos at all or only use HTTPS repos.') -@click.option('--build-pkgs/--no-build-pkgs', - '-p/-P', - default=True, - show_default=True, - help='Whether to build missing/outdated local packages if local repos are enabled.') -@click.option('--no-download-pkgs', - is_flag=True, - default=False, - help='Disable trying to download packages instead of building if building is enabled.') -@click.option('--block-target', type=click.Path(), default=None, help='Override the block device file to write the final image to') -@click.option('--skip-part-images', - is_flag=True, - default=False, - help='Skip creating image files for the partitions and directly work on the target block device.') -def cmd_build(profile_name: Optional[str] = None, - local_repos: bool = True, - build_pkgs: bool = True, - no_download_pkgs=False, - block_target: Optional[str] = None, - skip_part_images: bool = False): +@click.option( + '--local-repos/--no-local-repos', + '-l/-L', + help='Whether to use local package repos at all or only use HTTPS repos.', + default=True, + show_default=True, + is_flag=True, +) +@click.option( + '--build-pkgs/--no-build-pkgs', + '-p/-P', + help='Whether to build missing/outdated local packages if local repos are enabled.', + default=True, + show_default=True, + is_flag=True, +) +@click.option( + '--no-download-pkgs', + help='Disable trying to download packages instead of building if building is enabled.', + default=False, + is_flag=True, +) +@click.option( + '--block-target', + help='Override the block device file to write the final image to', + type=click.Path(), + default=None, +) +@click.option( + '--skip-part-images', + help='Skip creating image files for the partitions and directly work on the target block device.', + default=False, + is_flag=True, +) +@sectorsize_option +def cmd_build( + profile_name: Optional[str] = None, + local_repos: bool = True, + build_pkgs: bool = True, + no_download_pkgs=False, + block_target: Optional[str] = None, + sector_size: Optional[int] = None, + skip_part_images: bool = False, +): """ Build a device image. 
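
The option refactor above leans on the fact that click.option() returns an ordinary decorator, so one option definition can be shared by several subcommands instead of being copy-pasted. A self-contained sketch of that pattern with toy command names (not the real image CLI):

    import click

    # Define the option once ...
    sector_size_option = click.option('-b', '--sector-size', type=int, default=None,
                                      help="Override the device's sector size")

    @click.group()
    def cli():
        pass

    # ... and apply the very same decorator to as many commands as needed.
    @cli.command()
    @sector_size_option
    def build(sector_size):
        click.echo(f'build: {sector_size=}')

    @cli.command()
    @sector_size_option
    def inspect(sector_size):
        click.echo(f'inspect: {sector_size=}')

    if __name__ == '__main__':
        cli()
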
@@ -400,7 +428,7 @@ def cmd_build(profile_name: Optional[str] = None, build_packages(pkgbuilds, arch, try_download=not no_download_pkgs) deviceinfo = device.parse_deviceinfo() - sector_size = deviceinfo.flash_pagesize + sector_size = sector_size or deviceinfo.flash_pagesize if not sector_size: raise Exception(f"Device {device.name} has no flash_pagesize specified") @@ -453,9 +481,10 @@ def cmd_build(profile_name: Optional[str] = None, @cmd_image.command(name='inspect') -@click.option('--shell', '-s', help="Open a shell in the image's rootfs", is_flag=True) +@click.option('--shell', '-s', is_flag=True) +@sectorsize_option @click.argument('profile', required=False) -def cmd_inspect(profile: Optional[str] = None, shell: bool = False): +def cmd_inspect(profile: Optional[str] = None, shell: bool = False, sector_size: Optional[int] = None): """Loop-mount the device image for inspection.""" config.enforce_profile_device_set() config.enforce_profile_flavour_set() @@ -464,7 +493,7 @@ def cmd_inspect(profile: Optional[str] = None, shell: bool = False): arch = device.arch flavour = get_profile_flavour(profile).name deviceinfo = device.parse_deviceinfo() - sector_size = deviceinfo.flash_pagesize + sector_size = sector_size or deviceinfo.flash_pagesize if not sector_size: raise Exception(f"Device {device.name} has no flash_pagesize specified") From 3ac8fc06894e42ab06f7a2a7c1dac04e68f3c311 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Wed, 26 Apr 2023 13:26:16 +0200 Subject: [PATCH 37/82] image/flash: actually use --split-size --- image/flash.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/image/flash.py b/image/flash.py index 591bc9e..afa8f60 100644 --- a/image/flash.py +++ b/image/flash.py @@ -103,7 +103,7 @@ def cmd_flash( fastboot_flash( partition=location, file=image_path, - sparse_size='100M', # TODO: make configurable + sparse_size=split_size if split_size is not None else '100M', ) elif method in [JUMPDRIVE, DD]: if method == DD or location.startswith("/") or (location not in LOCATIONS and os.path.exists(location)): From 4ba5f87f1ef9d8d39ef50674bd6e73237e6169d4 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sat, 29 Apr 2023 22:29:48 +0200 Subject: [PATCH 38/82] image: factor out get_fs_size() from shrink_fs() --- image/image.py | 44 ++++++++++++++++++++++++++++++++------------ 1 file changed, 32 insertions(+), 12 deletions(-) diff --git a/image/image.py b/image/image.py index 84b265d..a6e73e9 100644 --- a/image/image.py +++ b/image/image.py @@ -44,10 +44,32 @@ def partprobe(device: str): return run_root_cmd(['partprobe', device]) +def bytes_to_sectors(b: int, sector_size: int, round_up: bool = True): + sectors, rest = divmod(b, sector_size) + if rest and round_up: + sectors += 1 + return sectors + + +def get_fs_size(partition: str) -> tuple[int, int]: + blocks_cmd = run_root_cmd(['dumpe2fs', '-h', partition], env={"LC_ALL": "C"}, capture_output=True) + if blocks_cmd.returncode != 0: + logging.debug(f"dumpe2fs stdout:\n: {blocks_cmd.stdout}") + logging.debug(f"dumpe2fs stderr:\n {blocks_cmd.stderr}") + raise Exception(f'Failed to detect new filesystem size of {partition}') + blocks_text = blocks_cmd.stdout.decode('utf-8') if blocks_cmd.stdout else '' + try: + fs_blocks = int(re.search('\\nBlock count:[ ]+([0-9]+)\\n', blocks_text, flags=re.MULTILINE).group(1)) # type: ignore[union-attr] + fs_block_size = int(re.search('\\nBlock size:[ ]+([0-9]+)\\n', blocks_text).group(1)) # type: ignore[union-attr] + except Exception as ex: + logging.debug(f"dumpe2fs stdout:\n 
{blocks_text}") + logging.debug(f"dumpe2fs stderr:\n: {blocks_cmd.stderr}") + logging.info("Failed to scrape block size and count from dumpe2fs:", ex) + raise ex + return fs_blocks, fs_block_size + + def shrink_fs(loop_device: str, file: str, sector_size: int): - # 8: 512 bytes sectors - # 1: 4096 bytes sectors - sectors_blocks_factor = 4096 // sector_size partprobe(loop_device) logging.debug(f"Checking filesystem at {loop_device}p2") result = run_root_cmd(['e2fsck', '-fy', f'{loop_device}p2']) @@ -55,18 +77,16 @@ def shrink_fs(loop_device: str, file: str, sector_size: int): # https://man7.org/linux/man-pages/man8/e2fsck.8.html#EXIT_CODE raise Exception(f'Failed to e2fsck {loop_device}p2 with exit code {result.returncode}') - logging.debug(f'Shrinking filesystem at {loop_device}p2') - result = run_root_cmd(['resize2fs', '-M', f'{loop_device}p2'], capture_output=True) + logging.info(f'Shrinking filesystem at {loop_device}p2') + result = run_root_cmd(['resize2fs', '-M', f'{loop_device}p2']) if result.returncode != 0: - print(result.stdout) - print(result.stderr) raise Exception(f'Failed to resize2fs {loop_device}p2') - logging.debug(f'Finding end block of shrunken filesystem on {loop_device}p2') - blocks = int(re.search('is now [0-9]+', result.stdout.decode('utf-8')).group(0).split(' ')[2]) # type: ignore - sectors = blocks * sectors_blocks_factor + logging.debug(f'Reading size of shrunken filesystem on {loop_device}p2') + fs_blocks, fs_block_size = get_fs_size(f'{loop_device}p2') + sectors = bytes_to_sectors(fs_blocks * fs_block_size, sector_size) - logging.debug(f'Shrinking partition at {loop_device}p2 to {sectors} sectors') + logging.info(f'Shrinking partition at {loop_device}p2 to {sectors} sectors ({sectors * sector_size} bytes)') child_proccess = subprocess.Popen( generate_cmd_su(['fdisk', '-b', str(sector_size), loop_device], switch_user='root'), # type: ignore stdin=subprocess.PIPE, @@ -92,7 +112,7 @@ def shrink_fs(loop_device: str, file: str, sector_size: int): if returncode > 1: raise Exception(f'Failed to shrink partition size of {loop_device}p2 with fdisk') - partprobe(loop_device) + partprobe(loop_device).check_returncode() logging.debug(f'Finding end sector of partition at {loop_device}p2') result = run_root_cmd(['fdisk', '-b', str(sector_size), '-l', loop_device], capture_output=True) From 33e1214aef22a2d8c0ac5eedadd9d35bfe732a36 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 30 Apr 2023 03:24:17 +0200 Subject: [PATCH 39/82] image/fastboot: add --confirm option and generalize fastboot_erase{_dtbo,}() --- image/boot.py | 33 +++++++++++++++++++++-------- image/fastboot.py | 53 +++++++++++++++++++++++++++++++++-------------- image/flash.py | 17 +++++++++++---- 3 files changed, 75 insertions(+), 28 deletions(-) diff --git a/image/boot.py b/image/boot.py index a24e2e5..73ea6fd 100644 --- a/image/boot.py +++ b/image/boot.py @@ -12,28 +12,42 @@ from flavours.flavour import get_profile_flavour from flavours.cli import profile_option from wrapper import enforce_wrap -from .fastboot import fastboot_boot, fastboot_erase_dtbo +from .fastboot import fastboot_boot, fastboot_erase from .image import get_device_name, losetup_rootfs_image, get_image_path, dump_aboot, dump_lk2nd LK2ND = FLASH_PARTS['LK2ND'] ABOOT = FLASH_PARTS['ABOOT'] -TYPES = [LK2ND, JUMPDRIVE, ABOOT] +BOOT_TYPES = [LK2ND, JUMPDRIVE, ABOOT] @click.command(name='boot') @profile_option -@click.argument('type', required=False, default=ABOOT, type=click.Choice(TYPES)) +@click.argument('type', required=False, default=ABOOT, 
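
What get_fs_size() and bytes_to_sectors() add up to: the filesystem size in bytes is dumpe2fs' "Block count" times "Block size", and the conversion to device sectors must round up, never down, or the partition would be cut shorter than the filesystem it holds. A small sketch with simplified regexes and made-up sample output:

    import re

    sample = (
        "Filesystem volume name:   kupfer_root\n"
        "Block count:              262144\n"
        "Block size:               4096\n"
    )

    def parse_fs_size(dumpe2fs_output: str) -> tuple[int, int]:
        blocks = int(re.search(r"Block count:\s+([0-9]+)", dumpe2fs_output).group(1))
        block_size = int(re.search(r"Block size:\s+([0-9]+)", dumpe2fs_output).group(1))
        return blocks, block_size

    def bytes_to_sectors(b: int, sector_size: int) -> int:
        sectors, rest = divmod(b, sector_size)
        return sectors + 1 if rest else sectors  # a partial sector still occupies a whole one

    blocks, block_size = parse_fs_size(sample)
    print(bytes_to_sectors(blocks * block_size, 512))  # 2097152 sectors for this 1 GiB filesystem
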
type=click.Choice(BOOT_TYPES)) @click.option('-b', '--sector-size', type=int, help="Override the device's sector size", default=None) -def cmd_boot(type: str, profile: Optional[str] = None, sector_size: Optional[int] = None): +@click.option( + '--erase-dtbo/--no-erase-dtbo', + is_flag=True, + default=True, + show_default=True, + help="Erase the DTBO partition before flashing", +) +@click.option('--confirm', is_flag=True, help="Ask for confirmation before executing fastboot commands") +def cmd_boot( + type: str, + profile: Optional[str] = None, + sector_size: Optional[int] = None, + erase_dtbo: bool = True, + confirm: bool = False, +): """Boot JumpDrive or the Kupfer aboot image. Erases Android DTBO in the process.""" enforce_wrap() device = get_profile_device(profile) flavour = get_profile_flavour(profile).name deviceinfo = device.parse_deviceinfo() - sector_size = sector_size or deviceinfo.flash_pagesize + sector_size = sector_size or device.get_image_sectorsize_default() if not sector_size: - raise Exception(f"Device {device.name} has no flash_pagesize specified") + raise Exception(f"Device {device.name} has no rootfs_image_sector_size specified") image_path = get_image_path(device, flavour) strategy = deviceinfo.flash_method if not strategy: @@ -54,7 +68,8 @@ def cmd_boot(type: str, profile: Optional[str] = None, sector_size: Optional[int path = dump_aboot(loop_device + 'p1') else: raise Exception(f'Unknown boot image type {type}') - fastboot_erase_dtbo() - fastboot_boot(path) + if erase_dtbo: + fastboot_erase('dtbo', confirm=confirm) + fastboot_boot(path, confirm=confirm) else: - raise Exception(f"Unknown flash strategy {strategy} for device {device.name}") + raise Exception(f'Unsupported flash strategy "{strategy}" for device {device.name}') diff --git a/image/fastboot.py b/image/fastboot.py index 937b348..cbe7ec6 100644 --- a/image/fastboot.py +++ b/image/fastboot.py @@ -1,42 +1,65 @@ +import click import logging from exec.cmd import run_cmd, CompletedProcess from typing import Optional -def fastboot_erase_dtbo(): - logging.info("Fastboot: Erasing DTBO") +def confirm_cmd(cmd: list[str], color='green', default=True, msg='Really execute fastboot cmd?') -> bool: + return click.confirm( + f'{click.style(msg, fg=color, bold=True)} {" ".join(cmd)}', + default=default, + abort=False, + ) + + +def fastboot_erase(target: str, confirm: bool = False): + if not target: + raise Exception(f"No fastboot erase target specified: {repr(target)}") + cmd = [ + 'fastboot', + 'erase', + target, + ] + if confirm: + if not confirm_cmd(cmd, msg=f'Really erase fastboot "{target}" partition?', color='yellow'): + raise Exception("user aborted") + logging.info(f"Fastboot: Erasing {target}") run_cmd( - [ - 'fastboot', - 'erase', - 'dtbo', - ], + cmd, capture_output=True, ) -def fastboot_flash(partition: str, file: str, sparse_size: Optional[str] = None): - logging.info(f"Fastboot: Flashing {file} to {partition}") - result = run_cmd([ +def fastboot_flash(partition: str, file: str, sparse_size: Optional[str] = None, confirm: bool = False): + cmd = [ 'fastboot', *(['-S', sparse_size] if sparse_size is not None else []), 'flash', partition, file, - ]) + ] + if confirm: + if not confirm_cmd(cmd): + raise Exception("user aborted") + logging.info(f"Fastboot: Flashing {file} to {partition}") + result = run_cmd(cmd) assert isinstance(result, CompletedProcess) if result.returncode != 0: raise Exception(f'Failed to flash {file}') -def fastboot_boot(file): - logging.info(f"Fastboot: booting {file}") - result = run_cmd([ +def 
fastboot_boot(file, confirm: bool = False): + cmd = [ 'fastboot', 'boot', file, - ]) + ] + if confirm: + if not confirm_cmd(cmd): + raise Exception("user aborted") + logging.info(f"Fastboot: booting {file}") + result = run_cmd(cmd) assert isinstance(result, CompletedProcess) if result.returncode != 0: raise Exception(f'Failed to boot {file} using fastboot') diff --git a/image/flash.py b/image/flash.py index afa8f60..da8c9ad 100644 --- a/image/flash.py +++ b/image/flash.py @@ -63,8 +63,9 @@ def prepare_minimal_image(source_path: str, sector_size: int) -> str: @profile_option @click.option('-m', '--method', type=click.Choice(FLASH_METHODS)) @click.option('--split-size', help='Chunk size when splitting the image into sparse files via fastboot') -@click.option('--shrink/--no-shrink', is_flag=True, default=True, help="Don't copy and shrink the image file to minimal size") +@click.option('--shrink/--no-shrink', is_flag=True, default=True, help="Copy and shrink the image file to minimal size") @click.option('-b', '--sector-size', type=int, help="Override the device's sector size", default=None) +@click.option('--confirm', is_flag=True, help="Ask for confirmation before executing fastboot commands") @click.argument('what', type=click.Choice(list(FLASH_PARTS.values()))) @click.argument('location', type=str, required=False) def cmd_flash( @@ -75,6 +76,7 @@ def cmd_flash( profile: Optional[str] = None, shrink: bool = True, sector_size: Optional[int] = None, + confirm: bool = False, ): """Flash a partition onto a device. `location` takes either a path to a block device or one of emmc, sdcard""" enforce_wrap() @@ -91,6 +93,12 @@ def cmd_flash( if what not in FLASH_PARTS.values(): raise Exception(f'Unknown what "{what}", must be one of {", ".join(FLASH_PARTS.values())}') + if location and location.startswith('aboot'): + raise Exception("You're trying to flash something to your aboot partition, " + "which contains the android bootloader itself.\n" + "This will brick your phone and is not what you want.\n" + 'Aborting.\nDid you mean "boot"?') + if what == ROOTFS: path = '' if method not in FLASH_METHODS: @@ -104,6 +112,7 @@ def cmd_flash( partition=location, file=image_path, sparse_size=split_size if split_size is not None else '100M', + confirm=confirm, ) elif method in [JUMPDRIVE, DD]: if method == DD or location.startswith("/") or (location not in LOCATIONS and os.path.exists(location)): @@ -121,12 +130,12 @@ def cmd_flash( loop_device = losetup_rootfs_image(device_image_path, sector_size) if what == ABOOT: path = dump_aboot(f'{loop_device}p1') - fastboot_flash('boot', path) + fastboot_flash(location or 'boot', path, confirm=confirm) elif what == LK2ND: path = dump_lk2nd(f'{loop_device}p1') - fastboot_flash('lk2nd', path) + fastboot_flash(location or 'lk2nd', path, confirm=confirm) elif what == QHYPSTUB: path = dump_qhypstub(f'{loop_device}p1') - fastboot_flash('qhypstub', path) + fastboot_flash(location or 'qhypstub', path, confirm=confirm) else: raise Exception(f'Unknown what "{what}", this must be a bug in kupferbootstrap!') From edcad72f7abbd0a34a62f69380357cedd9b8a554 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 30 Apr 2023 03:29:52 +0200 Subject: [PATCH 40/82] image: use correct deviceinfo value for device sector size --- devices/device.py | 12 ++++++++++-- devices/deviceinfo.py | 22 +++++++++++++--------- dictscheme.py | 3 ++- image/flash.py | 4 +--- image/image.py | 43 +++++++++++++++++++------------------------ 5 files changed, 45 insertions(+), 39 deletions(-) diff --git 
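
The --confirm plumbing boils down to: build the argv first, show it to the user via click.confirm, and only execute afterwards. A stripped-down sketch of that gate ('echo' stands in for fastboot so it is safe to run without a device attached):

    import subprocess
    import click

    def run_gated(cmd: list[str], confirm: bool = False, msg: str = 'Really execute?') -> None:
        if confirm and not click.confirm(f'{click.style(msg, fg="yellow", bold=True)} {" ".join(cmd)}',
                                         default=True):
            raise SystemExit('user aborted')
        subprocess.run(cmd, check=True)

    if __name__ == '__main__':
        run_gated(['echo', 'flash', 'userdata', 'rootfs.img'], confirm=True)
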
a/devices/device.py b/devices/device.py index b53adbb..ce83d01 100644 --- a/devices/device.py +++ b/devices/device.py @@ -11,7 +11,7 @@ from distro.package import LocalPackage from packages.pkgbuild import Pkgbuild, _pkgbuilds_cache, discover_pkgbuilds, get_pkgbuild_by_path, init_pkgbuilds from utils import read_files_from_tar, color_str -from .deviceinfo import DeviceInfo, parse_deviceinfo +from .deviceinfo import DEFAULT_IMAGE_SECTOR_SIZE, DeviceInfo, parse_deviceinfo DEVICE_DEPRECATIONS = { "oneplus-enchilada": "sdm845-oneplus-enchilada", @@ -69,7 +69,7 @@ class Device(DictScheme): result["package_path"] = self.package.path if self.package else None return DeviceSummary(result) - def parse_deviceinfo(self, try_download: bool = True, lazy: bool = True): + def parse_deviceinfo(self, try_download: bool = True, lazy: bool = True) -> DeviceInfo: if not lazy or 'deviceinfo' not in self or self.deviceinfo is None: # avoid import loop from packages.build import check_package_version_built @@ -96,8 +96,16 @@ class Device(DictScheme): assert info.arch assert info.arch == self.arch self['deviceinfo'] = info + assert self.deviceinfo return self.deviceinfo + def get_image_sectorsize(self, **kwargs) -> Optional[int]: + """Gets the deviceinfo_rootfs_image_sector_size if defined, otherwise None""" + return self.parse_deviceinfo(**kwargs).get('rootfs_image_sector_size', None) + + def get_image_sectorsize_default(self, **kwargs) -> int: + return self.get_image_sectorsize(**kwargs) or DEFAULT_IMAGE_SECTOR_SIZE + def check_devicepkg_name(name: str, log_level: Optional[int] = None): valid = True diff --git a/devices/deviceinfo.py b/devices/deviceinfo.py index b9acf3a..2a86bf2 100644 --- a/devices/deviceinfo.py +++ b/devices/deviceinfo.py @@ -5,7 +5,7 @@ import copy import logging import os -from typing import Any, Mapping, Optional +from typing import Mapping, Optional from config.state import config from constants import Arch @@ -15,6 +15,8 @@ PMOS_ARCHES_OVERRIDES: dict[str, Arch] = { "armv7": 'armv7h', } +DEFAULT_IMAGE_SECTOR_SIZE = 512 + class DeviceInfo(DictScheme): arch: Arch @@ -24,10 +26,12 @@ class DeviceInfo(DictScheme): chassis: str flash_pagesize: int flash_method: str + rootfs_image_sector_size: Optional[int] @classmethod - def transform(cls, values: Mapping[str, str], validate: bool = True, allow_extra: bool = True, type_hints: Optional[dict[str, Any]] = None): - return super().transform(values, validate=validate, allow_extra=allow_extra) + def transform(cls, values: Mapping[str, Optional[str]], **kwargs): + kwargs = {'allow_extra': True} | kwargs + return super().transform(values, **kwargs) # Variables from deviceinfo. 
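
The sector-size handling after this change follows a simple precedence: an explicit --sector-size flag wins, then the optional deviceinfo rootfs_image_sector_size, then the 512-byte default. A compact sketch of that fallback, using a plain dict in place of the Device/DeviceInfo classes:

    from typing import Optional

    DEFAULT_IMAGE_SECTOR_SIZE = 512

    def effective_sector_size(cli_override: Optional[int], deviceinfo: dict) -> int:
        # CLI flag > deviceinfo value > hardcoded default
        return cli_override or deviceinfo.get('rootfs_image_sector_size') or DEFAULT_IMAGE_SECTOR_SIZE

    print(effective_sector_size(None, {}))                                  # 512
    print(effective_sector_size(None, {'rootfs_image_sector_size': 4096}))  # 4096
    print(effective_sector_size(512, {'rootfs_image_sector_size': 4096}))   # 512
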
Reference: @@ -115,7 +119,7 @@ deviceinfo_chassis_types = [ ] -def sanity_check(deviceinfo: dict[str, str], device_name: str): +def sanity_check(deviceinfo: dict[str, Optional[str]], device_name: str): try: _pmos_sanity_check(deviceinfo, device_name) except RuntimeError as err: @@ -129,7 +133,7 @@ def sanity_check(deviceinfo: dict[str, str], device_name: str): f"{err}") -def _pmos_sanity_check(info: dict[str, str], device_name: str): +def _pmos_sanity_check(info: dict[str, Optional[str]], device_name: str): # Resolve path for more readable error messages path = os.path.join(config.get_path('pkgbuilds'), 'device', device_name, 'deviceinfo') @@ -194,7 +198,7 @@ def _pmos_sanity_check(info: dict[str, str], device_name: str): f" and try again: {path}") -def parse_kernel_suffix(deviceinfo: dict[str, str], kernel: str = 'mainline') -> dict[str, str]: +def parse_kernel_suffix(deviceinfo: dict[str, Optional[str]], kernel: str = 'mainline') -> dict[str, Optional[str]]: """ Remove the kernel suffix (as selected in 'pmbootstrap init') from deviceinfo variables. Related: @@ -240,7 +244,7 @@ def parse_deviceinfo(deviceinfo_lines: list[str], device_name: str, kernel='main :param device: defaults to args.device :param kernel: defaults to args.kernel """ - info = {} + info: dict[str, Optional[str]] = {} for line in deviceinfo_lines: line = line.strip() if line.startswith("#") or not line: @@ -258,12 +262,12 @@ def parse_deviceinfo(deviceinfo_lines: list[str], device_name: str, kernel='main # Assign empty string as default for key in deviceinfo_attributes: if key not in info: - info[key] = "" + info[key] = None info = parse_kernel_suffix(info, kernel) sanity_check(info, device_name) if 'arch' in info: arch = info['arch'] - info['arch'] = PMOS_ARCHES_OVERRIDES.get(arch, arch) + info['arch'] = PMOS_ARCHES_OVERRIDES.get(arch, arch) # type: ignore[arg-type] dev = DeviceInfo.fromDict(info) return dev diff --git a/dictscheme.py b/dictscheme.py index 25578bb..c5537d3 100644 --- a/dictscheme.py +++ b/dictscheme.py @@ -58,6 +58,7 @@ class DictScheme(Munch): def transform( cls, values: Mapping[str, Any], + *, validate: bool = True, allow_extra: bool = False, type_hints: Optional[dict[str, Any]] = None, @@ -251,7 +252,7 @@ class DictScheme(Munch): return result def update(self, d: Mapping[str, Any], validate: bool = True): - Munch.update(self, type(self).transform(d, validate)) + Munch.update(self, type(self).transform(d, validate=validate)) def __init_subclass__(cls): super().__init_subclass__() diff --git a/image/flash.py b/image/flash.py index da8c9ad..40b6ef8 100644 --- a/image/flash.py +++ b/image/flash.py @@ -85,10 +85,8 @@ def cmd_flash( device_image_path = get_image_path(device, flavour) deviceinfo = device.parse_deviceinfo() - sector_size = sector_size or deviceinfo.flash_pagesize + sector_size = sector_size or device.get_image_sectorsize_default() method = method or deviceinfo.flash_method - if not sector_size: - raise Exception(f"Device {device.name} has no flash_pagesize specified") if what not in FLASH_PARTS.values(): raise Exception(f'Unknown what "{what}", must be one of {", ".join(FLASH_PARTS.values())}') diff --git a/image/image.py b/image/image.py index a6e73e9..d332ac1 100644 --- a/image/image.py +++ b/image/image.py @@ -274,30 +274,31 @@ def partition_device(device: str): raise Exception(f'Failed to create partitions on {device}') -def create_filesystem(device: str, blocksize: int = 4096, label=None, options=[], fstype='ext4'): - # blocksize can be 4k max due to pagesize - blocksize = 
min(blocksize, 4096) - if fstype.startswith('ext'): - # blocksize for ext-fs must be >=1024 - blocksize = max(blocksize, 1024) - +def create_filesystem(device: str, blocksize: Optional[int], label=None, options=[], fstype='ext4'): + """Creates a new filesystem. Blocksize defaults""" labels = ['-L', label] if label else [] - cmd = [ - f'mkfs.{fstype}', - '-F', - '-b', - str(blocksize), - ] + labels + [device] + cmd = [f'mkfs.{fstype}', '-F', *labels] + if blocksize: + # blocksize can be 4k max due to pagesize + blocksize = min(blocksize, 4096) + if fstype.startswith('ext'): + # blocksize for ext-fs must be >=1024 + blocksize = max(blocksize, 1024) + cmd += [ + '-b', + str(blocksize), + ] + cmd.append(device) result = run_root_cmd(cmd) if result.returncode != 0: raise Exception(f'Failed to create {fstype} filesystem on {device} with CMD: {cmd}') -def create_root_fs(device: str, blocksize: int): +def create_root_fs(device: str, blocksize: Optional[int]): create_filesystem(device, blocksize=blocksize, label='kupfer_root', options=['-O', '^metadata_csum', '-N', '100000']) -def create_boot_fs(device: str, blocksize: int): +def create_boot_fs(device: str, blocksize: Optional[int]): create_filesystem(device, blocksize=blocksize, label='kupfer_boot', fstype='ext2') @@ -447,10 +448,7 @@ def cmd_build( pkgbuilds |= set(filter_pkgbuilds(packages_extra, arch=arch, allow_empty_results=True, use_paths=False)) build_packages(pkgbuilds, arch, try_download=not no_download_pkgs) - deviceinfo = device.parse_deviceinfo() - sector_size = sector_size or deviceinfo.flash_pagesize - if not sector_size: - raise Exception(f"Device {device.name} has no flash_pagesize specified") + sector_size = sector_size or device.get_image_sectorsize() image_path = block_target or get_image_path(device, flavour.name) @@ -459,7 +457,7 @@ def cmd_build( logging.info(f'Creating new file at {image_path}') create_img_file(image_path, f"{rootfs_size_mb}M") - loop_device = losetup_rootfs_image(image_path, sector_size) + loop_device = losetup_rootfs_image(image_path, sector_size or device.get_image_sectorsize_default()) partition_device(loop_device) partprobe(loop_device) @@ -512,10 +510,7 @@ def cmd_inspect(profile: Optional[str] = None, shell: bool = False, sector_size: device = get_profile_device(profile) arch = device.arch flavour = get_profile_flavour(profile).name - deviceinfo = device.parse_deviceinfo() - sector_size = sector_size or deviceinfo.flash_pagesize - if not sector_size: - raise Exception(f"Device {device.name} has no flash_pagesize specified") + sector_size = sector_size or device.get_image_sectorsize_default() chroot = get_device_chroot(device.name, flavour, arch) image_path = get_image_path(device, flavour) From de76641fa1048870acae94430c071f5209ab4557 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 30 Apr 2023 04:05:59 +0200 Subject: [PATCH 41/82] image: dump_file_from_image(): try to detect debugfs failure Try to detect missing file after supposedly dumping it since debugfs doesn't always error out correctly --- image/image.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/image/image.py b/image/image.py index d332ac1..f766a8b 100644 --- a/image/image.py +++ b/image/image.py @@ -15,7 +15,7 @@ from constants import Arch, BASE_LOCAL_PACKAGES, BASE_PACKAGES, POST_INSTALL_CMD from distro.distro import get_base_distro, get_kupfer_https from devices.device import Device, get_profile_device from exec.cmd import run_root_cmd, generate_cmd_su -from exec.file import root_write_file, root_makedir, 
makedir +from exec.file import get_temp_dir, root_write_file, root_makedir, makedir from flavours.flavour import Flavour, get_profile_flavour from net.ssh import copy_ssh_keys from packages.build import build_enable_qemu_binfmt, build_packages, filter_pkgbuilds @@ -220,14 +220,14 @@ def mount_chroot(rootfs_source: str, boot_src: str, chroot: DeviceChroot): def dump_file_from_image(image_path: str, file_path: str, target_path: Optional[str] = None): - target_path = target_path or os.path.join('/tmp', os.path.basename(file_path)) + target_path = target_path or os.path.join(get_temp_dir(), os.path.basename(file_path)) result = run_root_cmd([ 'debugfs', image_path, '-R', f'\'dump /{file_path.lstrip("/")} {target_path}\'', ]) - if result.returncode != 0: + if result.returncode != 0 or not os.path.exists(target_path): raise Exception(f'Failed to dump {file_path} from /boot') return target_path From efe4bf085d9c4f7fdec9bf9795d7bfee486f867a Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 30 Apr 2023 07:08:07 +0200 Subject: [PATCH 42/82] image: shrink_filesystem(): align file end to 4096b otherwise fastboot seems to get upset --- image/image.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/image/image.py b/image/image.py index f766a8b..0cb0bcc 100644 --- a/image/image.py +++ b/image/image.py @@ -69,6 +69,13 @@ def get_fs_size(partition: str) -> tuple[int, int]: return fs_blocks, fs_block_size +def align_bytes(size_bytes: int, alignment: int = 4096) -> int: + rest = size_bytes % alignment + if rest: + size_bytes += alignment - rest + return size_bytes + + def shrink_fs(loop_device: str, file: str, sector_size: int): partprobe(loop_device) logging.debug(f"Checking filesystem at {loop_device}p2") @@ -130,7 +137,7 @@ def shrink_fs(loop_device: str, file: str, sector_size: int): if end_sector == 0: raise Exception(f'Failed to find end sector of {loop_device}p2') - end_size = (end_sector + 1) * sector_size + end_size = align_bytes((end_sector + 1) * sector_size, 4096) logging.debug(f'({end_sector} + 1) sectors * {sector_size} bytes/sector = {end_size} bytes') logging.info(f'Truncating {file} to {end_size} bytes') From ad80b3e889b3e40b7372a28c3d9e19e5513d6eee Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 30 Apr 2023 16:58:33 +0200 Subject: [PATCH 43/82] image/flash: give user output while copying image for shrinking --- image/flash.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/image/flash.py b/image/flash.py index 40b6ef8..f43a889 100644 --- a/image/flash.py +++ b/image/flash.py @@ -1,6 +1,7 @@ import shutil import os import click +import logging from typing import Optional @@ -49,7 +50,7 @@ def test_blockdev(path: str): def prepare_minimal_image(source_path: str, sector_size: int) -> str: minimal_image_dir = get_temp_dir(register_cleanup=True) minimal_image_path = os.path.join(minimal_image_dir, f'minimal-{os.path.basename(source_path)}') - + logging.info(f"Copying image {os.path.basename(source_path)} to {minimal_image_dir} for shrinking") shutil.copyfile(source_path, minimal_image_path) loop_device = losetup_rootfs_image(minimal_image_path, sector_size) From 379e951526c45297602a69d2296174f8eea919d9 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sat, 10 Jun 2023 20:19:33 +0200 Subject: [PATCH 44/82] packages/cli: cmd_list(): print package mode --- packages/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/cli.py b/packages/cli.py index 2e72be0..c300111 100644 --- a/packages/cli.py +++ b/packages/cli.py 
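
The 4096-byte alignment introduced here is plain round-up arithmetic: the image is truncated to the end of the shrunken partition, padded out to the next 4096-byte boundary so fastboot accepts the file. A sketch of just the rounding, with made-up numbers:

    def align_bytes(size_bytes: int, alignment: int = 4096) -> int:
        rest = size_bytes % alignment
        return size_bytes + (alignment - rest) if rest else size_bytes

    sector_size = 512
    end_sector = 4_194_305  # an end sector that does not land on a 4096-byte boundary
    end_size = align_bytes((end_sector + 1) * sector_size)
    print(end_size, end_size % 4096 == 0)  # 2147487744 True
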
@@ -313,7 +313,7 @@ def cmd_list(): logging.info(f'Done! {len(packages)} Pkgbuilds:') for name in sorted(packages.keys()): p = packages[name] - print(f'name: {p.name}; ver: {p.version}; provides: {p.provides}; replaces: {p.replaces};' + print(f'name: {p.name}; ver: {p.version}; mode: {p.mode}; provides: {p.provides}; replaces: {p.replaces};' f'local_depends: {p.local_depends}; depends: {p.depends}') From 407d8893a32c9eefcf002bc65650ff861f4dd1db Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 12 Jun 2023 00:59:28 +0200 Subject: [PATCH 45/82] image/cli: cmd_flash: improve log messages and order of partitions in CLI help --- image/boot.py | 2 +- image/flash.py | 16 +++++++++++++--- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/image/boot.py b/image/boot.py index 73ea6fd..33e1f8b 100644 --- a/image/boot.py +++ b/image/boot.py @@ -18,7 +18,7 @@ from .image import get_device_name, losetup_rootfs_image, get_image_path, dump_a LK2ND = FLASH_PARTS['LK2ND'] ABOOT = FLASH_PARTS['ABOOT'] -BOOT_TYPES = [LK2ND, JUMPDRIVE, ABOOT] +BOOT_TYPES = [ABOOT, LK2ND, JUMPDRIVE] @click.command(name='boot') diff --git a/image/flash.py b/image/flash.py index f43a889..876900a 100644 --- a/image/flash.py +++ b/image/flash.py @@ -79,7 +79,16 @@ def cmd_flash( sector_size: Optional[int] = None, confirm: bool = False, ): - """Flash a partition onto a device. `location` takes either a path to a block device or one of emmc, sdcard""" + """ + Flash a partition onto a device. + + The syntax of LOCATION depends on the flashing method and is usually only required for flashing "rootfs": + + \b + - fastboot: the regular fastboot partition identifier. Usually "userdata" + - dd: a path to a block device + - jumpdrive: one of "emmc", "sdcard" or a path to a block device + """ enforce_wrap() device = get_profile_device(profile) flavour = get_profile_flavour(profile).name @@ -93,10 +102,11 @@ def cmd_flash( raise Exception(f'Unknown what "{what}", must be one of {", ".join(FLASH_PARTS.values())}') if location and location.startswith('aboot'): - raise Exception("You're trying to flash something to your aboot partition, " + raise Exception("You're trying to flash something " + f"to your aboot partition ({location!r}), " "which contains the android bootloader itself.\n" "This will brick your phone and is not what you want.\n" - 'Aborting.\nDid you mean "boot"?') + 'Aborting.\nDid you mean to flash to "boot"?') if what == ROOTFS: path = '' From 3c9b96f03f570c17405f55f8a1059287b8aef587 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 12 Jun 2023 01:10:54 +0200 Subject: [PATCH 46/82] image: rename `aboot` to `abootimg` and `rootfs` to `full` --- constants.py | 4 ++-- image/flash.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/constants.py b/constants.py index d881b9f..723dcc0 100644 --- a/constants.py +++ b/constants.py @@ -2,8 +2,8 @@ from typehelpers import TypeAlias FASTBOOT = 'fastboot' FLASH_PARTS = { - 'ROOTFS': 'rootfs', - 'ABOOT': 'aboot', + 'FULL': 'full', + 'ABOOT': 'abootimg', 'LK2ND': 'lk2nd', 'QHYPSTUB': 'qhypstub', } diff --git a/image/flash.py b/image/flash.py index 876900a..5def17c 100644 --- a/image/flash.py +++ b/image/flash.py @@ -19,7 +19,7 @@ from .image import dd_image, dump_aboot, dump_lk2nd, dump_qhypstub, get_image_pa ABOOT = FLASH_PARTS['ABOOT'] LK2ND = FLASH_PARTS['LK2ND'] QHYPSTUB = FLASH_PARTS['QHYPSTUB'] -ROOTFS = FLASH_PARTS['ROOTFS'] +FULL_IMG = FLASH_PARTS['FULL'] DD = 'dd' @@ -82,7 +82,7 @@ def cmd_flash( """ Flash a partition onto a device. 
- The syntax of LOCATION depends on the flashing method and is usually only required for flashing "rootfs": + The syntax of LOCATION depends on the flashing method and is usually only required for flashing "full": \b - fastboot: the regular fastboot partition identifier. Usually "userdata" @@ -108,7 +108,7 @@ def cmd_flash( "This will brick your phone and is not what you want.\n" 'Aborting.\nDid you mean to flash to "boot"?') - if what == ROOTFS: + if what == FULL_IMG: path = '' if method not in FLASH_METHODS: raise Exception(f"Flash method {method} not supported!") From 0d866c6287930cdc3b7c1b86d588562f940849b0 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Thu, 22 Dec 2022 01:27:33 +0100 Subject: [PATCH 47/82] binfmt: pass through chroot properly --- binfmt.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/binfmt.py b/binfmt.py index 44770f0..018bb81 100644 --- a/binfmt.py +++ b/binfmt.py @@ -81,10 +81,10 @@ def register(arch: Arch, chroot: Optional[Chroot] = None): register_path = binfmt_path + '/register' is_arch_known(arch, True, 'register') qemu_arch = QEMU_ARCHES[arch] - if binfmt_is_registered(arch): + if binfmt_is_registered(arch, chroot=chroot): return - lines = binfmt_info() + lines = binfmt_info(chroot=chroot) _runcmd = run_root_cmd if chroot: @@ -102,7 +102,7 @@ def register(arch: Arch, chroot: Optional[Chroot] = None): # Register in binfmt_misc logging.info(f"Registering qemu binfmt ({arch})") _runcmd(f'echo "{code}" > "{register_path}" 2>/dev/null') # use path without chroot path prefix - if not binfmt_is_registered(arch): + if not binfmt_is_registered(arch, chroot=chroot): logging.debug(f'binfmt line: {code}') raise Exception(f'Failed to register qemu-user for {arch} with binfmt_misc, {binfmt_path}/{info["name"]} not found') From 46507f8dbe048e0fc8dfa67c24b921c358e88f07 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Thu, 22 Dec 2022 01:33:30 +0100 Subject: [PATCH 48/82] binfmt: rename {,un}register() to binfmt_{,un}register() --- binfmt.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/binfmt.py b/binfmt.py index 018bb81..4e672e2 100644 --- a/binfmt.py +++ b/binfmt.py @@ -76,7 +76,7 @@ def binfmt_ensure_mounted(chroot: Optional[Chroot] = None): raise Exception(f'Failed mounting binfmt_misc to {binfmt_path}') -def register(arch: Arch, chroot: Optional[Chroot] = None): +def binfmt_register(arch: Arch, chroot: Optional[Chroot] = None): binfmt_path = '/proc/sys/fs/binfmt_misc' register_path = binfmt_path + '/register' is_arch_known(arch, True, 'register') @@ -107,7 +107,7 @@ def register(arch: Arch, chroot: Optional[Chroot] = None): raise Exception(f'Failed to register qemu-user for {arch} with binfmt_misc, {binfmt_path}/{info["name"]} not found') -def unregister(arch, chroot: Optional[Chroot] = None): +def binfmt_unregister(arch, chroot: Optional[Chroot] = None): is_arch_known(arch, True, 'unregister') qemu_arch = QEMU_ARCHES[arch] binfmt_ensure_mounted(chroot) From c86ce577d14ad86d887082940f221990aab78249 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Thu, 22 Dec 2022 02:14:56 +0100 Subject: [PATCH 49/82] binfmt: move to own module, add cmd_register(), cmd_unregister() to cli --- binfmt/__init__.py | 0 binfmt.py => binfmt/binfmt.py | 0 binfmt/cli.py | 21 +++++++++++++++++++++ docs/source/cmd.md | 1 + main.py | 2 ++ packages/build.py | 2 +- 6 files changed, 25 insertions(+), 1 deletion(-) create mode 100644 binfmt/__init__.py rename binfmt.py => binfmt/binfmt.py (100%) create mode 100644 binfmt/cli.py diff --git 
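
Background for the binfmt patches: registration is nothing more than writes into the binfmt_misc pseudo-filesystem — a register string goes into /proc/sys/fs/binfmt_misc/register, the per-handler file reports enabled/disabled, and writing -1 into it unregisters the handler. A root-only sketch of the status/unregister side using direct file access instead of the project's run_root_cmd wrapper (the handler name matches what qemu-user setups typically install, but is an assumption here):

    import os

    BINFMT_DIR = '/proc/sys/fs/binfmt_misc'

    def binfmt_status(handler: str) -> str:
        """Return 'enabled', 'disabled' or 'unregistered' for e.g. handler='qemu-aarch64'."""
        path = os.path.join(BINFMT_DIR, handler)
        if not os.path.exists(path):
            return 'unregistered'
        with open(path) as f:
            return f.readline().strip()  # first line of the handler file is 'enabled' or 'disabled'

    def binfmt_unregister(handler: str) -> None:
        # Writing -1 into the handler file removes it from the kernel; requires root.
        with open(os.path.join(BINFMT_DIR, handler), 'w') as f:
            f.write('-1')

    if __name__ == '__main__':
        print(binfmt_status('qemu-aarch64'))
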
a/binfmt/__init__.py b/binfmt/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/binfmt.py b/binfmt/binfmt.py similarity index 100% rename from binfmt.py rename to binfmt/binfmt.py diff --git a/binfmt/cli.py b/binfmt/cli.py new file mode 100644 index 0000000..7188d66 --- /dev/null +++ b/binfmt/cli.py @@ -0,0 +1,21 @@ +import click + +from constants import Arch, ARCHES + +from .binfmt import binfmt_unregister + +cmd_binfmt = click.Group('binfmt', help='Manage qemu binfmt for executing foreign architecture binaries') +arch_arg = click.argument('arch', type=click.Choice(ARCHES)) + + +@cmd_binfmt.command('register', help='Register a binfmt handler with the kernel') +@arch_arg +def cmd_register(arch: Arch, disable_chroot: bool = False): + from packages.build import build_enable_qemu_binfmt + build_enable_qemu_binfmt(arch) + + +@cmd_binfmt.command('unregister', help='Unregister a binfmt handler from the kernel') +@arch_arg +def cmd_unregister(arch: Arch): + binfmt_unregister(arch) diff --git a/docs/source/cmd.md b/docs/source/cmd.md index a892022..646372d 100644 --- a/docs/source/cmd.md +++ b/docs/source/cmd.md @@ -11,6 +11,7 @@ only used to trigger builds of the submodule docs! :template: command.rst :recursive: + binfmt cache chroot config diff --git a/main.py b/main.py index 46e51c0..f212197 100755 --- a/main.py +++ b/main.py @@ -11,6 +11,7 @@ from logger import color_option, logging, quiet_option, setup_logging, verbose_o from wrapper import get_wrapper_type, enforce_wrap, nowrapper_option from progressbar import progress_bars_option +from binfmt.cli import cmd_binfmt from config.cli import config, config_option, cmd_config from packages.cli import cmd_packages from flavours.cli import cmd_flavours @@ -77,6 +78,7 @@ def main(): exit(1) +cli.add_command(cmd_binfmt) cli.add_command(cmd_cache) cli.add_command(cmd_chroot) cli.add_command(cmd_config) diff --git a/packages/build.py b/packages/build.py index a7a2fe9..2fb9ccd 100644 --- a/packages/build.py +++ b/packages/build.py @@ -9,7 +9,7 @@ from copy import deepcopy from urllib.error import HTTPError from typing import Iterable, Iterator, Optional -from binfmt import register as binfmt_register +from binfmt.binfmt import binfmt_register from constants import CROSSDIRECT_PKGS, QEMU_BINFMT_PKGS, GCC_HOSTSPECS, ARCHES, Arch, CHROOT_PATHS, MAKEPKG_CMD from config.state import config from exec.cmd import run_cmd, run_root_cmd From 933b7c42ef270f7c45f20c6d9e75b326d704b22d Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 1 May 2023 05:25:32 +0200 Subject: [PATCH 50/82] binfmt: binfmt_ensure_mounted(): use chroot.mount() with chroots --- binfmt/binfmt.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/binfmt/binfmt.py b/binfmt/binfmt.py index 4e672e2..686a8b5 100644 --- a/binfmt/binfmt.py +++ b/binfmt/binfmt.py @@ -7,7 +7,7 @@ from typing import Optional from chroot.abstract import Chroot from constants import Arch, QEMU_ARCHES -from exec.cmd import run_root_cmd +from exec.cmd import run_root_cmd, CompletedProcess from utils import mount @@ -66,13 +66,11 @@ def binfmt_ensure_mounted(chroot: Optional[Chroot] = None): binfmt_path = '/proc/sys/fs/binfmt_misc' register_path = binfmt_path + '/register' if chroot: - binfmt_path = chroot.get_path(binfmt_path) register_path = chroot.get_path(register_path) - chroot.activate() if not os.path.exists(register_path): logging.info('mounting binfmt_misc') - result = mount('binfmt_misc', binfmt_path, options=[], fs_type='binfmt_misc') - if result.returncode != 0: + 
result = (chroot.mount if chroot else mount)('binfmt_misc', binfmt_path, options=[], fs_type='binfmt_misc') # type: ignore[operator] + if (isinstance(result, CompletedProcess) and result.returncode != 0) or not result: raise Exception(f'Failed mounting binfmt_misc to {binfmt_path}') From fd4495dd580658c285d2f5198e10fe72fbbe8ff8 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 1 May 2023 05:28:34 +0200 Subject: [PATCH 51/82] binfmt: improve logging --- binfmt/binfmt.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/binfmt/binfmt.py b/binfmt/binfmt.py index 686a8b5..bccd93b 100644 --- a/binfmt/binfmt.py +++ b/binfmt/binfmt.py @@ -97,6 +97,10 @@ def binfmt_register(arch: Arch, chroot: Optional[Chroot] = None): info = lines[qemu_arch] code = info['line'] + if arch == os.uname().machine: + logging.fatal("Attempted to register qemu binfmt for host architecture, skipping!") + return + # Register in binfmt_misc logging.info(f"Registering qemu binfmt ({arch})") _runcmd(f'echo "{code}" > "{register_path}" 2>/dev/null') # use path without chroot path prefix @@ -113,6 +117,9 @@ def binfmt_unregister(arch, chroot: Optional[Chroot] = None): if chroot: binfmt_file = chroot.get_path(binfmt_file) if not os.path.exists(binfmt_file): + logging.debug(f"qemu binfmt for {arch} not registered") return logging.info(f"Unregistering qemu binfmt ({arch})") run_root_cmd(f"echo -1 > {binfmt_file}") + if binfmt_is_registered(arch, chroot=chroot): + raise Exception(f'Failed to UNregister qemu-user for {arch} with binfmt_misc, {chroot=}') From eb2b0a6c7509ce41e11c3c5e715a3ae5055eaa23 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 1 May 2023 05:28:54 +0200 Subject: [PATCH 52/82] binfmt/cli: add cmd_status(), improve logging --- binfmt/cli.py | 39 +++++++++++++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 8 deletions(-) diff --git a/binfmt/cli.py b/binfmt/cli.py index 7188d66..134a966 100644 --- a/binfmt/cli.py +++ b/binfmt/cli.py @@ -1,21 +1,44 @@ import click +import os + +from typing import Optional from constants import Arch, ARCHES -from .binfmt import binfmt_unregister +from .binfmt import binfmt_unregister, binfmt_is_registered cmd_binfmt = click.Group('binfmt', help='Manage qemu binfmt for executing foreign architecture binaries') -arch_arg = click.argument('arch', type=click.Choice(ARCHES)) +arches_arg = click.argument('arches', type=click.Choice(ARCHES), nargs=-1, required=True) +arches_arg_optional = click.argument('arches', type=click.Choice(ARCHES), nargs=-1, required=False) @cmd_binfmt.command('register', help='Register a binfmt handler with the kernel') -@arch_arg -def cmd_register(arch: Arch, disable_chroot: bool = False): +@arches_arg +def cmd_register(arches: list[Arch], disable_chroot: bool = False): from packages.build import build_enable_qemu_binfmt - build_enable_qemu_binfmt(arch) + for arch in arches: + build_enable_qemu_binfmt(arch) @cmd_binfmt.command('unregister', help='Unregister a binfmt handler from the kernel') -@arch_arg -def cmd_unregister(arch: Arch): - binfmt_unregister(arch) +@arches_arg_optional +def cmd_unregister(arches: Optional[list[Arch]]): + for arch in arches or ARCHES: + binfmt_unregister(arch) + + +@cmd_binfmt.command('status', help='Get the status of a binfmt handler from the kernel') +@arches_arg_optional +def cmd_status(arches: Optional[list[Arch]]): + for arch in arches or ARCHES: + native = arch == os.uname().machine + active = binfmt_is_registered(arch) + if native and not active: + # boooring + continue + verb = click.style( + "is" if 
active else "is NOT", + fg='green' if (active ^ native) else 'red', + bold=True, + ) + click.echo(f'Binfmt for {arch} {verb} set up! {"(host architecture!)" if native else ""}') From fc690eca8afb54fc120b3bbf725399aae785a260 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 1 May 2023 03:32:10 +0200 Subject: [PATCH 53/82] packages: build_enable_qemu_binfmt(): only show message and enable when not already active --- packages/build.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/build.py b/packages/build.py index 2fb9ccd..72c9152 100644 --- a/packages/build.py +++ b/packages/build.py @@ -9,7 +9,7 @@ from copy import deepcopy from urllib.error import HTTPError from typing import Iterable, Iterator, Optional -from binfmt.binfmt import binfmt_register +from binfmt.binfmt import binfmt_is_registered, binfmt_register from constants import CROSSDIRECT_PKGS, QEMU_BINFMT_PKGS, GCC_HOSTSPECS, ARCHES, Arch, CHROOT_PATHS, MAKEPKG_CMD from config.state import config from exec.cmd import run_cmd, run_root_cmd @@ -427,7 +427,7 @@ def setup_build_chroot( ) -> BuildChroot: assert config.runtime.arch if arch != config.runtime.arch: - build_enable_qemu_binfmt(arch) + build_enable_qemu_binfmt(arch, lazy=False) init_prebuilts(arch) chroot = get_build_chroot(arch, add_kupfer_repos=add_kupfer_repos) chroot.mount_packages() @@ -797,16 +797,23 @@ _qemu_enabled: dict[Arch, bool] = {arch: False for arch in ARCHES} def build_enable_qemu_binfmt(arch: Arch, repo: Optional[dict[str, Pkgbuild]] = None, lazy: bool = True, native_chroot: Optional[BuildChroot] = None): + """ + Build and enable qemu-user-static, binfmt and crossdirect + Specify lazy=False to force building the packages. + """ if arch not in ARCHES: - raise Exception(f'Unknown architecture "{arch}". Choices: {", ".join(ARCHES)}') - logging.info('Installing qemu-user (building if necessary)') - if lazy and _qemu_enabled[arch]: - _qemu_enabled[arch] = True + raise Exception(f'Unknown binfmt architecture "{arch}". 
Choices: {", ".join(ARCHES)}') + if _qemu_enabled[arch] or (lazy and binfmt_is_registered(arch)): + if not _qemu_enabled[arch]: + logging.info(f"qemu binfmt for {arch} was already enabled!") return native = config.runtime.arch assert native if arch == native: + _qemu_enabled[arch] = True + logging.warning("Not enabling binfmt for host architecture!") return + logging.info('Installing qemu-user (building if necessary)') check_programs_wrap(['pacman', 'makepkg', 'pacstrap']) # build qemu-user, binfmt, crossdirect build_packages_by_paths( From 8b0ca115a7ecf863cabf49568c0fd37d5afe529b Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 25 Jun 2023 00:18:23 +0200 Subject: [PATCH 54/82] config/cli: prompt_profile_{flavour,device}: improve device/flavour printing and wrapper-mode warning --- config/cli.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/config/cli.py b/config/cli.py index 1cf7375..57ead20 100644 --- a/config/cli.py +++ b/config/cli.py @@ -123,24 +123,24 @@ def prompt_choice(current: Optional[Any], key: str, choices: Iterable[Any], allo def prompt_profile_device(current: Optional[str], profile_name: str) -> tuple[str, bool]: - print(click.style("Pick your device!\nThese are the available devices:", bold=True)) + click.echo(click.style("Pick your device!\nThese are the available devices:", bold=True)) devices = execute_without_exit(get_devices, ['devices']) if devices is None: - print("(wrapper mode, input for this field will not be checked for correctness)") + logging.warning("(wrapper mode, input for this field will not be checked for correctness)") return prompt_config(text=f'{profile_name}.device', default=current) for dev in sorted(devices.keys()): - print(f"{devices[dev]}\n") + click.echo(devices[dev].nice_str(newlines=True, colors=True)+"\n") return prompt_choice(current, f'profiles.{profile_name}.device', devices.keys()) def prompt_profile_flavour(current: Optional[str], profile_name: str) -> tuple[str, bool]: - print(click.style("Pick your flavour!\nThese are the available flavours:", bold=True)) + click.echo(click.style("Pick your flavour!\nThese are the available flavours:", bold=True)) flavours = execute_without_exit(get_flavours, ['flavours']) if flavours is None: - print("(wrapper mode, input for this field will not be checked for correctness)") + logging.warning("(wrapper mode, input for this field will not be checked for correctness)") return prompt_config(text=f'{profile_name}.flavour', default=current) for f in sorted(flavours.keys()): - print(flavours[f]) + click.echo(flavours[f].nice_str(newlines=True, colors=True)+"\n") return prompt_choice(current, f'profiles.{profile_name}.flavour', flavours.keys()) From 9bd2bd46a9d9fe5865b077c8e2fc773189940419 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 25 Jun 2023 03:43:10 +0200 Subject: [PATCH 55/82] wrapper/wrapper: handle unset self.wrapped_config_path --- wrapper/wrapper.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/wrapper/wrapper.py b/wrapper/wrapper.py index a65021a..7f00eca 100644 --- a/wrapper/wrapper.py +++ b/wrapper/wrapper.py @@ -35,7 +35,7 @@ class Wrapper(WrapperProtocol): uuid: str identifier: str type: str - wrapped_config_path: str + wrapped_config_path: Optional[str] argv_override: Optional[list[str]] should_exit: bool atexit_registered: bool @@ -46,6 +46,7 @@ class Wrapper(WrapperProtocol): self.argv_override = None self.should_exit = True self.atexit_registered = False + self.wrapped_config_path = None def filter_args_wrapper(self, 
args): """filter out -c/--config since it doesn't apply in wrapper""" @@ -89,7 +90,8 @@ class Wrapper(WrapperProtocol): return wrapped_config def at_exit(self): - os.remove(self.wrapped_config_path) + if self.wrapped_config_path: + os.remove(self.wrapped_config_path) self.stop() self.atexit_registered = False From 1374e2be741f11137347f8493f89868dc8c2a76d Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 25 Jun 2023 03:43:45 +0200 Subject: [PATCH 56/82] wrapper/docker: fix logging of docker build failures --- wrapper/docker.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/wrapper/docker.py b/wrapper/docker.py index 52da4c1..13c0bde 100644 --- a/wrapper/docker.py +++ b/wrapper/docker.py @@ -39,14 +39,15 @@ class DockerWrapper(Wrapper): tag, ] + (['-q'] if not config.runtime.verbose else []) logging.debug('Running docker cmd: ' + ' '.join(cmd)) + mute_docker = not config.runtime.verbose result = subprocess.run( cmd, cwd=script_path, - stdout=(sys.stdout if config.runtime.verbose else subprocess.PIPE), - stderr=(sys.stderr if config.runtime.verbose else subprocess.PIPE), + capture_output=mute_docker, ) if result.returncode != 0: - logging.fatal('Failed to build docker image:\n' + result.stderr.decode()) + error_msg = ('\n' + result.stderr.decode() + '\n') if mute_docker else '' + logging.fatal(f'Docker error: {error_msg}Failed to build docker image: see errors above: ^^^^') exit(1) else: # Check if the image for the version already exists From e6f4a68c6b8c2c99dbe80c714ea1e93c4f4b52e0 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 25 Jun 2023 03:44:26 +0200 Subject: [PATCH 57/82] utils: add color_mark_selected() --- utils.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/utils.py b/utils.py index 3fc157e..bf049c0 100644 --- a/utils.py +++ b/utils.py @@ -12,7 +12,7 @@ import tarfile from dateutil.parser import parse as parsedate from shutil import which -from typing import Generator, IO, Optional, Union, Sequence +from typing import Any, Generator, IO, Optional, Union, Sequence from exec.cmd import run_cmd, run_root_cmd @@ -201,3 +201,20 @@ def color_str(s: str, use_colors: Optional[bool] = None, **kwargs) -> str: if colors_supported(use_colors): return click.style(s, **kwargs) return s + + +def color_mark_selected( + item: str, + msg_items: str | tuple, + msg_fmt: str = 'Currently selected by profile %s', + marker: str = '>>> ', + marker_config: dict[str, Any] = dict(bold=True, fg="bright_green"), + split_on: str = '\n', + suffix: str = '\n\n', + use_colors: Optional[bool] = None, +) -> str: + marker_full = color_str(marker, use_colors=use_colors, **marker_config) + if isinstance(msg_items, str): + msg_items = (msg_items,) + output = f'{item}{suffix}{msg_fmt % msg_items}' + return '\n'.join([(marker_full + o) for o in output.split(split_on)]) From 0951865868e99ffa29ab1f8240166561223c34aa Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 25 Jun 2023 03:45:26 +0200 Subject: [PATCH 58/82] config/profile: add resolve_profile_attr() --- config/profile.py | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/config/profile.py b/config/profile.py index 5d7db80..ff0ba95 100644 --- a/config/profile.py +++ b/config/profile.py @@ -21,6 +21,10 @@ PROFILE_DEFAULTS = Profile.fromDict(PROFILE_DEFAULTS_DICT) PROFILE_EMPTY: Profile = {key: None for key in PROFILE_DEFAULTS.keys()} # type: ignore +class ProfileNotFoundException(Exception): + pass + + def resolve_profile( name: str, 
sparse_profiles: dict[str, SparseProfile], @@ -85,3 +89,40 @@ def resolve_profile( resolved[name] = Profile.fromDict(full) return resolved + + +def resolve_profile_attr( + profile_name: str, + attr_name: str, + profiles_sparse: dict[str, SparseProfile], +) -> tuple[str, str]: + """ + This function tries to resolve a profile attribute recursively, + and throws KeyError if the key is not found anywhere in the hierarchy. + Throws a ProfileNotFoundException if the profile is not in profiles_sparse + """ + if profile_name not in profiles_sparse: + raise ProfileNotFoundException(f"Unknown profile {profile_name}") + profile: Profile = profiles_sparse[profile_name] + if attr_name in profile: + return profile[attr_name], profile_name + + if 'parent' not in profile: + raise KeyError(f'Profile attribute {attr_name} not found in {profile_name} and no parents') + parent = profile + parent_name = profile_name + seen = [] + while True: + if attr_name in parent: + return parent[attr_name], parent_name + + seen.append(parent_name) + + if not parent.get('parent', None): + raise KeyError(f'Profile attribute {attr_name} not found in inheritance chain, ' + f'we went down to {parent_name}.') + parent_name = parent['parent'] + if parent_name in seen: + raise RecursionError(f"Profile recursion loop: profile {profile_name} couldn't be resolved" + f"because of a dependency loop:\n{' -> '.join([*seen, parent_name])}") + parent = profiles_sparse[parent_name] From 5b2f36c74d7a74a8395b716bd429b756b64ebb6b Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 25 Jun 2023 03:55:06 +0200 Subject: [PATCH 59/82] config/cli: highlight currently selected devices --- config/cli.py | 72 +++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 52 insertions(+), 20 deletions(-) diff --git a/config/cli.py b/config/cli.py index 57ead20..ec05226 100644 --- a/config/cli.py +++ b/config/cli.py @@ -2,14 +2,15 @@ import click import logging from copy import deepcopy -from typing import Any, Iterable, Optional, Union +from typing import Any, Callable, Iterable, Mapping, Optional, Union from devices.device import get_devices from flavours.flavour import get_flavours +from utils import color_str, colors_supported, color_mark_selected from wrapper import execute_without_exit from .scheme import Profile -from .profile import PROFILE_EMPTY, PROFILE_DEFAULTS +from .profile import PROFILE_EMPTY, PROFILE_DEFAULTS, resolve_profile_attr, SparseProfile from .state import config, CONFIG_DEFAULTS, CONFIG_SECTIONS, merge_configs @@ -87,6 +88,7 @@ def prompt_profile( raise Exception("profile name 'current' not allowed") # don't use get_profile() here because we need the sparse profile if name in config.file.profiles: + logging.debug(f"Merging with existing profile config for {name}") profile |= config.file.profiles[name] elif create: logging.info(f"Profile {name} doesn't exist yet, creating new profile.") @@ -105,7 +107,7 @@ def prompt_profile( parse_prompt = prompt_profile_flavour else: raise Exception(f'config: Unhandled parseable field {key}, this is a bug in kupferbootstrap.') - result, _changed = parse_prompt(current, name) # type: ignore + result, _changed = parse_prompt(current=current, profile_name=name, sparse_profiles=config.file.profiles) # type: ignore else: result, _changed = prompt_config(text=text, default=current, field_type=type(PROFILE_DEFAULTS[key])) # type: ignore if _changed: @@ -122,26 +124,56 @@ def prompt_choice(current: Optional[Any], key: str, choices: Iterable[Any], allo return res, res == current -def 
prompt_profile_device(current: Optional[str], profile_name: str) -> tuple[str, bool]: - click.echo(click.style("Pick your device!\nThese are the available devices:", bold=True)) - devices = execute_without_exit(get_devices, ['devices']) - if devices is None: - logging.warning("(wrapper mode, input for this field will not be checked for correctness)") - return prompt_config(text=f'{profile_name}.device', default=current) - for dev in sorted(devices.keys()): - click.echo(devices[dev].nice_str(newlines=True, colors=True)+"\n") - return prompt_choice(current, f'profiles.{profile_name}.device', devices.keys()) +def resolve_profile_field(current: Any, *kargs): + try: + return resolve_profile_attr(*kargs) + except KeyError as err: + logging.debug(err) + return current, None -def prompt_profile_flavour(current: Optional[str], profile_name: str) -> tuple[str, bool]: - click.echo(click.style("Pick your flavour!\nThese are the available flavours:", bold=True)) - flavours = execute_without_exit(get_flavours, ['flavours']) - if flavours is None: +def prompt_wrappable( + attr_name: str, + native_cmd: Callable, + cli_cmd: list[str], + current: Optional[str], + profile_name: str, + sparse_profiles: Mapping[str, SparseProfile], + use_colors: Optional[bool] = None, +) -> tuple[str, bool]: + use_colors = colors_supported(use_colors) + + def bold(s: str, _bold=True, **kwargs): + return color_str(s, use_colors=use_colors, bold=_bold, **kwargs) + + def green(s: str, _bold=True): + return bold(s, fg="bright_green", _bold=_bold) + + print(bold(f"Pick your {attr_name}!\nThese are the available choices:")) + items = execute_without_exit(native_cmd, cli_cmd) + selected, inherited_from = resolve_profile_field(current, profile_name, attr_name, sparse_profiles) + logging.debug(f"Acquired {attr_name=}={selected} from {inherited_from}") + if items is None: logging.warning("(wrapper mode, input for this field will not be checked for correctness)") - return prompt_config(text=f'{profile_name}.flavour', default=current) - for f in sorted(flavours.keys()): - click.echo(flavours[f].nice_str(newlines=True, colors=True)+"\n") - return prompt_choice(current, f'profiles.{profile_name}.flavour', flavours.keys()) + return prompt_config(text=f'{profile_name}.{attr_name}', default=current) + for key in sorted(items.keys()): + text = items[key].nice_str(newlines=True, colors=use_colors) + if key == selected: + inherit_suffix = '' + if inherited_from not in [None, profile_name]: + quote = '"' + inherit_suffix = f'{bold(" (inherited from profile "+quote)}{green(inherited_from)}{bold(quote+")")}' + text = color_mark_selected(text, f'"{green(profile_name)}"{inherit_suffix}') + print(text + '\n') + return prompt_choice(current, f'profiles.{profile_name}.{attr_name}', items.keys()) + + +def prompt_profile_device(*kargs, **kwargs) -> tuple[str, bool]: + return prompt_wrappable('device', get_devices, ['devices'], *kargs, **kwargs) + + +def prompt_profile_flavour(*kargs, **kwargs) -> tuple[str, bool]: + return prompt_wrappable('flavour', get_flavours, ['flavours'], *kargs, **kwargs) def config_dot_name_get(name: str, config: dict[str, Any], prefix: str = '') -> Any: From 8376725652bc785cd02e6d43922f462e0bdde865 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 25 Jun 2023 03:58:09 +0200 Subject: [PATCH 60/82] test_requirements.txt: add formatters and mypy for easier development --- test_requirements.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/test_requirements.txt b/test_requirements.txt index 9955dec..8caeaa9 100644 --- 
a/test_requirements.txt +++ b/test_requirements.txt @@ -1,2 +1,5 @@ +autoflake +mypy +yapf pytest pytest-cov From 16f351a41c140af11755adadba078792add33dd2 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 25 Jun 2023 03:58:27 +0200 Subject: [PATCH 61/82] docker, gitlab-ci: use pip --break-system-packages until we figure out pip packaging --- .gitlab-ci.yml | 8 ++++---- Dockerfile | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 2327114..b1c0fa1 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -7,7 +7,7 @@ format: stage: check image: python before_script: - - pip install yapf autoflake + - pip install yapf autoflake --break-system-packages script: - ./format.sh --check @@ -15,7 +15,7 @@ typecheck: stage: check image: python before_script: - - pip install mypy + - pip install mypy --break-system-packages script: - ./typecheck.sh --non-interactive --junit-xml mypy-report.xml artifacts: @@ -27,7 +27,7 @@ pytest: image: archlinux before_script: - pacman -Sy --noconfirm --needed archlinux-keyring && pacman -Su --noconfirm python python-pip sudo git base-devel arch-install-scripts rsync - - pip install -r test_requirements.txt -r requirements.txt + - pip install -r test_requirements.txt -r requirements.txt --break-system-packages - 'echo "kupfer ALL = (ALL) NOPASSWD: ALL" > /etc/sudoers.d/kupfer_all' - useradd -m kupfer - chmod 777 . @@ -77,7 +77,7 @@ push_docker: DOCS_MAKE_TARGET: "html" DOCS_MAKE_THREADS: 6 before_script: &docs_before_script - - pip install -r requirements.txt -r docs/requirements.txt + - pip install -r requirements.txt -r docs/requirements.txt --break-system-packages script: &docs_script - make -C docs -j$DOCS_MAKE_THREADS SPHINXARGS="$DOCS_SPHINXARGS" $DOCS_MAKE_TARGET - mv "docs/$DOCS_MAKE_TARGET" public diff --git a/Dockerfile b/Dockerfile index e393f73..5289aa1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -21,7 +21,8 @@ ENV PATH=/app/bin:/app/local/bin:$PATH WORKDIR /app COPY requirements.txt . -RUN pip install -r requirements.txt +# TODO: pip packaging so we don't need --break-system-packages +RUN pip install -r requirements.txt --break-system-packages COPY . . 
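(Editorial sketch, not part of the patch series: the TODO added above treats `--break-system-packages` as a stopgap until kupferbootstrap has proper pip packaging. One alternative it hints at is installing into a virtualenv inside the image, which satisfies PEP 668's externally-managed-environment check without touching the system site-packages. This is only an illustration; the `/app/venv` path is an assumption and does not appear anywhere in the series.)

```Dockerfile
# Sketch of an alternative to --break-system-packages: install into a venv.
# Assumes the same /app working directory as the existing Dockerfile.
RUN python -m venv /app/venv
# Put the venv first in PATH so "python", "pip" and installed scripts resolve to it.
ENV PATH=/app/venv/bin:$PATH
COPY requirements.txt .
# pip now targets the venv, so the system site-packages stays untouched
# and --break-system-packages is no longer needed.
RUN pip install -r requirements.txt
```

With the venv on PATH, the later `COPY . .` and the entrypoint would keep working unchanged; the trade-off is one extra image layer for the venv itself.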
From bfce7c466d8804c93fdca242b543b1ddd099e9be Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 25 Jun 2023 16:03:08 +0200 Subject: [PATCH 62/82] utils: add color_mark_selected() to add ">>> selected by profile" msgs --- utils.py | 31 +++++++++++++++++++++++++++---- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/utils.py b/utils.py index bf049c0..f07c2c9 100644 --- a/utils.py +++ b/utils.py @@ -203,18 +203,41 @@ def color_str(s: str, use_colors: Optional[bool] = None, **kwargs) -> str: return s +def color_green(s: str, **kwargs): + return color_str(s, fg="bright_green", **kwargs) + + +def color_bold(s: str, **kwargs): + return color_str(s, bold=True, **kwargs) + + def color_mark_selected( item: str, - msg_items: str | tuple, - msg_fmt: str = 'Currently selected by profile %s', + profile_name: str, + inherited_from: Optional[str] = None, + msg_fmt: str = 'Currently selected by profile "%s"%s', + msg_item_colors: dict[str, Any] = dict(bold=True, fg="bright_green"), marker: str = '>>> ', marker_config: dict[str, Any] = dict(bold=True, fg="bright_green"), split_on: str = '\n', suffix: str = '\n\n', use_colors: Optional[bool] = None, ) -> str: + + def bold(s: str, _bold=True, **kwargs): + return color_bold(s, use_colors=use_colors, **kwargs) + + def green(s: str, **kwargs): + return color_green(s, use_colors=use_colors, **kwargs) + marker_full = color_str(marker, use_colors=use_colors, **marker_config) - if isinstance(msg_items, str): - msg_items = (msg_items,) + + msg_items = (color_str(profile_name, use_colors=use_colors, **msg_item_colors),) + if inherited_from and inherited_from != profile_name: + msg_items = msg_items + (''.join([ + bold(' (inherited from profile "'), + green(inherited_from, bold=True), + bold('")'), + ]),) # type: ignore[assignment] output = f'{item}{suffix}{msg_fmt % msg_items}' return '\n'.join([(marker_full + o) for o in output.split(split_on)]) From 7425356f1070aa35f51b6301ee50e6a66a937b26 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 25 Jun 2023 16:04:00 +0200 Subject: [PATCH 63/82] devices/device: extract sanitize_device_name() into own function --- devices/device.py | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/devices/device.py b/devices/device.py index ce83d01..c386053 100644 --- a/devices/device.py +++ b/devices/device.py @@ -135,6 +135,20 @@ def parse_device_pkg(pkgbuild: Pkgbuild) -> Device: return Device(name=name, arch=arch, package=pkgbuild, deviceinfo=None) +def sanitize_device_name(name: str, warn: bool = True) -> str: + if name not in DEVICE_DEPRECATIONS: + return name + warning = f"Deprecated device {name}" + replacement = DEVICE_DEPRECATIONS[name] + if replacement: + warning += (f': Device has been renamed to {replacement}! Please adjust your profile config!\n' + 'This will become an error in a future version!') + name = replacement + if warn: + logging.warning(warning) + return name + + _device_cache: dict[str, Device] = {} _device_cache_populated: bool = False @@ -159,14 +173,7 @@ def get_devices(pkgbuilds: Optional[dict[str, Pkgbuild]] = None, lazy: bool = Tr def get_device(name: str, pkgbuilds: Optional[dict[str, Pkgbuild]] = None, lazy: bool = True, scan_all=False) -> Device: global _device_cache, _device_cache_populated assert lazy or pkgbuilds - if name in DEVICE_DEPRECATIONS: - warning = f"Deprecated device {name}" - replacement = DEVICE_DEPRECATIONS[name] - if replacement: - warning += (f': Device has been renamed to {replacement}! 
Please adjust your profile config!\n' - 'This will become an error in a future version!') - name = replacement - logging.warning(warning) + name = sanitize_device_name(name) if lazy and name in _device_cache: return _device_cache[name] if scan_all: From 60b38d895c4ff91157cf4b5011caa78643400538 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 25 Jun 2023 16:04:35 +0200 Subject: [PATCH 64/82] devices, flavours, config: add "inherited from ABC" to "selected by XYZ" output --- config/cli.py | 38 +++++++++++++++++++------------------- devices/cli.py | 26 +++++++++++--------------- flavours/cli.py | 25 +++++++++++++------------ 3 files changed, 43 insertions(+), 46 deletions(-) diff --git a/config/cli.py b/config/cli.py index ec05226..ba1ada5 100644 --- a/config/cli.py +++ b/config/cli.py @@ -4,9 +4,9 @@ import logging from copy import deepcopy from typing import Any, Callable, Iterable, Mapping, Optional, Union -from devices.device import get_devices +from devices.device import get_devices, sanitize_device_name from flavours.flavour import get_flavours -from utils import color_str, colors_supported, color_mark_selected +from utils import color_bold, colors_supported, color_mark_selected from wrapper import execute_without_exit from .scheme import Profile @@ -98,16 +98,24 @@ def prompt_profile( changed = False for key, current in profile.items(): current = profile[key] - text = f'{name}.{key}' + text = f'profiles.{name}.{key}' if not no_parse and key in PARSEABLE_FIELDS: parse_prompt = None + sanitize_func = None if key == 'device': parse_prompt = prompt_profile_device + sanitize_func = sanitize_device_name elif key == 'flavour': parse_prompt = prompt_profile_flavour else: raise Exception(f'config: Unhandled parseable field {key}, this is a bug in kupferbootstrap.') - result, _changed = parse_prompt(current=current, profile_name=name, sparse_profiles=config.file.profiles) # type: ignore + result, _changed = parse_prompt( + current=current, + profile_name=name, + sparse_profiles=config.file.profiles, + use_colors=config.runtime.colors, + sanitize_func=sanitize_func, + ) # type: ignore else: result, _changed = prompt_config(text=text, default=current, field_type=type(PROFILE_DEFAULTS[key])) # type: ignore if _changed: @@ -139,31 +147,23 @@ def prompt_wrappable( current: Optional[str], profile_name: str, sparse_profiles: Mapping[str, SparseProfile], + sanitize_func: Optional[Callable[[str], str]] = None, use_colors: Optional[bool] = None, ) -> tuple[str, bool]: use_colors = colors_supported(use_colors) - def bold(s: str, _bold=True, **kwargs): - return color_str(s, use_colors=use_colors, bold=_bold, **kwargs) - - def green(s: str, _bold=True): - return bold(s, fg="bright_green", _bold=_bold) - - print(bold(f"Pick your {attr_name}!\nThese are the available choices:")) + print(color_bold(f"Pick your {attr_name}!\nThese are the available choices:", use_colors=use_colors)) items = execute_without_exit(native_cmd, cli_cmd) - selected, inherited_from = resolve_profile_field(current, profile_name, attr_name, sparse_profiles) - logging.debug(f"Acquired {attr_name=}={selected} from {inherited_from}") if items is None: logging.warning("(wrapper mode, input for this field will not be checked for correctness)") - return prompt_config(text=f'{profile_name}.{attr_name}', default=current) + return prompt_config(text=f'profiles.{profile_name}.{attr_name}', default=current) + selected, inherited_from = resolve_profile_field(current, profile_name, attr_name, sparse_profiles) + if selected and sanitize_func: + 
selected = sanitize_func(selected) for key in sorted(items.keys()): text = items[key].nice_str(newlines=True, colors=use_colors) if key == selected: - inherit_suffix = '' - if inherited_from not in [None, profile_name]: - quote = '"' - inherit_suffix = f'{bold(" (inherited from profile "+quote)}{green(inherited_from)}{bold(quote+")")}' - text = color_mark_selected(text, f'"{green(profile_name)}"{inherit_suffix}') + text = color_mark_selected(text, profile_name, inherited_from) print(text + '\n') return prompt_choice(current, f'profiles.{profile_name}.{attr_name}', items.keys()) diff --git a/devices/cli.py b/devices/cli.py index c35811e..56cfd61 100644 --- a/devices/cli.py +++ b/devices/cli.py @@ -5,9 +5,10 @@ from json import dumps as json_dump from typing import Optional from config.state import config -from utils import colors_supported, color_str +from config.cli import resolve_profile_field +from utils import color_mark_selected, colors_supported -from .device import get_devices, get_profile_device +from .device import get_devices, get_device @click.command(name='devices') @@ -36,12 +37,14 @@ def cmd_devices( if not devices: raise Exception("No devices found!") profile_device = None + profile_name = config.file.profiles.current + selected, inherited_from = None, None try: - dev = get_profile_device() - assert dev - profile_device = dev + selected, inherited_from = resolve_profile_field(None, profile_name, 'device', config.file.profiles) + if selected: + profile_device = get_device(selected) except Exception as ex: - logging.debug(f"Failed to get profile device for visual highlighting, not a problem: {ex}") + logging.debug(f"Failed to get profile device for marking as currently selected, continuing anyway. Exception: {ex}") output = [''] json_output = {} interactive_json = json and not output_file @@ -49,8 +52,6 @@ def cmd_devices( json = True use_colors = colors_supported(False if interactive_json else config.runtime.colors) for name in sorted(devices.keys()): - prefix = '' - suffix = '' device = devices[name] assert device if force_parse_deviceinfo in [None, True]: @@ -66,14 +67,9 @@ def cmd_devices( json_output[name] = device.get_summary().toDict() if interactive_json: continue + snippet = device.nice_str(colors=use_colors, newlines=True) if profile_device and profile_device.name == device.name: - prefix = color_str('>>> ', bold=True, fg="bright_green", use_colors=use_colors) - suffix = '\n\n' - suffix += color_str('Currently selected by profile', bold=True, use_colors=use_colors) + " " - suffix += color_str(f'"{config.file.profiles.current}"', bold=True, fg="bright_green", use_colors=use_colors) - snippet = f'{device.nice_str(colors=use_colors, newlines=True)}{suffix}' - # prefix each line in the snippet - snippet = '\n'.join([f'{prefix}{line}' for line in snippet.split('\n')]) + snippet = color_mark_selected(snippet, profile_name or '[unknown]', inherited_from) output.append(f"{snippet}\n") if interactive_json: output = ['\n' + json_dump(json_output, indent=4)] diff --git a/flavours/cli.py b/flavours/cli.py index a9ccdb7..a05bf3c 100644 --- a/flavours/cli.py +++ b/flavours/cli.py @@ -4,10 +4,11 @@ import logging from json import dumps as json_dump from typing import Optional +from config.cli import resolve_profile_field from config.state import config -from utils import colors_supported, color_str +from utils import color_mark_selected, colors_supported -from .flavour import get_flavours, get_profile_flavour +from .flavour import get_flavours, get_flavour profile_option = 
click.option('-p', '--profile', help="name of the profile to use", required=False, default=None) @@ -23,13 +24,17 @@ def cmd_flavours(json: bool = False, output_file: Optional[str] = None): flavours = get_flavours() interactive_json = json and not output_file use_colors = colors_supported(config.runtime.colors) and not interactive_json + profile_name = config.file.profiles.current + selected, inherited_from = None, None if output_file: json = True if not flavours: raise Exception("No flavours found!") if not interactive_json: try: - profile_flavour = get_profile_flavour() + selected, inherited_from = resolve_profile_field(None, profile_name, 'flavour', config.file.profiles) + if selected: + profile_flavour = get_flavour(selected) except Exception as ex: logging.debug(f"Failed to get profile flavour for marking as currently selected, continuing anyway. Exception: {ex}") for name in sorted(flavours.keys()): @@ -39,15 +44,11 @@ def cmd_flavours(json: bool = False, output_file: Optional[str] = None): except Exception as ex: logging.debug(f"A problem happened while parsing flavourinfo for {name}, continuing anyway. Exception: {ex}") if not interactive_json: - block = [*f.nice_str(newlines=True, colors=use_colors).split('\n'), ''] + snippet = f.nice_str(newlines=True, colors=use_colors) if profile_flavour == f: - prefix = color_str('>>> ', bold=True, fg='bright_green', use_colors=use_colors) - block += [ - color_str("Currently selected by profile ", bold=True, use_colors=use_colors) + - color_str(f'"{config.file.profiles.current}"\n', bold=True, fg="bright_green") - ] - block = [prefix + line for line in block] - results += block + snippet = color_mark_selected(snippet, profile_name or '[unknown]', inherited_from) + snippet += '\n' + results += snippet.split('\n') if json: d = dict(f) d["description"] = f.flavour_info.description if (f.flavour_info and f.flavour_info.description) else f.description @@ -58,7 +59,7 @@ def cmd_flavours(json: bool = False, output_file: Optional[str] = None): d["pkgbuild"] = f.pkgbuild.path if f.pkgbuild else None d["package"] = f.pkgbuild.name d["arches"] = sorted(f.pkgbuild.arches) if f.pkgbuild else None - json_results[d["name"]] = d + json_results[name] = d print() if output_file: with open(output_file, 'w') as fd: From c70b52e5c1192f9ac271dd104c1745395979acee Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 9 Jul 2023 03:13:54 +0200 Subject: [PATCH 65/82] utils: color_mark_selected: fix msg_items tuple size to 2 --- utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/utils.py b/utils.py index f07c2c9..df40a97 100644 --- a/utils.py +++ b/utils.py @@ -232,12 +232,12 @@ def color_mark_selected( marker_full = color_str(marker, use_colors=use_colors, **marker_config) - msg_items = (color_str(profile_name, use_colors=use_colors, **msg_item_colors),) + msg_items = [color_str(profile_name, use_colors=use_colors, **msg_item_colors), ''] if inherited_from and inherited_from != profile_name: - msg_items = msg_items + (''.join([ + msg_items[1] = ''.join([ bold(' (inherited from profile "'), green(inherited_from, bold=True), bold('")'), - ]),) # type: ignore[assignment] - output = f'{item}{suffix}{msg_fmt % msg_items}' + ]) + output = f'{item}{suffix}{msg_fmt % tuple(msg_items)}' return '\n'.join([(marker_full + o) for o in output.split(split_on)]) From 6bcd132b5374b9c0ee280d13c6bbc7cc667b5cd7 Mon Sep 17 00:00:00 2001 From: Hacker1245 Date: Wed, 12 Jul 2023 18:23:30 +0000 Subject: [PATCH 66/82] docs: update device names in profiles --- 
docs/source/config.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/config.md b/docs/source/config.md index bf6e0f5..561d24c 100644 --- a/docs/source/config.md +++ b/docs/source/config.md @@ -64,7 +64,7 @@ current = "graphical" [profiles.default] parent = "" -device = "oneplus-enchilada" +device = "sdm845-oneplus-enchilada" flavour = "barebone" pkgs_include = [ "wget", "rsync", "nano", "tmux", "zsh", "pv", ] pkgs_exclude = [] @@ -89,7 +89,7 @@ flavour = "debug-shell" [profiles.beryllium] parent = "graphical" -device = "xiaomi-beryllium-ebbg" +device = "sdm845-xiaomi-beryllium-ebbg" flavour = "gnome" hostname = "pocof1" ``` From 4c5fe2cb1c926d90eca64aa23fcb1f5259f612e0 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Tue, 29 Aug 2023 02:13:41 +0200 Subject: [PATCH 67/82] config/cli: prompt_choice(): fix change detection logical inversion --- config/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/cli.py b/config/cli.py index ba1ada5..740ab17 100644 --- a/config/cli.py +++ b/config/cli.py @@ -129,7 +129,7 @@ def prompt_choice(current: Optional[Any], key: str, choices: Iterable[Any], allo res, _ = prompt_config(text=key, default=current, field_type=click.Choice(choices), show_choices=show_choices) if allow_none and res == '': res = None - return res, res == current + return res, res != current def resolve_profile_field(current: Any, *kargs): From a0c40363903c5e1d89d6c799aa8abd125b9c2e15 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Sun, 29 Oct 2023 16:32:36 +0100 Subject: [PATCH 68/82] packages: try_download_package(): check pacman cache if file in db but doesn't exist in db folder --- packages/build.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/build.py b/packages/build.py index 72c9152..190e588 100644 --- a/packages/build.py +++ b/packages/build.py @@ -290,7 +290,8 @@ def try_download_package(dest_file_path: str, package: Pkgbuild, arch: Arch) -> return None repo_pkg: RemotePackage = repo.packages[pkgname] if repo_pkg.version != package.version: - logging.debug(f"Package {pkgname} versions differ: local: {package.version}, remote: {repo_pkg.version}. Building instead.") + logging.debug(f"Package {pkgname} versions differ: local: {package.version}, " + f"remote: {repo_pkg.version}. Building instead.") return None if repo_pkg.filename != filename: versions_str = f"local: {filename}, remote: {repo_pkg.filename}" @@ -298,6 +299,19 @@ def try_download_package(dest_file_path: str, package: Pkgbuild, arch: Arch) -> logging.debug(f"package filenames don't match: {versions_str}") return None logging.debug(f"ignoring compression extension difference: {versions_str}") + cache_file = os.path.join(config.get_path('pacman'), arch, repo_pkg.filename) + if os.path.exists(cache_file): + if not repo_pkg._desc or 'SHA256SUM' not in repo_pkg._desc: + cache_matches = False + extra_msg = ". However, we can't validate it, as the https repo doesnt provide a SHA256SUM for it." + else: + cache_matches = sha256sum(cache_file) == repo_pkg._desc['SHA256SUM'] + extra_msg = (". However its checksum doesn't match." 
if not cache_matches else " and its checksum matches.") + logging.debug(f"While checking the HTTPS repo DB, we found a matching filename in the pacman cache{extra_msg}") + if cache_matches: + logging.info(f'copying cache file {cache_file} to repo as verified by remote checksum') + shutil.move(cache_file, dest_file_path) + return dest_file_path url = repo_pkg.resolved_url assert url try: From 2e504b7b00f40b33cd583d5c246cda751fcdd8a0 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 11 Dec 2023 12:49:28 +0100 Subject: [PATCH 69/82] dictscheme: fix type hinting --- dictscheme.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/dictscheme.py b/dictscheme.py index c5537d3..ca7c12c 100644 --- a/dictscheme.py +++ b/dictscheme.py @@ -52,7 +52,7 @@ class DictScheme(Munch): _sparse: ClassVar[bool] = False def __init__(self, d: Mapping = {}, validate: bool = True, **kwargs): - self.update(d | kwargs, validate=validate) + self.update(dict(d) | kwargs, validate=validate) @classmethod def transform( @@ -269,10 +269,13 @@ class DictScheme(Munch): ) -> str: import yaml yaml_args = {'sort_keys': False} | yaml_args - return yaml.dump( + dumped = yaml.dump( self.toDict(strip_hidden=strip_hidden, sparse=sparse), **yaml_args, ) + if dumped is None: + raise Exception(f"Failed to yaml-serialse {self}") + return dumped def toToml( self, From ff8a529690da1732f1b6bffd67c2b101b2cbf6ff Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Mon, 11 Dec 2023 16:37:48 +0100 Subject: [PATCH 70/82] docs: move usage guides to usage/, add quickstart and porting --- docs/source/index.md | 3 +- docs/source/{ => usage}/config.md | 6 +- docs/source/usage/faq.md | 39 +++++++++++++ docs/source/usage/index.md | 9 +++ docs/source/{ => usage}/install.md | 0 docs/source/usage/porting.md | 94 ++++++++++++++++++++++++++++++ docs/source/usage/quickstart.md | 9 +++ 7 files changed, 155 insertions(+), 5 deletions(-) rename docs/source/{ => usage}/config.md (94%) create mode 100644 docs/source/usage/faq.md create mode 100644 docs/source/usage/index.md rename docs/source/{ => usage}/install.md (100%) create mode 100644 docs/source/usage/porting.md create mode 100644 docs/source/usage/quickstart.md diff --git a/docs/source/index.md b/docs/source/index.md index 82a3e72..2cde0d6 100644 --- a/docs/source/index.md +++ b/docs/source/index.md @@ -6,7 +6,6 @@ a tool to build and flash packages and images for the [Kupfer](https://gitlab.co ## Documentation pages ```{toctree} -install -config +usage/index cli ``` diff --git a/docs/source/config.md b/docs/source/usage/config.md similarity index 94% rename from docs/source/config.md rename to docs/source/usage/config.md index 561d24c..c850cc7 100644 --- a/docs/source/config.md +++ b/docs/source/usage/config.md @@ -2,7 +2,7 @@ Kupferbootstrap uses [toml](https://en.wikipedia.org/wiki/TOML) for its configuration file. -The file can either be edited manually or managed via the {doc}`cli/config` subcommand. +The file can either be edited manually or managed via the {doc}`../cli/config` subcommand. You can quickly generate a default config by running {code}`kupferbootstrap config init -N`. @@ -54,7 +54,7 @@ This allows you to easily keep a number of slight variations of the same target without the need to constantly modify your Kupferbootstrap configuration file. You can easily create new profiles with -[kupferbootstrap config profile init](../cli/config/#kupferbootstrap-config-profile-init). +[kupferbootstrap config profile init](/cli/config/#kupferbootstrap-config-profile-init). 
Here's an example: @@ -97,7 +97,7 @@ hostname = "pocof1" The `current` key in the `profiles` section controlls which profile gets used by Kupferbootstrap by default. The first subsection (`profiles.default`) describes the `default` profile -which gets created by [config init](../cli/config/#kupferbootstrap-config-init). +which gets created by [config init](/cli/config/#kupferbootstrap-config-init). Next, we have a `graphical` profile that defines a couple of graphical programs for all but the `recovery` profile, since that doesn't have a GUI. diff --git a/docs/source/usage/faq.md b/docs/source/usage/faq.md new file mode 100644 index 0000000..441bef2 --- /dev/null +++ b/docs/source/usage/faq.md @@ -0,0 +1,39 @@ +# FAQ + + +```{contents} Table of Contents +:class: this-will-duplicate-information-and-it-is-still-useful-here +:depth: 3 +``` + + +## Which devices are currently supported? + +Currently very few! +See [the `devices` repo](https://gitlab.com/kupfer/packages/pkgbuilds/-/tree/dev/device). We use the same codenames as [postmarketOS](https://wiki.postmarketos.org/wiki/Devices) (although we prefix them with the SoC) + + +## How to port a new device or package? + +See [Porting](../porting) + +## How to build a specific package + +See also: The full [`kupferbootstrap packages build` docs](/cli/packages#kupferbootstrap-packages-build) + +### Example + +For rebuilding `kupfer-config` and `crossdirect`, defaulting to your device's architecture + +```sh +kupferbootstrap packages build [--force] [--arch $target_arch] kupfer-config crossdirect +``` + + +### By package path +You can also use the a path snippet (`$repo/$pkgbase`) to the PKGBUILD folder as seen inside your pkgbuilds.git: + +```sh +kupferbootstrap packages build [--force] main/kupfer-config cross/crossdirect +``` + diff --git a/docs/source/usage/index.md b/docs/source/usage/index.md new file mode 100644 index 0000000..d21c193 --- /dev/null +++ b/docs/source/usage/index.md @@ -0,0 +1,9 @@ +# Usage + +```{toctree} +quickstart +faq +install +config +porting +``` diff --git a/docs/source/install.md b/docs/source/usage/install.md similarity index 100% rename from docs/source/install.md rename to docs/source/usage/install.md diff --git a/docs/source/usage/porting.md b/docs/source/usage/porting.md new file mode 100644 index 0000000..b99303a --- /dev/null +++ b/docs/source/usage/porting.md @@ -0,0 +1,94 @@ +# Porting +## Porting devices + +### Homework +Before you can get started porting a device, you'll need to do some research: + +1. Familiarize yourself with git basics. +1. Familiarize yourself with Arch Linux packaging, i.e. `PKGBUILD`s and `makepkg` +1. Familiarize yourself with the postmarketOS port of the device. + ```{warning} + If there is no postmarketOS port yet, you'll probably need to get deep into kernel development. + We suggest [starting with a port to pmOS](https://wiki.postmarketos.org/wiki/Porting_to_a_new_device) then, especially if you're not familiar with the process already. + ``` + +### Porting +1. Navigate to your pkgbuilds checkout +1. Follow the [general package porting guidelines](#porting-packages) to create a device-, kernel- and probably also a firmware-package for the device and SoC. Usually this roughly means porting the postmarketOS APKBUILDs to our PKGBUILD scheme. + You can get inspiration by comparing existing Kupfer ports (e.g. one of the SDM845 devices) to the [postmarketOS packages](https://gitlab.com/postmarketOS/pmaports/-/tree/master/device) for that device. 
+   Usually you should start out by copying and then customizing the Kupfer packages for a device that's as similar to yours as possible, i.e. uses the same or a related SoC, if something like that is already available in Kupfer.
+   ```{hint} Package Repos:
+   Device packages belong into `device/`, kernels into `linux/` and firmware into `firmware/`.
+   ```
+1. When submitting your MR, please include some information:
+   - what you have found to be working, broken, and not tested (and why)
+   - any necessary instructions for testing
+   - whether you'd be willing to maintain the device long-term (test kernel upgrades, submit device package updates, etc.)
+
+
+### Gotchas
+
+Please be aware of these gotchas:
+- As of now, Kupfer only really supports platforms using Android's `aboot` bootloader, i.e. ex-Android phones. In order to support other boot modes (e.g. uboot on the Librem5 and Pine devices), we'll need to port and switch to postmarketOS's [boot-deploy](https://gitlab.com/postmarketOS/boot-deploy) first and add support for EFI setups to Kupferbootstrap.
+
+
+## Porting packages
+
+### Homework
+Before you can get started, you'll need to do some research:
+
+1. Familiarize yourself with git basics.
+1. Familiarize yourself with Arch Linux packaging, i.e. `PKGBUILD`s and `makepkg`
+
+### Development
+
+```{warning}
+Throughout the process, use git to version your changes.
+- Don't procrastinate using git or committing until you're "done" or "have got something working", you'll regret it.
+- Don't worry about a "clean" git history while you're developing; we can squash it up later.
+- \[Force-]Push your changes regularly, just like committing. Don't wait for perfection.
+```
+1. Create a new git branch for your package locally.
+   ```{hint}
+   It might be a good idea to get into the habit of prefixing branch names with \[a part of] your username and a slash like so:
+   `myNickname/myFeatureName`
+   This makes it easier to work in the same remote repo with multiple people.
+   ```
+1.
+   ```{note}
+   The pkgbuilds git repo contains multiple package repositories, represented by folders at the top level (`main`, `cross`, `phosh`, etc.).
+   ```
+   Try to choose a sensible package repo for your new packages and create new folders for each `pkgbase` inside the repo folder.
+1. Navigate into the folder of the new package and create a new `PKGBUILD`; fill it with life!
+1. **`_mode`**: Add the build mode at the top of the PKGBUILD.
+   ```{hint}
+   If you're unsure what to pick, go with `_mode=host`. It'll use `crossdirect` to get speeds close to proper cross-compiling.
+   ```
+   This determines whether it's built using a foreign-arch chroot (`_mode=host`) executed with qemu-user, or using real cross-compilation (`_mode=cross`) from a host-architecture chroot, but the package's build tooling has to specifically support the latter, so it's mostly useful for kernels and uncompiled packages.
+1. **`_nodeps`**: (Optional) If your package doesn't require its listed dependencies to build
+   (usually because you're packaging a meta-package or only configs or scripts)
+   you can add `_nodeps=true` as the next line after the `_mode=` line to speed up packaging.
+   `makedeps` are still installed anyway.
+1. Test building it with `kupferbootstrap packages build $pkgbname`
+1. For any files and git repos downloaded by your PKGBUILD,
+   add them to a new `.gitignore` file in the same directory as your `PKGBUILD`.
+   ```{hint}
+   Don't forget to `git add` the new `.gitignore` file!
+   ```
+1. Run `kupferbootstrap packages check` to make sure the formatting for your PKGBUILDs is okay.
+   ```{warning}
+   This is **not** optional. MRs with failing CI will **not** be merged.
+   ```
+
+### Pushing
+1. Fork the Kupfer pkgbuilds repo on Gitlab using the Fork button
+1. Add your fork's **SSH** URI to your local git repo as a **new remote**: `git remote add fork git@gitlab...`
+1. `git push -u fork $branchname` it
+
+### Submitting the MR
+When you're ready, open a Merge Request on the Kupfer pkgbuilds repo.
+
+```{hint}
+Prefix the MR title with `Draft: ` to indicate a Work In Progress state.
+```
+
diff --git a/docs/source/usage/quickstart.md b/docs/source/usage/quickstart.md
new file mode 100644
index 0000000..fc5a4d5
--- /dev/null
+++ b/docs/source/usage/quickstart.md
@@ -0,0 +1,9 @@
+# Quickstart
+
+1. [Install](../install) Kupferbootstrap
+1. [Configure](../config) it: `kupferbootstrap config init`
+1. [Update your PKGBUILDs + SRCINFO cache](/cli/packages#kupferbootstrap-packages-update): `kupferbootstrap packages update`
+1. [Build an image](/cli/image#kupferbootstrap-image-build): `kupferbootstrap image build`
+1. [Flash the image](/cli/image#kupferbootstrap-image-flash): `kupferbootstrap image flash abootimg && kupferbootstrap image flash full userdata`
+
+See also: [Frequently Asked Questions](../faq)

From 95147ceceac6d8417f55df9bcbd881f8e85577d1 Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Tue, 19 Dec 2023 23:34:33 +0100
Subject: [PATCH 71/82] docs: convert absolute links to relative

---
 docs/source/usage/config.md     | 10 +++++++---
 docs/source/usage/faq.md        |  2 +-
 docs/source/usage/quickstart.md |  6 +++---
 3 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/docs/source/usage/config.md b/docs/source/usage/config.md
index c850cc7..9c259c0 100644
--- a/docs/source/usage/config.md
+++ b/docs/source/usage/config.md
@@ -2,10 +2,14 @@
 
 Kupferbootstrap uses [toml](https://en.wikipedia.org/wiki/TOML) for its configuration file.
 
-The file can either be edited manually or managed via the {doc}`../cli/config` subcommand.
+The file can either be edited manually or managed via the [`kupferbootstrap config`](../../cli/config) subcommand.
 
+```{hint}
 You can quickly generate a default config by running {code}`kupferbootstrap config init -N`.
+For an interactive dialogue, omit the `-N`.
+```
+
 
 ## File Location
 
 The configuration is stored in `~/.config/kupfer/kupferbootstrap.toml`, where `~` is your user's home folder.
@@ -54,7 +58,7 @@ This allows you to easily keep a number of slight variations of the same target
 without the need to constantly modify your Kupferbootstrap configuration file.
 
 You can easily create new profiles with
-[kupferbootstrap config profile init](/cli/config/#kupferbootstrap-config-profile-init).
+[kupferbootstrap config profile init](../../cli/config/#kupferbootstrap-config-profile-init).
 
 Here's an example:
 
@@ -97,7 +101,7 @@ hostname = "pocof1"
 
 The `current` key in the `profiles` section controlls which profile gets used by Kupferbootstrap by default.
 The first subsection (`profiles.default`) describes the `default` profile
-which gets created by [config init](/cli/config/#kupferbootstrap-config-init).
+which gets created by [`kupferbootstrap config init`](../../cli/config/#kupferbootstrap-config-init).
 
 Next, we have a `graphical` profile that defines a couple of graphical programs
 for all but the `recovery` profile, since that doesn't have a GUI.
diff --git a/docs/source/usage/faq.md b/docs/source/usage/faq.md
index 441bef2..53b1818 100644
--- a/docs/source/usage/faq.md
+++ b/docs/source/usage/faq.md
@@ -19,7 +19,7 @@ See [Porting](../porting)
 ## How to build a specific package
 
-See also: The full [`kupferbootstrap packages build` docs](/cli/packages#kupferbootstrap-packages-build)
+See also: The full [`kupferbootstrap packages build` docs](../../cli/packages#kupferbootstrap-packages-build)
 
 ### Example
 
 For rebuilding `kupfer-config` and `crossdirect`, defaulting to your device's architecture
diff --git a/docs/source/usage/quickstart.md b/docs/source/usage/quickstart.md
index fc5a4d5..0076b58 100644
--- a/docs/source/usage/quickstart.md
+++ b/docs/source/usage/quickstart.md
@@ -2,8 +2,8 @@
 
 1. [Install](../install) Kupferbootstrap
 1. [Configure](../config) it: `kupferbootstrap config init`
-1. [Update your PKGBUILDs + SRCINFO cache](/cli/packages#kupferbootstrap-packages-update): `kupferbootstrap packages update`
-1. [Build an image](/cli/image#kupferbootstrap-image-build): `kupferbootstrap image build`
-1. [Flash the image](/cli/image#kupferbootstrap-image-flash): `kupferbootstrap image flash abootimg && kupferbootstrap image flash full userdata`
+1. [Update your PKGBUILDs + SRCINFO cache](../../cli/packages#kupferbootstrap-packages-update): `kupferbootstrap packages update`
+1. [Build an image](../../cli/image#kupferbootstrap-image-build): `kupferbootstrap image build`
+1. [Flash the image](../../cli/image#kupferbootstrap-image-flash): `kupferbootstrap image flash abootimg && kupferbootstrap image flash full userdata`
 
 See also: [Frequently Asked Questions](../faq)

From 4cce7e57aea2f4a0904943b9d57c6460ee210b44 Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Wed, 20 Dec 2023 00:28:26 +0100
Subject: [PATCH 72/82] constants: use ALARM's aarch64 gcc that we package

---
 constants.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/constants.py b/constants.py
index 723dcc0..2ddd686 100644
--- a/constants.py
+++ b/constants.py
@@ -89,7 +89,7 @@ COMPILE_ARCHES: dict[Arch, str] = {
 GCC_HOSTSPECS: dict[DistroArch, dict[TargetArch, str]] = {
     'x86_64': {
         'x86_64': 'x86_64-pc-linux-gnu',
-        'aarch64': 'aarch64-linux-gnu',
+        'aarch64': 'aarch64-unknown-linux-gnu',
         'armv7h': 'arm-unknown-linux-gnueabihf'
     },
     'aarch64': {

From a75f32b4b159eb65fd3b26158333e13530b272c8 Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Wed, 20 Dec 2023 01:55:16 +0100
Subject: [PATCH 73/82] chroot/build: mount_crossdirect(): fix symlink creation if link exists

---
 chroot/build.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/chroot/build.py b/chroot/build.py
index 2520d6a..40b123d 100644
--- a/chroot/build.py
+++ b/chroot/build.py
@@ -82,6 +82,7 @@ class BuildChroot(Chroot):
         native_chroot.mount_pacman_cache()
         native_chroot.mount_packages()
         native_chroot.activate()
+        logging.debug(f"Installing {CROSSDIRECT_PKGS=} + {gcc=}")
         results = dict(native_chroot.try_install_packages(
             CROSSDIRECT_PKGS + [gcc],
             refresh=True,
@@ -103,8 +104,8 @@ class BuildChroot(Chroot):
         target_include_dir = os.path.join(self.path, 'include')
 
         for target, source in {cc_path: gcc, target_lib_dir: 'lib', target_include_dir: 'usr/include'}.items():
-            if not os.path.exists(target):
-                logging.debug(f'Symlinking {source} at {target}')
+            if not (os.path.exists(target) or os.path.islink(target)):
+                logging.debug(f'Symlinking {source=} at {target=}')
                 symlink(source, target)
         ld_so = os.path.basename(glob(f"{os.path.join(native_chroot.path, 'usr', 'lib', 'ld-linux-')}*")[0])
         ld_so_target = os.path.join(target_lib_dir, ld_so)

From 
From c074fbe42c7c05c25e89df96224a66bcef645519 Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Wed, 20 Dec 2023 03:33:28 +0100
Subject: [PATCH 74/82] packages/pkgbuild: parse_pkgbuild(): inherit depends, makedepends, provides, replaces from pkgbase unless overridden

---
 packages/pkgbuild.py | 24 ++++++++++++++++++++++--
 1 file changed, 22 insertions(+), 2 deletions(-)

diff --git a/packages/pkgbuild.py b/packages/pkgbuild.py
index 65722ba..baeea83 100644
--- a/packages/pkgbuild.py
+++ b/packages/pkgbuild.py
@@ -310,8 +310,11 @@ class SubPkgbuild(Pkgbuild):
         self.sources_refreshed = False
         self.update(pkgbase)

-        self.provides = {}
-        self.replaces = []
+        # set to None - will be replaced with base_pkg if still None after parsing
+        self.depends = None  # type: ignore[assignment]
+        self.makedepends = None  # type: ignore[assignment]
+        self.provides = None  # type: ignore[assignment]
+        self.replaces = None  # type: ignore[assignment]

     def refresh_sources(self, lazy: bool = True):
         assert self.pkgbase
@@ -383,13 +386,21 @@ def parse_pkgbuild(
         elif line.startswith('arch'):
             current.arches.append(splits[1])
         elif line.startswith('provides'):
+            if not current.provides:
+                current.provides = {}
             current.provides = get_version_specs(splits[1], current.provides)
         elif line.startswith('replaces'):
+            if not current.replaces:
+                current.replaces = []
             current.replaces.append(splits[1])
         elif splits[0] in ['depends', 'makedepends', 'checkdepends', 'optdepends']:
             spec = splits[1].split(': ', 1)[0]
+            if not current.depends:
+                current.depends = {}
             current.depends = get_version_specs(spec, current.depends)
             if splits[0] == 'makedepends':
+                if not current.makedepends:
+                    current.makedepends = {}
                 current.makedepends = get_version_specs(spec, current.makedepends)

     results: list[Pkgbuild] = list(base_package.subpackages)
@@ -402,6 +413,15 @@ def parse_pkgbuild(
         pkg.update_version()
         if not (pkg.version == base_package.version):
             raise Exception(f'Subpackage malformed! Versions differ! base: {base_package}, subpackage: {pkg}')
+        if isinstance(pkg, SubPkgbuild):
+            if pkg.depends is None:
+                pkg.depends = base_package.depends
+            if pkg.makedepends is None:
+                pkg.makedepends = base_package.makedepends
+            if pkg.replaces is None:
+                pkg.replaces = base_package.replaces
+            if pkg.provides is None:
+                pkg.provides = base_package.provides

     return results

From eaac9195ea584964785055b4b8e66fe46b5a596b Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Wed, 20 Dec 2023 03:36:13 +0100
Subject: [PATCH 75/82] packages/build: build_enable_qemu_binfmt(): also build gcc package if available

---
 packages/build.py | 21 +++++++++++++++++++--
 1 file changed, 19 insertions(+), 2 deletions(-)

diff --git a/packages/build.py b/packages/build.py
index 190e588..455126b 100644
--- a/packages/build.py
+++ b/packages/build.py
@@ -438,10 +438,11 @@ def setup_build_chroot(
     extra_packages: list[str] = [],
     add_kupfer_repos: bool = True,
     clean_chroot: bool = False,
+    repo: Optional[dict[str, Pkgbuild]] = None,
 ) -> BuildChroot:
     assert config.runtime.arch
     if arch != config.runtime.arch:
-        build_enable_qemu_binfmt(arch, lazy=False)
+        build_enable_qemu_binfmt(arch, repo=repo or discover_pkgbuilds(), lazy=False)
     init_prebuilts(arch)
     chroot = get_build_chroot(arch, add_kupfer_repos=add_kupfer_repos)
     chroot.mount_packages()
@@ -510,6 +511,7 @@ def build_package(
     enable_ccache: bool = True,
     clean_chroot: bool = False,
     build_user: str = 'kupfer',
+    repo: Optional[dict[str, Pkgbuild]] = None,
 ):
     makepkg_compile_opts = ['--holdver']
     makepkg_conf_path = 'etc/makepkg.conf'
@@ -529,6 +531,7 @@ def build_package(
         arch=arch,
         extra_packages=deps,
         clean_chroot=clean_chroot,
+        repo=repo,
     )
     assert config.runtime.arch
     native_chroot = target_chroot
@@ -538,6 +541,7 @@ def build_package(
             arch=config.runtime.arch,
             extra_packages=['base-devel'] + CROSSDIRECT_PKGS,
             clean_chroot=clean_chroot,
+            repo=repo,
         )
     if not package.mode:
         logging.warning(f'Package {package.path} has no _mode set, assuming "host"')
@@ -756,6 +760,7 @@ def build_packages(
             enable_crossdirect=enable_crossdirect,
             enable_ccache=enable_ccache,
             clean_chroot=clean_chroot,
+            repo=repo,
         )
         files += add_package_to_repo(package, arch)
         updated_repos.add(package.repo)
@@ -830,8 +835,20 @@ def build_enable_qemu_binfmt(arch: Arch, repo: Optional[dict[str, Pkgbuild]] = N
     logging.info('Installing qemu-user (building if necessary)')
     check_programs_wrap(['pacman', 'makepkg', 'pacstrap'])
     # build qemu-user, binfmt, crossdirect
+    packages = list(CROSSDIRECT_PKGS)
+    hostspec = GCC_HOSTSPECS[arch][arch]
+    cross_gcc = f"{hostspec}-gcc"
+    if repo:
+        for pkg in repo.values():
+            if (pkg.name == cross_gcc or cross_gcc in pkg.provides):
+                if config.runtime.arch not in pkg.arches:
+                    logging.debug(f"Package {pkg.path} matches {cross_gcc=} name but not arch: {pkg.arches=}")
+                    continue
+                packages.append(pkg.path)
+                logging.debug(f"Adding gcc package {pkg.path} to the necessary crosscompilation tools")
+                break
     build_packages_by_paths(
-        CROSSDIRECT_PKGS,
+        packages,
         native,
         repo=repo,
         try_download=True,

From 4b2150940d0fcedcc1a86d63e4c8ece7a54af519 Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Fri, 22 Dec 2023 05:07:55 +0100
Subject: [PATCH 76/82] packages/build: use copy && remove_file() instead of shutil.move()

---
 packages/build.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/packages/build.py b/packages/build.py
index 455126b..bcd5403 100644
--- a/packages/build.py
+++ b/packages/build.py
@@ -310,7 +310,8 @@ def try_download_package(dest_file_path: str, package: Pkgbuild, arch: Arch) ->
             logging.debug(f"While checking the HTTPS repo DB, we found a matching filename in the pacman cache{extra_msg}")
         if cache_matches:
             logging.info(f'copying cache file {cache_file} to repo as verified by remote checksum')
-            shutil.move(cache_file, dest_file_path)
+            shutil.copy(cache_file, dest_file_path)
+            remove_file(cache_file)
             return dest_file_path
     url = repo_pkg.resolved_url
     assert url

From b006cd8f4da8c621370b2bf73762b248ddd74dbc Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Mon, 8 Jan 2024 02:30:57 +0100
Subject: [PATCH 77/82] packages/pkgbuild: support new key "_crossdirect" to enable/disable crossdirect for single packages

---
 packages/build.py | 2 +-
 packages/cli.py | 4 +++-
 packages/pkgbuild.py | 7 +++++++
 packages/srcinfo_cache.py | 20 +++++++++++++++-----
 4 files changed, 26 insertions(+), 7 deletions(-)

diff --git a/packages/build.py b/packages/build.py
index bcd5403..b388221 100644
--- a/packages/build.py
+++ b/packages/build.py
@@ -575,7 +575,7 @@ def build_package(
         build_root = target_chroot
     makepkg_compile_opts += ['--nodeps' if package.nodeps else '--syncdeps']
     env = deepcopy(get_makepkg_env(arch))
-    if foreign_arch and enable_crossdirect and package.name not in CROSSDIRECT_PKGS:
+    if foreign_arch and package.crossdirect and enable_crossdirect and package.name not in CROSSDIRECT_PKGS:
         env['PATH'] = f"/native/usr/lib/crossdirect/{arch}:{env['PATH']}"
         target_chroot.mount_crossdirect(native_chroot)
     else:
diff --git a/packages/cli.py b/packages/cli.py
index c300111..3878ba4 100644
--- a/packages/cli.py
+++ b/packages/cli.py
@@ -313,7 +313,7 @@ def cmd_list():
     logging.info(f'Done! {len(packages)} Pkgbuilds:')
     for name in sorted(packages.keys()):
         p = packages[name]
-        print(f'name: {p.name}; ver: {p.version}; mode: {p.mode}; provides: {p.provides}; replaces: {p.replaces};'
+        print(f'name: {p.name}; ver: {p.version}; mode: {p.mode}; crossdirect: {p.crossdirect} provides: {p.provides}; replaces: {p.replaces};'
               f'local_depends: {p.local_depends}; depends: {p.depends}')
@@ -346,6 +346,7 @@ def cmd_check(paths):
     mode_key = '_mode'
     nodeps_key = '_nodeps'
+    crossdirect_key = '_crossdirect'
     pkgbase_key = 'pkgbase'
     pkgname_key = 'pkgname'
     arches_key = '_arches'
@@ -356,6 +357,7 @@ def cmd_check(paths):
     required = {
         mode_key: True,
         nodeps_key: False,
+        crossdirect_key: False,
         pkgbase_key: False,
         pkgname_key: True,
         'pkgdesc': False,
diff --git a/packages/pkgbuild.py b/packages/pkgbuild.py
index baeea83..3b89558 100644
--- a/packages/pkgbuild.py
+++ b/packages/pkgbuild.py
@@ -156,6 +156,7 @@ class Pkgbuild(PackageInfo):
     repo: str
     mode: str
     nodeps: bool
+    crossdirect: bool
     path: str
     pkgver: str
     pkgrel: str
@@ -190,6 +191,7 @@ class Pkgbuild(PackageInfo):
         self.repo = repo or ''
         self.mode = ''
         self.nodeps = False
+        self.crossdirect = True
         self.path = relative_path
         self.pkgver = ''
         self.pkgrel = ''
@@ -223,6 +225,7 @@ class Pkgbuild(PackageInfo):
         self.repo = pkg.repo
         self.mode = pkg.mode
         self.nodeps = pkg.nodeps
+        self.crossdirect = pkg.crossdirect
         self.path = pkg.path
         self.pkgver = pkg.pkgver
         self.pkgrel = pkg.pkgrel
@@ -357,7 +360,11 @@ def parse_pkgbuild(
         else:
             raise Exception(msg)

+    # if _crossdirect is unset (None), it defaults to True
+    crossdirect_enabled = srcinfo_cache.build_crossdirect in (None, True)
+
     base_package = Pkgbase(relative_pkg_dir, sources_refreshed=sources_refreshed, srcinfo_cache=srcinfo_cache)
+    base_package.crossdirect = crossdirect_enabled
     base_package.mode = mode
     base_package.nodeps = nodeps
     base_package.repo = relative_pkg_dir.split('/')[0]
diff --git a/packages/srcinfo_cache.py b/packages/srcinfo_cache.py
index 5cb2373..3d9737b 100644
--- a/packages/srcinfo_cache.py
+++ b/packages/srcinfo_cache.py
@@ -68,11 +68,19 @@ class SrcInitialisedFile(JsonFile):
             raise ex


+srcinfo_meta_defaults = {
+    'build_mode': None,
+    "build_nodeps": None,
+    "build_crossdirect": None,
+}
+
+
 class SrcinfoMetaFile(JsonFile):

     checksums: dict[str, str]
     build_mode: Optional[str]
     build_nodeps: Optional[bool]
+    build_crossdirect: Optional[bool]

     _changed: bool
     _filename: ClassVar[str] = SRCINFO_METADATA_FILE
@@ -92,9 +100,8 @@ class SrcinfoMetaFile(JsonFile):
         s = SrcinfoMetaFile({
             '_relative_path': relative_pkg_dir,
             '_changed': True,
-            'build_mode': '',
-            'build_nodeps': None,
             'checksums': {},
+            **srcinfo_meta_defaults,
         })
         return s, s.refresh_all()
@@ -120,9 +127,11 @@ class SrcinfoMetaFile(JsonFile):
         if not force_refresh:
             logging.debug(f'{metadata._relative_path}: srcinfo checksums match!')
             lines = lines or metadata.read_srcinfo_file()
-            for build_field in ['build_mode', 'build_nodeps']:
+            for build_field in srcinfo_meta_defaults.keys():
                 if build_field not in metadata:
                     metadata.refresh_build_fields()
+                    if write:
+                        metadata.write()
                     break
         else:
             lines = metadata.refresh_all(write=write)
@@ -143,8 +152,7 @@ class SrcinfoMetaFile(JsonFile):
         self._changed = True

     def refresh_build_fields(self):
-        self['build_mode'] = None
-        self['build_nodeps'] = None
+        self.update(srcinfo_meta_defaults)
         with open(os.path.join(config.get_path('pkgbuilds'), self._relative_path, 'PKGBUILD'), 'r') as file:
             lines = file.read().split('\n')
         for line in lines:
@@ -156,6 +164,8 @@ class SrcinfoMetaFile(JsonFile):
                 self.build_mode = val
             elif key == '_nodeps':
                 self.build_nodeps = val.lower() == 'true'
+            elif key == '_crossdirect':
+                self.build_crossdirect = val.lower() == 'true'
             else:
                 continue

From f05de7738ae67e2a2b393b4437dbe43384159825 Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Mon, 8 Jan 2024 04:25:42 +0100
Subject: [PATCH 78/82] integration_tests: test importing main.cli

---
 integration_tests.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/integration_tests.py b/integration_tests.py
index 2b6c526..bc4eeb7 100644
--- a/integration_tests.py
+++ b/integration_tests.py
@@ -37,6 +37,11 @@ def ctx() -> click.Context:
     return click.Context(click.Command('integration_tests'))


+def test_main_import():
+    from main import cli
+    assert cli
+
+
 def test_config_load(ctx: click.Context):
     path = config.runtime.config_file
     assert path

From cebac831864a2eaeaa86e3586f778e9aafb43d6f Mon Sep 17 00:00:00 2001
From: Syboxez Blank
Date: Sat, 23 Mar 2024 17:48:38 +0000
Subject: [PATCH 79/82] packages/pkgbuild: parse_pkgbuild(): Reuse pkgbase's `makedepends` as dependencies

Authored-by: InsanePrawn

---
 packages/pkgbuild.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/pkgbuild.py b/packages/pkgbuild.py
index 3b89558..9be89c3 100644
--- a/packages/pkgbuild.py
+++ b/packages/pkgbuild.py
@@ -403,7 +403,7 @@ def parse_pkgbuild(
         elif splits[0] in ['depends', 'makedepends', 'checkdepends', 'optdepends']:
             spec = splits[1].split(': ', 1)[0]
             if not current.depends:
-                current.depends = {}
+                current.depends = (base_package.makedepends or {}).copy()
             current.depends = get_version_specs(spec, current.depends)
             if splits[0] == 'makedepends':
                 if not current.makedepends:

From a4cfc3c3e51dc21e2e19729154afa7869e0a1698 Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Wed, 20 Mar 2024 16:42:19 +0100
Subject: [PATCH 80/82] exec/file: makedir(): add mode=None arg

---
 exec/file.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/exec/file.py b/exec/file.py
index 852ad48..00653aa 100644
--- a/exec/file.py
+++ b/exec/file.py
@@ -144,7 +144,13 @@ def remove_file(path: str, recursive=False):
         raise Exception(f"Unable to remove {path}: cmd returned {rc}")


-def makedir(path, user: Optional[Union[str, int]] = None, group: Optional[Union[str, int]] = None, parents: bool = True):
+def makedir(
+    path,
+    user: Optional[Union[str, int]] = None,
+    group: Optional[Union[str, int]] = None,
+    parents: bool = True,
+    mode: Optional[Union[int, str]] = None,
+):
     if not root_check_exists(path):
         try:
             if parents:
@@ -153,6 +159,8 @@ def makedir(path, user: Optional[Union[str, int]] = None, group: Optional[Union[
                 os.mkdir(path)
         except:
             run_root_cmd(['mkdir'] + (['-p'] if parents else []) + [path])
+    if mode is not None:
+        chmod(path, mode=mode)
     chown(path, user, group)

From a176fad05a358bcbb83d4e2207f8ae2a1c7abbee Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Wed, 20 Mar 2024 16:43:08 +0100
Subject: [PATCH 81/82] net/ssh: copy_ssh_keys(): pass chroot for uid resolution

---
 image/image.py | 2 +-
 net/ssh.py | 52 ++++++++++++++++++++++++++++++--------------------
 2 files changed, 32 insertions(+), 22 deletions(-)

diff --git a/image/image.py b/image/image.py
index 0cb0bcc..afb3ddb 100644
--- a/image/image.py
+++ b/image/image.py
@@ -333,7 +333,7 @@ def install_rootfs(
     )
     chroot.add_sudo_config(config_name='wheel', privilegee='%wheel', password_required=True)
     copy_ssh_keys(
-        chroot.path,
+        chroot,
         user=user,
     )
     files = {
diff --git a/net/ssh.py b/net/ssh.py
index 2a5ef7f..cf1ed37 100644
--- a/net/ssh.py
+++ b/net/ssh.py
@@ -6,7 +6,9 @@ import click

 from config.state import config
 from constants import SSH_COMMON_OPTIONS, SSH_DEFAULT_HOST, SSH_DEFAULT_PORT
+from chroot.abstract import Chroot
 from exec.cmd import run_cmd
+from exec.file import write_file
 from wrapper import check_programs_wrap
@@ -83,21 +85,16 @@ def find_ssh_keys():
     return keys


-def copy_ssh_keys(root_dir: str, user: str):
+def copy_ssh_keys(chroot: Chroot, user: str):
     check_programs_wrap(['ssh-keygen'])
-    authorized_keys_file = os.path.join(
-        root_dir,
-        'home',
-        user,
-        '.ssh',
-        'authorized_keys',
-    )
-    if os.path.exists(authorized_keys_file):
-        os.unlink(authorized_keys_file)
+    ssh_dir_relative = os.path.join('/home', user, '.ssh')
+    ssh_dir = chroot.get_path(ssh_dir_relative)
+    authorized_keys_file_rel = os.path.join(ssh_dir_relative, 'authorized_keys')
+    authorized_keys_file = chroot.get_path(authorized_keys_file_rel)

     keys = find_ssh_keys()
     if len(keys) == 0:
-        logging.info("Could not find any ssh key to copy")
+        logging.warning("Could not find any ssh key to copy")
         create = click.confirm("Do you want me to generate an ssh key for you?", True)
         if not create:
             return
@@ -116,15 +113,28 @@ def copy_ssh_keys(root_dir: str, user: str):
             logging.fatal("Failed to generate ssh key")
         keys = find_ssh_keys()

-    ssh_dir = os.path.join(root_dir, 'home', user, '.ssh')
-    if not os.path.exists(ssh_dir):
-        os.makedirs(ssh_dir, exist_ok=True, mode=0o700)
+    if not keys:
+        logging.warning("No SSH keys to be copied. Skipping.")
Skipping.") + return - with open(authorized_keys_file, 'a') as authorized_keys: - for key in keys: - pub = f'{key}.pub' - if not os.path.exists(pub): - logging.debug(f'Skipping key {key}: {pub} not found') - continue + auth_key_lines = [] + for key in keys: + pub = f'{key}.pub' + if not os.path.exists(pub): + logging.debug(f'Skipping key {key}: {pub} not found') + continue + try: with open(pub, 'r') as file: - authorized_keys.write(file.read()) + contents = file.read() + if not contents.strip(): + continue + auth_key_lines.append(contents) + except Exception as ex: + logging.warning(f"Could not read ssh pub key {pub}", exc_info=ex) + continue + + if not os.path.exists(ssh_dir): + logging.info(f"Creating {ssh_dir_relative} dir in chroot {chroot.path}") + chroot.run_cmd(["mkdir", "-p", "-m", "700", ssh_dir_relative], switch_user=user) + logging.info(f"Writing SSH pub keys to {authorized_keys_file}") + write_file(authorized_keys_file, "\n".join(auth_key_lines), user=chroot.get_uid(user), mode="644") From a28550825f89673635e36456da06c5a23c43ece8 Mon Sep 17 00:00:00 2001 From: InsanePrawn Date: Wed, 20 Mar 2024 20:56:17 +0100 Subject: [PATCH 82/82] image/image: tolerate pub-key copying to fail during image build --- image/image.py | 1 + net/ssh.py | 12 +++++++++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/image/image.py b/image/image.py index afb3ddb..6532af7 100644 --- a/image/image.py +++ b/image/image.py @@ -335,6 +335,7 @@ def install_rootfs( copy_ssh_keys( chroot, user=user, + allow_fail=True, ) files = { 'etc/pacman.conf': get_base_distro(arch).get_pacman_conf( diff --git a/net/ssh.py b/net/ssh.py index cf1ed37..6eb7294 100644 --- a/net/ssh.py +++ b/net/ssh.py @@ -85,7 +85,7 @@ def find_ssh_keys(): return keys -def copy_ssh_keys(chroot: Chroot, user: str): +def copy_ssh_keys(chroot: Chroot, user: str, allow_fail: bool = False): check_programs_wrap(['ssh-keygen']) ssh_dir_relative = os.path.join('/home', user, '.ssh') ssh_dir = chroot.get_path(ssh_dir_relative) @@ -134,7 +134,13 @@ def copy_ssh_keys(chroot: Chroot, user: str): continue if not os.path.exists(ssh_dir): - logging.info(f"Creating {ssh_dir_relative} dir in chroot {chroot.path}") + logging.info(f"Creating {ssh_dir_relative!r} dir in chroot {chroot.path!r}") chroot.run_cmd(["mkdir", "-p", "-m", "700", ssh_dir_relative], switch_user=user) logging.info(f"Writing SSH pub keys to {authorized_keys_file}") - write_file(authorized_keys_file, "\n".join(auth_key_lines), user=chroot.get_uid(user), mode="644") + try: + write_file(authorized_keys_file, "\n".join(auth_key_lines), user=str(chroot.get_uid(user)), mode="644") + except Exception as ex: + logging.error(f"Failed to write SSH authorized_keys_file at {authorized_keys_file!r}:", exc_info=ex) + if allow_fail: + return + raise ex from ex