From e068b3587e9aa67ffc23c8ff6745c5e63ff0a8b5 Mon Sep 17 00:00:00 2001
From: InsanePrawn
Date: Mon, 17 Apr 2023 16:27:36 +0200
Subject: [PATCH] WIP: keyring init

---
 constants.py          | 20 ++++++++++++--------
 distro/package.py     |  1 +
 distro/repo_config.py | 14 ++++++++------
 utils.py              | 36 ++++++++++++++++++++++++++++++++++++
 4 files changed, 57 insertions(+), 14 deletions(-)

diff --git a/constants.py b/constants.py
index 2ddd686..0f32cde 100644
--- a/constants.py
+++ b/constants.py
@@ -1,4 +1,4 @@
-from typehelpers import TypeAlias
+from typehelpers import TypeAlias, Union
 
 FASTBOOT = 'fastboot'
 FLASH_PARTS = {
@@ -56,6 +56,8 @@ ARCHES = [
 DistroArch: TypeAlias = Arch
 TargetArch: TypeAlias = Arch
 
+KEYRINGS_KEY = 'keyrings'
+
 ALARM_REPOS = {
     'core': 'http://mirror.archlinuxarm.org/$arch/$repo',
     'extra': 'http://mirror.archlinuxarm.org/$arch/$repo',
@@ -64,20 +66,22 @@ ALARM_REPOS = {
     'aur': 'http://mirror.archlinuxarm.org/$arch/$repo',
 }
 
-BASE_DISTROS: dict[DistroArch, dict[str, dict[str, str]]] = {
+ALARM_DISTRO: dict[str, Union[dict[str, str], list[str]]] = {
+    'repos': ALARM_REPOS,
+    KEYRINGS_KEY: ['archlinuxarm-keyring'],
+}
+
+BASE_DISTROS: dict[DistroArch, dict[str, Union[dict[str, str], list[str]]]] = {
     'x86_64': {
         'repos': {
             'core': 'https://geo.mirror.pkgbuild.com/$repo/os/$arch',
             'extra': 'https://geo.mirror.pkgbuild.com/$repo/os/$arch',
             'community': 'https://geo.mirror.pkgbuild.com/$repo/os/$arch',
         },
+        KEYRINGS_KEY: ['archlinux-keyring'],
     },
-    'aarch64': {
-        'repos': ALARM_REPOS,
-    },
-    'armv7h': {
-        'repos': ALARM_REPOS,
-    },
+    'aarch64': ALARM_DISTRO,
+    'armv7h': ALARM_DISTRO,
 }
 
 COMPILE_ARCHES: dict[Arch, str] = {
diff --git a/distro/package.py b/distro/package.py
index a6c06a9..200a7ef 100644
--- a/distro/package.py
+++ b/distro/package.py
@@ -72,6 +72,7 @@ class LocalPackage(BinaryPackage):
         assert self.resolved_url and self.filename and self.filename in self.resolved_url
         path = f'{self.resolved_url.split("file://")[1]}'
         if dest_dir:
+            makedir(dest_dir)
             target = os.path.join(dest_dir, filename or self.filename)
             if os.path.getsize(path) != os.path.getsize(target) or sha256sum(path) != sha256sum(target):
                 copy_file(path, target, follow_symlinks=True)
diff --git a/distro/repo_config.py b/distro/repo_config.py
index 5f32d2b..341a573 100644
--- a/distro/repo_config.py
+++ b/distro/repo_config.py
@@ -9,7 +9,7 @@ from copy import deepcopy
 from typing import ClassVar, Optional, Mapping, Union
 
 from config.state import config
-from constants import Arch, BASE_DISTROS, KUPFER_HTTPS, REPOS_CONFIG_FILE, REPOSITORIES
+from constants import Arch, BASE_DISTROS, KUPFER_HTTPS, KEYRINGS_KEY, REPOS_CONFIG_FILE, REPOSITORIES
 from dictscheme import DictScheme, toml_inline_dicts, TomlPreserveInlineDictEncoder
 from utils import sha256sum
 
@@ -39,11 +39,13 @@ class RepoConfig(AbstrRepoConfig):
 
 class BaseDistro(DictScheme):
     remote_url: Optional[str]
+    keyrings: Optional[list[str]]
     repos: dict[str, BaseDistroRepo]
 
 
 class ReposConfigFile(DictScheme):
     remote_url: Optional[str]
+    keyrings: Optional[list[str]]
     repos: dict[str, RepoConfig]
     base_distros: dict[Arch, BaseDistro]
     _path: Optional[str]
@@ -106,6 +108,7 @@ REPOS_CONFIG_DEFAULT = ReposConfigFile({
     '_path': '__DEFAULTS__',
     '_checksum': None,
     REMOTEURL_KEY: KUPFER_HTTPS,
+    KEYRINGS_KEY: [],
     REPOS_KEY: {
         'kupfer_local': REPO_DEFAULTS | {
             LOCALONLY_KEY: True
@@ -117,11 +120,10 @@ REPOS_CONFIG_DEFAULT = ReposConfigFile({
     BASEDISTROS_KEY: {
         arch: {
             REMOTEURL_KEY: None,
-            'repos': {
-                k: {
-                    'remote_url': v
-                } for k, v in arch_def['repos'].items()
-            },
+            KEYRINGS_KEY: arch_def.get(KEYRINGS_KEY, None),
+            'repos': {k: {
+                'remote_url': v
+            } for k, v in arch_def['repos'].items()},  # type: ignore[union-attr]
         } for arch, arch_def in BASE_DISTROS.items()
     },
 })
diff --git a/utils.py b/utils.py
index df40a97..02048ac 100644
--- a/utils.py
+++ b/utils.py
@@ -138,6 +138,42 @@ def read_files_from_tar(tar_file: str, files: Sequence[str]) -> Generator[tuple[
             yield path, fd
 
 
+def read_files_from_tar_recursive(tar_file: str, paths: Sequence[str], append_slash: bool = True) -> Generator[tuple[str, IO], None, None]:
+    """
+    Returns tar FDs to files that lie under the directories specified in paths.
+    HINT: deactivate append_slash to get glob-like behaviour, as if all paths ended with *
+    """
+    assert os.path.exists(tar_file)
+    paths = [f"{p.strip('/')}{'/' if append_slash else ''}" for p in paths]
+    with tarfile.open(tar_file) as index:
+        for member in index.getmembers():
+            for path in paths:
+                if member.isfile() and member.path.startswith(path):
+                    fd = index.extractfile(member)
+                    assert fd
+                    yield member.path, fd
+                    break
+            continue
+
+
+def extract_files_from_tar_generator(
+    tar_generator: Generator[tuple[str, IO], None, None],
+    output_dir: str,
+    remove_prefix: str = '',
+    append_slash: bool = True,
+):
+    assert os.path.exists(output_dir)
+    remove_prefix = remove_prefix.strip('/')
+    if append_slash and remove_prefix:
+        remove_prefix += '/'
+    for file_path, fd in tar_generator:
+        assert file_path.startswith(remove_prefix)
+        output_path = os.path.join(output_dir, file_path[len(remove_prefix):].lstrip('/'))
+        os.makedirs(os.path.dirname(output_path), exist_ok=True)
+        with open(output_path, 'wb') as f:
+            f.write(fd.read())
+
+
 def download_file(path: str, url: str, update: bool = True):
     """Download a file over http[s]. With `update`, tries to use mtime timestamps to download only changed files."""
     url_time = None
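
Reviewer note, not part of the patch: a minimal sketch of what the new KEYRINGS_KEY entries in BASE_DISTROS make available. It only relies on the names introduced in constants.py above; the lookup helper and the loop are illustrative assumptions, not code from this series.

    from constants import BASE_DISTROS, KEYRINGS_KEY

    # Hypothetical helper: list the keyring packages a base distro arch needs.
    def keyring_packages(arch: str) -> list[str]:
        # Arches without a keyrings entry fall back to an empty list.
        return list(BASE_DISTROS[arch].get(KEYRINGS_KEY) or [])

    for arch in ('x86_64', 'aarch64', 'armv7h'):
        print(arch, keyring_packages(arch))
    # Expected with the constants above:
    #   x86_64  ['archlinux-keyring']
    #   aarch64 ['archlinuxarm-keyring']
    #   armv7h  ['archlinuxarm-keyring']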
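Also not part of the patch: a hedged sketch of how the two new helpers in utils.py could be chained during keyring init to unpack the pacman keyring files from a keyring package archive. The package path and the usr/share/pacman/keyrings layout are assumptions for illustration, and tarfile must support the archive's compression.

    import tempfile

    from utils import extract_files_from_tar_generator, read_files_from_tar_recursive

    # Hypothetical location of a previously downloaded keyring package
    pkg = '/tmp/archlinux-keyring.pkg.tar.xz'
    dest = tempfile.mkdtemp(prefix='keyring_')

    # Stream only the files under the keyring directory out of the package and
    # write them into dest, stripping the leading prefix from their paths.
    extract_files_from_tar_generator(
        read_files_from_tar_recursive(pkg, ['usr/share/pacman/keyrings']),
        output_dir=dest,
        remove_prefix='usr/share/pacman/keyrings',
    )
    # dest should then hold e.g. archlinux.gpg, archlinux-trusted, archlinux-revoked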