WIP: keyring init

InsanePrawn 2023-04-17 16:27:36 +02:00
parent 0c56038ed6
commit e068b3587e
4 changed files with 57 additions and 14 deletions

View file

@@ -1,4 +1,4 @@
-from typehelpers import TypeAlias
+from typehelpers import TypeAlias, Union

 FASTBOOT = 'fastboot'
 FLASH_PARTS = {
@@ -56,6 +56,8 @@ ARCHES = [
 DistroArch: TypeAlias = Arch
 TargetArch: TypeAlias = Arch

+KEYRINGS_KEY = 'keyrings'
+
 ALARM_REPOS = {
     'core': 'http://mirror.archlinuxarm.org/$arch/$repo',
     'extra': 'http://mirror.archlinuxarm.org/$arch/$repo',
@@ -64,20 +66,22 @@ ALARM_REPOS = {
     'aur': 'http://mirror.archlinuxarm.org/$arch/$repo',
 }

-BASE_DISTROS: dict[DistroArch, dict[str, dict[str, str]]] = {
+ALARM_DISTRO: dict[str, Union[dict[str, str], list[str]]] = {
+    'repos': ALARM_REPOS,
+    KEYRINGS_KEY: ['archlinuxarm-keyring'],
+}
+
+BASE_DISTROS: dict[DistroArch, dict[str, Union[dict[str, str], list[str]]]] = {
     'x86_64': {
         'repos': {
             'core': 'https://geo.mirror.pkgbuild.com/$repo/os/$arch',
             'extra': 'https://geo.mirror.pkgbuild.com/$repo/os/$arch',
             'community': 'https://geo.mirror.pkgbuild.com/$repo/os/$arch',
         },
+        KEYRINGS_KEY: ['archlinux-keyring'],
     },
-    'aarch64': {
-        'repos': ALARM_REPOS,
-    },
-    'armv7h': {
-        'repos': ALARM_REPOS,
-    },
+    'aarch64': ALARM_DISTRO,
+    'armv7h': ALARM_DISTRO,
 }

 COMPILE_ARCHES: dict[Arch, str] = {
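
Sketch (not part of this commit): how the new KEYRINGS_KEY entries can be read back out of BASE_DISTROS. get_base_keyrings is a hypothetical helper name; the constants are the ones defined above.

# Hypothetical helper, shown only to illustrate the intended lookup;
# it is not added by this commit.
from constants import Arch, BASE_DISTROS, KEYRINGS_KEY


def get_base_keyrings(arch: Arch) -> list[str]:
    """Return the keyring package names declared for a base distro arch."""
    keyrings = BASE_DISTROS[arch].get(KEYRINGS_KEY, [])
    assert isinstance(keyrings, list)  # KEYRINGS_KEY maps to a list[str], 'repos' to a dict
    return keyrings


# get_base_keyrings('x86_64')  -> ['archlinux-keyring']
# get_base_keyrings('aarch64') -> ['archlinuxarm-keyring']  (shared via ALARM_DISTRO)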

View file

@@ -72,6 +72,7 @@ class LocalPackage(BinaryPackage):
         assert self.resolved_url and self.filename and self.filename in self.resolved_url
         path = f'{self.resolved_url.split("file://")[1]}'
         if dest_dir:
+            makedir(dest_dir)
             target = os.path.join(dest_dir, filename or self.filename)
             if os.path.getsize(path) != os.path.getsize(target) or sha256sum(path) != sha256sum(target):
                 copy_file(path, target, follow_symlinks=True)
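
The new makedir(dest_dir) call ensures the destination directory exists before target is computed and written. A minimal sketch of the assumed behaviour (the real makedir helper may do more, e.g. ownership handling):

import os


def makedir_sketch(path: str) -> None:
    # Assumption: acts like mkdir -p and is a no-op if the directory already exists,
    # so LocalPackage.acquire() can be called repeatedly with the same dest_dir.
    os.makedirs(path, exist_ok=True)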

View file

@@ -9,7 +9,7 @@ from copy import deepcopy
 from typing import ClassVar, Optional, Mapping, Union

 from config.state import config
-from constants import Arch, BASE_DISTROS, KUPFER_HTTPS, REPOS_CONFIG_FILE, REPOSITORIES
+from constants import Arch, BASE_DISTROS, KUPFER_HTTPS, KEYRINGS_KEY, REPOS_CONFIG_FILE, REPOSITORIES
 from dictscheme import DictScheme, toml_inline_dicts, TomlPreserveInlineDictEncoder
 from utils import sha256sum
@@ -39,11 +39,13 @@ class RepoConfig(AbstrRepoConfig):

 class BaseDistro(DictScheme):
     remote_url: Optional[str]
+    keyrings: Optional[list[str]]
     repos: dict[str, BaseDistroRepo]


 class ReposConfigFile(DictScheme):
     remote_url: Optional[str]
+    keyrings: Optional[list[str]]
     repos: dict[str, RepoConfig]
     base_distros: dict[Arch, BaseDistro]
     _path: Optional[str]
@@ -106,6 +108,7 @@ REPOS_CONFIG_DEFAULT = ReposConfigFile({
     '_path': '__DEFAULTS__',
     '_checksum': None,
     REMOTEURL_KEY: KUPFER_HTTPS,
+    KEYRINGS_KEY: [],
     REPOS_KEY: {
         'kupfer_local': REPO_DEFAULTS | {
             LOCALONLY_KEY: True
@@ -117,11 +120,10 @@ REPOS_CONFIG_DEFAULT = ReposConfigFile({
     BASEDISTROS_KEY: {
         arch: {
             REMOTEURL_KEY: None,
-            'repos': {
-                k: {
-                    'remote_url': v
-                } for k, v in arch_def['repos'].items()
-            },
+            KEYRINGS_KEY: arch_def.get(KEYRINGS_KEY, None),
+            'repos': {k: {
+                'remote_url': v
+            } for k, v in arch_def['repos'].items()},  # type: ignore[union-attr]
         } for arch, arch_def in BASE_DISTROS.items()
     },
 })
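
For one arch, the generated defaults now look roughly like this (keys spelled out literally for illustration; the code above builds them via the *_KEY constants, and the repo URLs come from ALARM_REPOS):

# Approximate shape of the 'aarch64' entry in REPOS_CONFIG_DEFAULT.base_distros
# after this change; illustration only, not code from the commit.
aarch64_defaults = {
    'remote_url': None,
    'keyrings': ['archlinuxarm-keyring'],  # arch_def.get(KEYRINGS_KEY, None), via ALARM_DISTRO
    'repos': {
        'core': {'remote_url': 'http://mirror.archlinuxarm.org/$arch/$repo'},
        'extra': {'remote_url': 'http://mirror.archlinuxarm.org/$arch/$repo'},
        # ... one entry per repo in ALARM_REPOS
    },
}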

View file

@@ -138,6 +138,42 @@ def read_files_from_tar(tar_file: str, files: Sequence[str]) -> Generator[tuple[
             yield path, fd


+def read_files_from_tar_recursive(tar_file: str, paths: Sequence[str], append_slash: bool = True) -> Generator[tuple[str, IO], None, None]:
+    """
+    Returns tar FDs to files that lie under the directories specified in paths.
+    HINT: deactivate append_slash to get glob-like behaviour, as if all paths ended with *
+    """
+    assert os.path.exists(tar_file)
+    # honour append_slash: only add the trailing slash when directory semantics are wanted
+    paths = [p.strip('/') + ('/' if append_slash else '') for p in paths]
+    with tarfile.open(tar_file) as index:
+        for member in index.getmembers():
+            for path in paths:
+                if member.isfile() and member.path.startswith(path):
+                    fd = index.extractfile(member)
+                    assert fd
+                    yield member.path, fd
+                    break
+
+
+def extract_files_from_tar_generator(
+    tar_generator: Generator[tuple[str, IO], None, None],
+    output_dir: str,
+    remove_prefix: str = '',
+    append_slash: bool = True,
+):
+    assert os.path.exists(output_dir)
+    remove_prefix = remove_prefix.strip('/')
+    if append_slash and remove_prefix:
+        remove_prefix += '/'
+    for file_path, fd in tar_generator:
+        assert file_path.startswith(remove_prefix)
+        output_path = os.path.join(output_dir, file_path[len(remove_prefix):].lstrip('/'))
+        os.makedirs(os.path.dirname(output_path), exist_ok=True)
+        with open(output_path, 'wb') as f:
+            f.write(fd.read())
+
+
 def download_file(path: str, url: str, update: bool = True):
     """Download a file over http[s]. With `update`, tries to use mtime timestamps to download only changed files."""
     url_time = None
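
Usage sketch for the two new tar helpers, e.g. pulling keyring files out of a keyring package without unpacking the whole archive. The module name, package path, and prefix below are illustrative assumptions, not values from this commit:

import os

# Assumption: the helpers live in the same module as read_files_from_tar / download_file.
from utils import extract_files_from_tar_generator, read_files_from_tar_recursive

keyring_pkg = '/tmp/archlinuxarm-keyring.pkg.tar.xz'  # hypothetical package archive
output_dir = '/tmp/keyrings'
os.makedirs(output_dir, exist_ok=True)  # extract_files_from_tar_generator asserts it exists

# Stream only the members under usr/share/pacman/keyrings/ into output_dir,
# dropping that prefix from the written paths.
extract_files_from_tar_generator(
    read_files_from_tar_recursive(keyring_pkg, ['usr/share/pacman/keyrings']),
    output_dir,
    remove_prefix='usr/share/pacman/keyrings',
)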