mirror of
https://gitlab.com/kupfer/kupferbootstrap.git
synced 2025-02-23 13:45:45 -05:00
WIP: keyring init
This commit is contained in:
parent
0c56038ed6
commit
e068b3587e
4 changed files with 57 additions and 14 deletions
20
constants.py
20
constants.py
|
@ -1,4 +1,4 @@
|
|||
from typehelpers import TypeAlias
|
||||
from typehelpers import TypeAlias, Union
|
||||
|
||||
FASTBOOT = 'fastboot'
|
||||
FLASH_PARTS = {
|
||||
|
@ -56,6 +56,8 @@ ARCHES = [
|
|||
DistroArch: TypeAlias = Arch
|
||||
TargetArch: TypeAlias = Arch
|
||||
|
||||
KEYRINGS_KEY = 'keyrings'
|
||||
|
||||
ALARM_REPOS = {
|
||||
'core': 'http://mirror.archlinuxarm.org/$arch/$repo',
|
||||
'extra': 'http://mirror.archlinuxarm.org/$arch/$repo',
|
||||
|
@ -64,20 +66,22 @@ ALARM_REPOS = {
|
|||
'aur': 'http://mirror.archlinuxarm.org/$arch/$repo',
|
||||
}
|
||||
|
||||
BASE_DISTROS: dict[DistroArch, dict[str, dict[str, str]]] = {
|
||||
ALARM_DISTRO: dict[str, Union[dict[str, str], list[str]]] = {
|
||||
'repos': ALARM_REPOS,
|
||||
KEYRINGS_KEY: ['archlinuxarm-keyring'],
|
||||
}
|
||||
|
||||
BASE_DISTROS: dict[DistroArch, dict[str, Union[dict[str, str], list[str]]]] = {
|
||||
'x86_64': {
|
||||
'repos': {
|
||||
'core': 'https://geo.mirror.pkgbuild.com/$repo/os/$arch',
|
||||
'extra': 'https://geo.mirror.pkgbuild.com/$repo/os/$arch',
|
||||
'community': 'https://geo.mirror.pkgbuild.com/$repo/os/$arch',
|
||||
},
|
||||
KEYRINGS_KEY: ['archlinux-keyring'],
|
||||
},
|
||||
'aarch64': {
|
||||
'repos': ALARM_REPOS,
|
||||
},
|
||||
'armv7h': {
|
||||
'repos': ALARM_REPOS,
|
||||
},
|
||||
'aarch64': ALARM_DISTRO,
|
||||
'armv7h': ALARM_DISTRO,
|
||||
}
|
||||
|
||||
COMPILE_ARCHES: dict[Arch, str] = {
|
||||
|
|
|
@ -72,6 +72,7 @@ class LocalPackage(BinaryPackage):
|
|||
assert self.resolved_url and self.filename and self.filename in self.resolved_url
|
||||
path = f'{self.resolved_url.split("file://")[1]}'
|
||||
if dest_dir:
|
||||
makedir(dest_dir)
|
||||
target = os.path.join(dest_dir, filename or self.filename)
|
||||
if os.path.getsize(path) != os.path.getsize(target) or sha256sum(path) != sha256sum(target):
|
||||
copy_file(path, target, follow_symlinks=True)
|
||||
|
|
|
@ -9,7 +9,7 @@ from copy import deepcopy
|
|||
from typing import ClassVar, Optional, Mapping, Union
|
||||
|
||||
from config.state import config
|
||||
from constants import Arch, BASE_DISTROS, KUPFER_HTTPS, REPOS_CONFIG_FILE, REPOSITORIES
|
||||
from constants import Arch, BASE_DISTROS, KUPFER_HTTPS, KEYRINGS_KEY, REPOS_CONFIG_FILE, REPOSITORIES
|
||||
from dictscheme import DictScheme, toml_inline_dicts, TomlPreserveInlineDictEncoder
|
||||
from utils import sha256sum
|
||||
|
||||
|
@ -39,11 +39,13 @@ class RepoConfig(AbstrRepoConfig):
|
|||
|
||||
class BaseDistro(DictScheme):
|
||||
remote_url: Optional[str]
|
||||
keyrings: Optional[list[str]]
|
||||
repos: dict[str, BaseDistroRepo]
|
||||
|
||||
|
||||
class ReposConfigFile(DictScheme):
|
||||
remote_url: Optional[str]
|
||||
keyrings: Optional[list[str]]
|
||||
repos: dict[str, RepoConfig]
|
||||
base_distros: dict[Arch, BaseDistro]
|
||||
_path: Optional[str]
|
||||
|
@ -106,6 +108,7 @@ REPOS_CONFIG_DEFAULT = ReposConfigFile({
|
|||
'_path': '__DEFAULTS__',
|
||||
'_checksum': None,
|
||||
REMOTEURL_KEY: KUPFER_HTTPS,
|
||||
KEYRINGS_KEY: [],
|
||||
REPOS_KEY: {
|
||||
'kupfer_local': REPO_DEFAULTS | {
|
||||
LOCALONLY_KEY: True
|
||||
|
@ -117,11 +120,10 @@ REPOS_CONFIG_DEFAULT = ReposConfigFile({
|
|||
BASEDISTROS_KEY: {
|
||||
arch: {
|
||||
REMOTEURL_KEY: None,
|
||||
'repos': {
|
||||
k: {
|
||||
KEYRINGS_KEY: arch_def.get(KEYRINGS_KEY, None),
|
||||
'repos': {k: {
|
||||
'remote_url': v
|
||||
} for k, v in arch_def['repos'].items()
|
||||
},
|
||||
} for k, v in arch_def['repos'].items()}, # type: ignore[union-attr]
|
||||
} for arch, arch_def in BASE_DISTROS.items()
|
||||
},
|
||||
})
|
||||
|
|
36
utils.py
36
utils.py
|
@ -138,6 +138,42 @@ def read_files_from_tar(tar_file: str, files: Sequence[str]) -> Generator[tuple[
|
|||
yield path, fd
|
||||
|
||||
|
||||
def read_files_from_tar_recursive(tar_file: str, paths: Sequence[str], append_slash: bool = True) -> Generator[tuple[str, IO], None, None]:
    """
    Yield (member_path, file object) for tar members that lie under the directories in `paths`.

    With `append_slash` (the default) a trailing slash is enforced on each path, so only
    members strictly inside that directory match.
    HINT: deactivate append_slash to get glob-like behaviour, as if all paths ended with *
    (e.g. path "foo" then also matches "foobar/baz").
    """
    assert os.path.exists(tar_file)
    # BUGFIX: append_slash was previously ignored — the slash was appended unconditionally,
    # so append_slash=False never produced the documented glob-like behaviour.
    suffix = '/' if append_slash else ''
    prefixes = [p.strip('/') + suffix for p in paths]
    with tarfile.open(tar_file) as index:
        for member in index.getmembers():
            # Only regular files are yielded; directories/links are skipped.
            if not member.isfile():
                continue
            if any(member.path.startswith(prefix) for prefix in prefixes):
                fd = index.extractfile(member)
                assert fd
                yield member.path, fd
|
||||
|
||||
|
||||
def extract_files_from_tar_generator(
    tar_generator: Generator[tuple[str, IO], None, None],
    output_dir: str,
    remove_prefix: str = '',
    append_slash: bool = True,
):
    """
    Write every (path, file object) pair yielded by `tar_generator` into `output_dir`.

    `remove_prefix` is stripped from the front of each member path before joining it
    onto `output_dir`; with `append_slash`, a trailing slash is enforced on a
    non-empty prefix so only whole directory components are removed. Every yielded
    path must start with the normalized prefix.
    """
    assert os.path.exists(output_dir)
    prefix = remove_prefix.strip('/')
    if prefix and append_slash:
        prefix = f'{prefix}/'
    for member_path, source in tar_generator:
        assert member_path.startswith(prefix)
        relative = member_path[len(prefix):].lstrip('/')
        destination = os.path.join(output_dir, relative)
        # Create intermediate directories on demand; the tar stream may
        # yield files in any order.
        os.makedirs(os.path.dirname(destination), exist_ok=True)
        with open(destination, 'wb') as sink:
            sink.write(source.read())
|
||||
|
||||
|
||||
def download_file(path: str, url: str, update: bool = True):
|
||||
"""Download a file over http[s]. With `update`, tries to use mtime timestamps to download only changed files."""
|
||||
url_time = None
|
||||
|
|
Loading…
Add table
Reference in a new issue