formatting cleanups

InsanePrawn 2023-03-27 19:45:35 +02:00
parent 8ea1ae98be
commit 3ed0721d52
4 changed files with 31 additions and 47 deletions

View file

@@ -59,7 +59,13 @@ class DataClass(Munch):
         self.update(d | kwargs, validate=validate)
 
     @classmethod
-    def transform(cls, values: Mapping[str, Any], validate: bool = True, allow_extra: bool = False, type_hints: Optional[dict[str, Any]] = None) -> Any:
+    def transform(
+        cls,
+        values: Mapping[str, Any],
+        validate: bool = True,
+        allow_extra: bool = False,
+        type_hints: Optional[dict[str, Any]] = None,
+    ) -> Any:
         results = {}
         values = dict(values)
         print(f"\ntransform function:\n{values}, {type_hints=}")
@@ -89,7 +95,8 @@ class DataClass(Munch):
                 key_type, value_type = dict_hints[0]
                 if not isinstance(value, Mapping):
                     if validate:
-                        raise Exception(f"Got non-mapping {value!r} for expected dict type: {key_type} => {value_type}. Allowed classes: {_classes}")
+                        raise Exception(
+                            f"Got non-mapping {value!r} for expected dict type: {key_type} => {value_type}. Allowed classes: {_classes}")
                     print(f"Got non-mapping {value!r} for expected dict type: {key_type} => {value_type}. Allowed classes: {_classes}")
                     results[key] = value
                     continue
@ -264,7 +271,7 @@ class DataClass(Munch):
self, self,
strip_hidden: Optional[bool] = None, strip_hidden: Optional[bool] = None,
sparse: Optional[bool] = None, sparse: Optional[bool] = None,
**yaml_args **yaml_args,
) -> str: ) -> str:
import yaml import yaml
yaml_args = {'sort_keys': False} | yaml_args yaml_args = {'sort_keys': False} | yaml_args
@@ -277,7 +284,7 @@ class DataClass(Munch):
         self,
         strip_hidden: Optional[bool] = None,
         sparse: Optional[bool] = None,
-        encoder: Optional[TomlEncoder] = TomlPreserveInlineDictEncoder()
+        encoder: Optional[TomlEncoder] = TomlPreserveInlineDictEncoder(),
     ) -> str:
         return toml.dumps(
             self.toDict(strip_hidden=strip_hidden, sparse=sparse),
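The {'sort_keys': False} | yaml_args line in the toYaml hunk is the PEP 584 dict-union idiom for defaulting keyword arguments: defaults go on the left, caller-supplied keys on the right win. A self-contained sketch of the same pattern with plain PyYAML; the function name is illustrative:

import yaml

def dump_with_defaults(data, **yaml_args):
    # Left side holds defaults, right side (caller args) overrides them.
    yaml_args = {'sort_keys': False} | yaml_args
    return yaml.dump(data, **yaml_args)

print(dump_with_defaults({'b': 1, 'a': 2}))  # keys stay in insertion order
print(dump_with_defaults({'b': 1, 'a': 2}, default_flow_style=True))  # caller override wins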

View file

@@ -1,7 +1,7 @@
 from enum import IntFlag
 from typing import Generic, Mapping, Optional, TypeVar
 
-from constants import Arch, ARCHES, BASE_DISTROS, REPOSITORIES, KUPFER_BRANCH_MARKER, KUPFER_HTTPS, CHROOT_PATHS
+from constants import Arch, ARCHES, REPOSITORIES, KUPFER_BRANCH_MARKER, KUPFER_HTTPS, CHROOT_PATHS
 from generator import generate_pacman_conf_body
 from config.state import config
@@ -211,4 +211,3 @@ def get_kupfer_local(arch: Optional[Arch] = None, in_chroot: bool = True, scan:
     d = get_kupfer_distro(arch, location=DistroLocation.CHROOT if in_chroot else DistroLocation.LOCAL, scan=scan)
     assert isinstance(d, LocalDistro)
     return d
-
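The assert isinstance(d, LocalDistro) right before return d in this hunk narrows the broader type returned by get_kupfer_distro so the LocalDistro return annotation holds, both at runtime and for the type checker. A standalone sketch of that narrowing pattern with placeholder classes, not the real distro types:

class Distro:
    pass

class LocalDistro(Distro):
    pass

def get_distro(local: bool) -> Distro:
    return LocalDistro() if local else Distro()

def get_local_distro() -> LocalDistro:
    d = get_distro(local=True)
    # The assert is a runtime guard and also tells mypy that d is a LocalDistro,
    # so returning it satisfies the narrower annotation.
    assert isinstance(d, LocalDistro)
    return d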

View file

@@ -6,14 +6,13 @@ import toml
 import yaml
 
 from copy import deepcopy
-from typing import Any, ClassVar, Optional, Mapping
+from typing import ClassVar, Optional, Mapping
 
 from config.state import config
 from constants import Arch, BASE_DISTROS, KUPFER_HTTPS, REPOS_CONFIG_FILE, REPOSITORIES
-from dataclass import DataClass, Munch, toml_inline_dicts, TomlEncoder, TomlPreserveInlineDictEncoder
+from dataclass import DataClass, toml_inline_dicts, TomlPreserveInlineDictEncoder
 from utils import sha256sum
-
 
 REPOS_KEY = 'repos'
 REMOTEURL_KEY = 'remote_url'
 LOCALONLY_KEY = 'local_only'
@@ -56,10 +55,7 @@ class ReposConfigFile(DataClass):
         super().__init__(d=d, **kwargs)
         for repo_cls, defaults, repos, remote_url in [
             (RepoConfig, REPO_DEFAULTS, self.get(REPOS_KEY, {}), d.get(REMOTEURL_KEY, None)),
-            *[
-                (BaseDistroRepo, BASE_DISTRO_DEFAULTS, _distro.repos, _distro.get(REMOTEURL_KEY, None))
-                for _distro in self.base_distros.values()
-            ],
+            *[(BaseDistroRepo, BASE_DISTRO_DEFAULTS, _distro.repos, _distro.get(REMOTEURL_KEY, None)) for _distro in self.base_distros.values()],
         ]:
             if repos is None:
                 continue
@@ -84,7 +80,7 @@ class ReposConfigFile(DataClass):
 
     def toToml(self, strip_hidden=None, sparse=None, encoder=TomlPreserveInlineDictEncoder()):
         d = self.toDict(strip_hidden=strip_hidden, sparse=sparse)
-        for key in [REPOS_KEY, ]:
+        for key in [REPOS_KEY]:
             if key not in d or not isinstance(d[key], Mapping):
                 continue
             inline = {name: {k: toml_inline_dicts(v) for k, v in value.items()} for name, value in d[key].items()}
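For context on the toToml hunk above: toml_inline_dicts and TomlPreserveInlineDictEncoder are imported from this repo's dataclass module and appear to build on the uiri/toml encoder that keeps specially marked dicts as one-line inline tables instead of separate [sections]. A rough sketch of that upstream mechanism, assuming the toml package's public API; the InlineDict helper is illustrative:

import toml
from toml.decoder import InlineTableDict

class InlineDict(dict, InlineTableDict):
    # Dicts tagged with InlineTableDict are emitted as `key = { ... }`
    # by TomlPreserveInlineDictEncoder instead of a separate [table].
    pass

doc = {'repos': {'local': InlineDict({'SigLevel': 'Never', 'local_only': True})}}
print(toml.dumps(doc, encoder=toml.TomlPreserveInlineDictEncoder()))
# roughly: [repos]
#          local = { SigLevel = "Never", local_only = true }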
@@ -92,33 +88,13 @@ class ReposConfigFile(DataClass):
             d[key] = inline
         return toml.dumps(d, encoder=encoder)
 
-    def toToml_old(self, **kwargs):
-        """Dumps specific TOML format, kwargs are ignored."""
-
-        def toml_line(k, v):
-            assert isinstance(k, str)
-            if isinstance(v, dict):
-                assert isinstance(v, Munch)
-                return f'{k} = ' + v.toYAML(default_flow_style=True).strip('\n')
-            #return toml.dumps({k: (v if not isinstance(v, dict) else toml.}, encoder=toml.encoder.TomlPreserveInlineDictEncoder).strip('\n')
-
-        res = ''
-        for k in self.keys():
-            if k == REPOS_KEY:
-                continue
-            res
-        for k, v in self.repos.items():
-            res += f"[repos.{k}]\n"
-            for subk, subv in v.items():
-                res += toml_line(subk, subv) + '\n'
-            res += '\n'
-        return res
-
 
 REPO_DEFAULTS = {
     LOCALONLY_KEY: None,
     REMOTEURL_KEY: None,
-    OPTIONS_KEY: {'SigLevel': 'Never'}
+    OPTIONS_KEY: {
+        'SigLevel': 'Never'
+    },
 }
 
 BASE_DISTRO_DEFAULTS = {
@@ -131,15 +107,16 @@ REPOS_CONFIG_DEFAULT = ReposConfigFile({
     '_checksum': None,
     REMOTEURL_KEY: KUPFER_HTTPS,
     REPOS_KEY: {
-        'local': REPO_DEFAULTS | {LOCALONLY_KEY: True},
+        'local': REPO_DEFAULTS | {
+            LOCALONLY_KEY: True
+        },
         **{r: deepcopy(REPO_DEFAULTS) for r in REPOSITORIES},
     },
-    BASEDISTROS_KEY: {
-        arch: {
-            'repos': {k: {'remote_url': v} for k, v in arch_def['repos'].items()},
-        }
-        for arch, arch_def in BASE_DISTROS.items()
-    },
+    BASEDISTROS_KEY: {arch: {
+        'repos': {k: {
+            'remote_url': v
+        } for k, v in arch_def['repos'].items()},
+    } for arch, arch_def in BASE_DISTROS.items()},
 })
 
 _current_config = None
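A note on the REPOS_CONFIG_DEFAULT hunk: 'local' gets REPO_DEFAULTS | {LOCALONLY_KEY: True}, a new top-level dict with one key overridden, while the remaining repos each get deepcopy(REPO_DEFAULTS) so the nested options mapping is not shared between entries. A self-contained sketch of why the deep copy matters; key names are illustrative stand-ins for the *_KEY constants:

from copy import deepcopy

REPO_DEFAULTS = {'local_only': None, 'options': {'SigLevel': 'Never'}}

shallow = {r: dict(REPO_DEFAULTS) for r in ('main', 'device')}   # copies only the top level
deep = {r: deepcopy(REPO_DEFAULTS) for r in ('main', 'device')}  # copies nested dicts too

shallow['main']['options']['SigLevel'] = 'Required'
deep['main']['options']['SigLevel'] = 'Required'

print(shallow['device']['options']['SigLevel'])  # 'Required': the nested dict is shared
print(deep['device']['options']['SigLevel'])     # 'Never': each repo owns its options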

View file

@@ -187,6 +187,7 @@ def cmd_init(
     for arch in ARCHES:
         init_prebuilts(arch)
 
+
 @cmd_packages.command(name='build')
 @click.option('--force', is_flag=True, default=False, help='Rebuild even if package is already built')
 @click.option('--arch', default=None, required=False, type=click.Choice(ARCHES), help="The CPU architecture to build for")
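The decorators at the end of this hunk follow click's standard group/command pattern. A minimal self-contained sketch of an analogous command; the group, ARCHES values, and command body are illustrative, not the real kupferbootstrap CLI:

import click

ARCHES = ['x86_64', 'aarch64']  # illustrative stand-in for constants.ARCHES

@click.group()
def cmd_packages():
    """Package-related subcommands."""

@cmd_packages.command(name='build')
@click.option('--force', is_flag=True, default=False, help='Rebuild even if package is already built')
@click.option('--arch', default=None, required=False, type=click.Choice(ARCHES), help='The CPU architecture to build for')
def cmd_build(force: bool, arch: str):
    # click passes each declared option as a keyword argument named after it.
    click.echo(f'building for {arch or "host"} (force={force})')

if __name__ == '__main__':
    cmd_packages()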