TEMP: WIP: add repo_config
Marked TEMP because it spams a bunch of prints in dataclass handling.
parent 72f4d4948e
commit ff1c31e157
10 changed files with 493 additions and 130 deletions
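
For context, a minimal usage sketch of the repos config machinery this commit introduces (not part of the diff; it assumes an initialised kupferbootstrap config.state and uses only names defined in distro/repo_config.py below):

# Hypothetical sketch, not included in this commit.
from distro.repo_config import REPOS_CONFIG_DEFAULT, get_repo_config

# Without a pkgbuilds/repos.toml, get_repo_config() falls back to a copy of
# REPOS_CONFIG_DEFAULT and reports changed=False; otherwise it parses the file
# and caches the result keyed on its path and sha256 checksum.
conf, changed = get_repo_config()
print(conf.remote_url)   # default remote URL template (KUPFER_HTTPS unless overridden)
print(list(conf.repos))  # configured repo names
print(conf.toToml())     # serialised back to TOML

# The built-in default config always contains a local-only repo named 'local'.
assert REPOS_CONFIG_DEFAULT.repos['local'].local_only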

distro/distro.py (137 changes)

@@ -1,3 +1,4 @@
from enum import IntFlag
from typing import Generic, Mapping, Optional, TypeVar

from constants import Arch, ARCHES, BASE_DISTROS, REPOSITORIES, KUPFER_HTTPS, CHROOT_PATHS

@@ -5,6 +6,14 @@ from generator import generate_pacman_conf_body
from config.state import config

from .repo import BinaryPackageType, RepoInfo, Repo, LocalRepo, RemoteRepo
from .repo_config import AbstrRepoConfig, BaseDistro, ReposConfigFile, REPOS_CONFIG_DEFAULT, get_repo_config as _get_repo_config


class DistroLocation(IntFlag):
    REMOTE = 0
    LOCAL = 1
    CHROOT = 3


RepoType = TypeVar('RepoType', bound=Repo)

@@ -72,11 +81,6 @@ class RemoteDistro(Distro[RemoteRepo]):
        return RemoteRepo(**kwargs)


def get_base_distro(arch: str, scan: bool = False) -> RemoteDistro:
    repos = {name: RepoInfo(url_template=url) for name, url in BASE_DISTROS[arch]['repos'].items()}
    return RemoteDistro(arch=arch, repo_infos=repos, scan=scan)


def get_kupfer(arch: str, url_template: str, scan: bool = False) -> Distro:
    repos = {name: RepoInfo(url_template=url_template, options={'SigLevel': 'Never'}) for name in REPOSITORIES}
    remote = not url_template.startswith('file://')

@@ -90,9 +94,16 @@ def get_kupfer(arch: str, url_template: str, scan: bool = False) -> Distro:
    return distro


_kupfer_https = dict[Arch, RemoteDistro]()
_kupfer_local = dict[Arch, LocalDistro]()
_kupfer_local_chroots = dict[Arch, LocalDistro]()
_kupfer_https: dict[Arch, RemoteDistro] = {}
_kupfer_local: dict[Arch, LocalDistro] = {}
_kupfer_local_chroots: dict[Arch, LocalDistro] = {}


def reset_distro_caches():
    global _kupfer_https, _kupfer_local, _kupfer_local_chroots
    for cache in _kupfer_https, _kupfer_local, _kupfer_local_chroots:
        assert isinstance(cache, dict)
        cache.clear()


def get_kupfer_url(url: str = KUPFER_HTTPS, branch: Optional[str] = None) -> str:

@@ -101,29 +112,103 @@ def get_kupfer_url(url: str = KUPFER_HTTPS, branch: Optional[str] = None) -> str
    return url.replace('%branch%', branch)


def get_kupfer_https(arch: Arch, scan: bool = False) -> RemoteDistro:
    global _kupfer_https
    if arch not in _kupfer_https or not _kupfer_https[arch]:
        kupfer = get_kupfer(arch, get_kupfer_url(), scan)
        assert isinstance(kupfer, RemoteDistro)
        _kupfer_https[arch] = kupfer
    item = _kupfer_https[arch]
def get_repo_config(*args, **kwargs) -> ReposConfigFile:
    repo_config, changed = _get_repo_config(*args, **kwargs)
    if changed:
        reset_distro_caches()
    return repo_config


def get_kupfer_repo_names(local) -> list[str]:
    configs = get_repo_config()
    results = []
    for repo, repo_config in configs.repos.items():
        if not local and repo_config.local_only:
            continue
        results.append(repo)
    return results


def get_RepoInfo(arch: Arch, repo_config: AbstrRepoConfig, default_url: Optional[str]) -> RepoInfo:
    url = repo_config.remote_url or default_url
    if isinstance(url, dict):
        url = url.get(arch, default_url)
    assert url
    return RepoInfo(
        url_template=url,
        options=repo_config.get('options', None) or {},
    )


def get_base_distro(arch: Arch, scan: bool = False) -> RemoteDistro:
    base_distros = get_repo_config().base_distros
    if base_distros is None or arch not in base_distros:
        base_distros = REPOS_CONFIG_DEFAULT.base_distros
    assert base_distros
    distro: BaseDistro
    distro = base_distros.get(arch)  # type: ignore[assignment]
    repos = {}
    for repo, repo_config in distro.repos.items():
        repos[repo] = get_RepoInfo(arch, repo_config, default_url=distro.remote_url)

    return RemoteDistro(arch=arch, repo_infos=repos, scan=scan)


def get_kupfer_distro(
    arch: Arch,
    location: DistroLocation,
    scan: bool = False,
) -> Distro:
    global _kupfer_https, _kupfer_local, _kupfer_local_chroots
    cls: type[Distro]
    cache: Mapping[str, Distro]
    repo_config = get_repo_config()
    if location == DistroLocation.REMOTE:
        cache = _kupfer_https
        default_url = get_kupfer_url(repo_config.remote_url or KUPFER_HTTPS)
        repos = {repo: get_RepoInfo(arch, conf, default_url) for repo, conf in repo_config.repos.items() if not conf.local_only}
        cls = RemoteDistro
    elif location in [DistroLocation.CHROOT, DistroLocation.LOCAL]:
        # pick the matching cache: plain local package paths vs. the paths visible inside the chroot
        cache = _kupfer_local if location == DistroLocation.LOCAL else _kupfer_local_chroots
        pkgdir = CHROOT_PATHS['packages'] if location == DistroLocation.CHROOT else config.get_path('packages')
        default_url = f"file://{pkgdir}/$arch/$repo"
        cls = LocalDistro
        repos = {}
        for name, repo in repo_config.repos.items():
            repo = repo.copy()
            repo.remote_url = default_url
            repos[name] = get_RepoInfo(arch, repo, default_url)
    else:
        raise Exception(f"Unknown location {location}")
    if cache is None:
        cache = {}
    assert arch
    assert isinstance(cache, dict)
    if arch not in cache or not cache[arch]:
        distro = cls(
            arch=arch,
            repo_infos=repos,
            scan=scan,
        )
        assert isinstance(distro, (LocalDistro, RemoteDistro))
        cache[arch] = distro  # store before falling through, otherwise the cache is never filled
    item: Distro = cache[arch]
    if scan and not item.is_scanned():
        item.scan()
    return item
def get_kupfer_https(arch: Arch, scan: bool = False) -> RemoteDistro:
    d = get_kupfer_distro(arch, location=DistroLocation.REMOTE, scan=scan)
    assert isinstance(d, RemoteDistro)
    return d


def get_kupfer_local(arch: Optional[Arch] = None, in_chroot: bool = True, scan: bool = False) -> LocalDistro:
    global _kupfer_local, _kupfer_local_chroots
    cache = _kupfer_local_chroots if in_chroot else _kupfer_local
    arch = arch or config.runtime.arch
    assert arch
    if arch not in cache or not cache[arch]:
        dir = CHROOT_PATHS['packages'] if in_chroot else config.get_path('packages')
        kupfer = get_kupfer(arch, f"file://{dir}/$arch/$repo")
        assert isinstance(kupfer, LocalDistro)
        cache[arch] = kupfer
    item = cache[arch]
    if scan and not item.is_scanned():
        item.scan()
    return item
    d = get_kupfer_distro(arch, location=DistroLocation.CHROOT if in_chroot else DistroLocation.LOCAL, scan=scan)
    assert isinstance(d, LocalDistro)
    return d
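
For context, a rough sketch of how the reworked helpers above are meant to be called (not part of the diff; the arch value is an arbitrary example and an initialised kupferbootstrap config.state is assumed):

# Hypothetical sketch, not included in this commit.
from distro.distro import (
    DistroLocation,
    get_kupfer_distro,
    get_kupfer_https,
    get_kupfer_local,
    get_kupfer_repo_names,
)

arch = 'aarch64'  # example value

# The old entry points remain, but now delegate to get_kupfer_distro() and the repos config:
remote = get_kupfer_https(arch)                          # DistroLocation.REMOTE
local_distro = get_kupfer_local(arch, in_chroot=False)   # DistroLocation.LOCAL
chroot_distro = get_kupfer_distro(arch, DistroLocation.CHROOT)

# Repo names now come from the parsed repos config instead of the hardcoded REPOSITORIES list:
print(get_kupfer_repo_names(local=True))   # includes local-only repos
print(get_kupfer_repo_names(local=False))  # remote repos only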

distro/repo_config.py (new file, 184 lines)

@@ -0,0 +1,184 @@
from __future__ import annotations

import logging
import os
import toml
import yaml

from copy import deepcopy
from typing import Any, ClassVar, Optional, Mapping

from config.state import config
from constants import Arch, BASE_DISTROS, KUPFER_HTTPS, REPOS_CONFIG_FILE, REPOSITORIES
from dataclass import DataClass, Munch, toml_inline_dicts, TomlEncoder, TomlPreserveInlineDictEncoder
from utils import sha256sum


REPOS_KEY = 'repos'
NAME_KEY = 'name'
REMOTEURL_KEY = 'remote_url'
LOCALONLY_KEY = 'local_only'
OPTIONS_KEY = 'options'
BASEDISTROS_KEY = 'base_distros'

_current_config: Optional[ReposConfigFile] = None
class AbstrRepoConfig(DataClass):
    options: Optional[dict[str, str]]
    _strip_hidden: ClassVar[bool] = True
    _sparse: ClassVar[bool] = True


class BaseDistroRepo(AbstrRepoConfig):
    remote_url: Optional[str]


class RepoConfig(AbstrRepoConfig):
    name: str
    remote_url: Optional[str | dict[Arch, str]]
    local_only: Optional[bool]


class BaseDistro(DataClass):
    remote_url: Optional[str]
    repos: dict[str, BaseDistroRepo]
class ReposConfigFile(DataClass):
    remote_url: Optional[str]
    repos: dict[str, RepoConfig]
    base_distros: dict[Arch, BaseDistro]
    _path: Optional[str]
    _checksum: Optional[str]
    _strip_hidden: ClassVar[bool] = True
    _sparse: ClassVar[bool] = True

    def __init__(self, d, **kwargs):
        remote_url = d.get(REMOTEURL_KEY, None)
        super().__init__(d=d, **kwargs)
        for repo_cls, defaults, repos in [
            (RepoConfig, REPO_DEFAULTS, self.get(REPOS_KEY, {})),
            *[(BaseDistroRepo, BASE_DISTRO_DEFAULTS, _distro.repos) for _distro in self.base_distros.values()],
        ]:
            if repos is None:
                continue
            for name, repo in repos.items():
                # merge the hardcoded defaults with the parsed entry; inherit the file-level
                # remote_url unless the repo sets one or is marked local_only
                _repo = defaults | (repo or {})  # type: ignore[operator]
                if REMOTEURL_KEY not in repo and not repo.get(LOCALONLY_KEY, None):
                    _repo[REMOTEURL_KEY] = remote_url
                repos[name] = repo_cls(_repo, **kwargs)
        # self.repos = repos

    def toDict(self, strip_hidden: Optional[bool] = None, sparse: Optional[bool] = None):
        d = super().toDict(strip_hidden=strip_hidden, sparse=sparse)
        if REPOS_KEY in d:
            for v in d[REPOS_KEY].values():
                if isinstance(v, dict) and NAME_KEY in v:
                    v.pop(NAME_KEY)
        return d

    @staticmethod
    def parse_config(path: str) -> ReposConfigFile:
        try:
            with open(path, 'r') as fd:
                data = toml.load(fd)
            data['_path'] = path
            data['_checksum'] = sha256sum(path)
            return ReposConfigFile(data, validate=True)
        except Exception as ex:
            logging.error(f'Error parsing repos config at "{path}":\n{ex}')
            raise ex

    def toToml(self, strip_hidden=None, sparse=None, encoder=TomlPreserveInlineDictEncoder()):
        d = self.toDict(strip_hidden=strip_hidden, sparse=sparse)
        for key in [REPOS_KEY]:
            if key not in d or not isinstance(d[key], Mapping):
                continue
            inline = {name: {k: toml_inline_dicts(v) for k, v in value.items()} for name, value in d[key].items()}
            logging.info(f"Inlined {key}: {inline}")
            d[key] = inline
        return toml.dumps(d, encoder=encoder)

    def toToml_old(self, **kwargs):
        """Dumps specific TOML format, kwargs are ignored."""

        def toml_line(k, v):
            assert isinstance(k, str)
            if isinstance(v, dict):
                assert isinstance(v, Munch)
                return f'{k} = ' + v.toYAML(default_flow_style=True).strip('\n')
            # return toml.dumps({k: (v if not isinstance(v, dict) else toml.}, encoder=toml.encoder.TomlPreserveInlineDictEncoder).strip('\n')
            # fall back to plain TOML for scalar values
            return toml.dumps({k: v}).strip('\n')

        res = ''
        for k in self.keys():
            if k == REPOS_KEY:
                continue
            res += toml_line(k, self[k]) + '\n'

        for k, v in self.repos.items():
            res += f"[repos.{k}]\n"
            for subk, subv in v.items():
                res += toml_line(subk, subv) + '\n'
            res += '\n'
        return res


REPO_DEFAULTS = {
    LOCALONLY_KEY: None,
    REMOTEURL_KEY: None,
    OPTIONS_KEY: {'SigLevel': 'Never'}
}

BASE_DISTRO_DEFAULTS = {
    REMOTEURL_KEY: None,
    OPTIONS_KEY: None,
}

REPOS_CONFIG_DEFAULT = ReposConfigFile({
    REMOTEURL_KEY: KUPFER_HTTPS,
    REPOS_KEY: {
        'local': REPO_DEFAULTS | {LOCALONLY_KEY: True},
        **{r: deepcopy(REPO_DEFAULTS) for r in REPOSITORIES},
    },
    BASEDISTROS_KEY: {
        arch: {
            'repos': {k: {'remote_url': v} for k, v in arch_def['repos'].items()},
        }
        for arch, arch_def in BASE_DISTROS.items()
    },
})


def get_repo_config(
    initialize_pkgbuilds: bool = False,
    repo_config_file: Optional[str] = None,
) -> tuple[ReposConfigFile, bool]:
    global _current_config
    pkgbuilds_dir = config.get_path('pkgbuilds')
    repo_config_file_default = os.path.join(pkgbuilds_dir, REPOS_CONFIG_FILE)
    if repo_config_file is None:
        repo_config_file_path = repo_config_file_default
    else:
        repo_config_file_path = repo_config_file
    if not os.path.exists(repo_config_file_path):
        if repo_config_file is not None:
            raise Exception(f"Requested repo config {repo_config_file} doesn't exist")
        if not initialize_pkgbuilds:
            logging.warning(f"{repo_config_file_path} doesn't exist, using default Repositories")
            return deepcopy(REPOS_CONFIG_DEFAULT), False
        from packages.pkgbuild import init_pkgbuilds
        init_pkgbuilds()
        return get_repo_config(initialize_pkgbuilds=False, repo_config_file=repo_config_file_path)
    conf = _current_config
    changed = False
    # reparse only if the file is new, was moved, or its checksum no longer matches
    if not conf or conf._path != repo_config_file_path or conf._checksum != sha256sum(repo_config_file_path):
        conf = ReposConfigFile.parse_config(repo_config_file_path)
        if repo_config_file_path == repo_config_file_default:
            _current_config = conf
        changed = True
    return conf, changed


def get_repos(**kwargs) -> list[RepoConfig]:
    config, _ = get_repo_config(**kwargs)
    return list(config.repos.values())
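
To make the expected on-disk format concrete, here is an illustrative repos.toml matching the schema defined above (everything in it is invented; only the key names follow RepoConfig, BaseDistro and ReposConfigFile):

# Hypothetical example, not included in this commit.
import toml

EXAMPLE_REPOS_TOML = '''
remote_url = "https://example.org/kupfer/%branch%/$arch/$repo"

[repos.main]

[repos.local]
local_only = true

[repos.cross]
options = { SigLevel = "Never" }

[base_distros.aarch64.repos.alarm]
remote_url = "https://example.org/archlinuxarm/$arch/$repo"
'''

data = toml.loads(EXAMPLE_REPOS_TOML)
assert set(data) == {'remote_url', 'repos', 'base_distros'}
# ReposConfigFile.parse_config() reads a file of this shape and applies REPO_DEFAULTS /
# BASE_DISTRO_DEFAULTS to every entry, inheriting the top-level remote_url where unset.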