Mirror of https://gitlab.com/kupfer/kupferbootstrap.git (synced 2025-02-22 21:25:43 -05:00)
global: refactor to use config.{file,runtime}.$member instead of config.file["$member"]
parent ba13293b93
commit 8ad18c00a6
15 changed files with 66 additions and 54 deletions
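The commit swaps dict-style lookups such as config.file['build']['crossdirect'] for dotted attribute access (config.file.build.crossdirect) on both the file-backed and the runtime config. As a rough illustration of how dotted access can be layered over a plain dict, here is a minimal sketch; the DotDict name and implementation are assumptions for illustration only, not the wrapper class kupferbootstrap actually ships:

# Illustrative sketch only: a hypothetical DotDict, not kupferbootstrap's real config class.
from typing import Any


class DotDict(dict):
    """dict subclass that additionally exposes its keys as attributes, recursively."""

    def __getattr__(self, name: str) -> Any:
        try:
            value = self[name]
        except KeyError as ex:
            raise AttributeError(name) from ex
        # wrap nested dicts so chained access like cfg.build.crossdirect keeps working
        return DotDict(value) if isinstance(value, dict) else value

    def __setattr__(self, name: str, value: Any):
        self[name] = value


cfg = DotDict({'build': {'crossdirect': True}})
assert cfg.build.crossdirect == cfg['build']['crossdirect']

In a scheme like this both spellings stay available, which is why the hunks below can switch call sites one by one while membership tests such as 'profiles' in config.file keep working.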
@@ -50,7 +50,7 @@ def cmd_chroot(type: str = 'build', arch: str = None, enable_crossdirect=True):
         build_chroot.initialize()
         build_chroot.initialized = True
         build_chroot.mount_pkgbuilds()
-        if config.file['build']['crossdirect'] and enable_crossdirect:
+        if config.file.build.crossdirect and enable_crossdirect:
             build_chroot.mount_crossdirect()
     else:
         raise Exception('Really weird bug')
@@ -228,7 +228,8 @@ class Chroot(AbstractChroot):
             raise Exception(f'Chroot {self.name} is inactive, not running command! Hint: pass `fail_inactive=False`')
         if outer_env is None:
             outer_env = {}
-        native = config.runtime['arch']
+        native = config.runtime.arch
+        assert native
         if self.arch != native and 'QEMU_LD_PREFIX' not in outer_env:
             outer_env = dict(outer_env)  # copy dict for modification
             outer_env |= {'QEMU_LD_PREFIX': f'/usr/{GCC_HOSTSPECS[native][self.arch]}'}
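A pattern repeated throughout this commit: wherever a runtime field such as config.runtime.arch is read through the new attribute access, an assert follows, because the field may still be unset early in startup and the assert both fails loudly and narrows the type for static checkers. A self-contained sketch of the idea (the Optional[str] typing of the field is an assumption, and qemu_prefix/hostspecs are made-up stand-ins, not names from this repository):

# Sketch: why an `assert` accompanies the attribute read. Names here are hypothetical.
from typing import Optional


class Runtime:
    arch: Optional[str] = None  # not populated until runtime detection has run


def qemu_prefix(runtime: Runtime, target_arch: str, hostspecs: dict) -> str:
    native = runtime.arch
    assert native  # fails loudly if arch was never set; narrows Optional[str] to str
    return f'/usr/{hostspecs[native][target_arch]}'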
@@ -285,7 +286,7 @@ class Chroot(AbstractChroot):
         user = None
         group = None
         if check_space is None:
-            check_space = config.file['pacman']['check_space']
+            check_space = config.file.pacman.check_space
         if not absolute_path:
             path = self.get_path('/etc')
             root_makedir(path)
@@ -69,7 +69,8 @@ class BuildChroot(Chroot):
         """
         target_arch = self.arch
         if not native_chroot:
-            native_chroot = get_build_chroot(config.runtime['arch'])
+            assert config.runtime.arch
+            native_chroot = get_build_chroot(config.runtime.arch)
         host_arch = native_chroot.arch
         hostspec = GCC_HOSTSPECS[host_arch][target_arch]
         cc = f'{hostspec}-cc'
@@ -66,8 +66,8 @@ def prompt_profile(name: str, create: bool = True, defaults: Union[Profile, dict

     profile: Any = PROFILE_EMPTY | defaults
     # don't use get_profile() here because we need the sparse profile
-    if name in config.file['profiles']:
-        profile |= config.file['profiles'][name]
+    if name in config.file.profiles:
+        profile |= config.file.profiles[name]
     elif create:
         logging.info(f"Profile {name} doesn't exist yet, creating new profile.")
     else:
@@ -113,7 +113,7 @@ def prompt_for_save(retry_ctx: Optional[click.Context] = None):
     If `retry_ctx` is passed, the context's command will be reexecuted with the same arguments if the user chooses to retry.
     False will still be returned as the retry is expected to either save, perform another retry or arbort.
     """
-    if click.confirm(f'Do you want to save your changes to {config.runtime["config_file"]}?', default=True):
+    if click.confirm(f'Do you want to save your changes to {config.runtime.config_file}?', default=True):
         return True
     if retry_ctx:
         if click.confirm('Retry? ("n" to quit without saving)', default=True):
@@ -171,7 +171,7 @@ def cmd_config_init(ctx, sections: list[str] = CONFIG_SECTIONS, non_interactive:

     config.update(results)
     if 'profiles' in sections:
-        current_profile = 'default' if 'current' not in config.file['profiles'] else config.file['profiles']['current']
+        current_profile = 'default' if 'current' not in config.file.profiles else config.file.profiles.current
         new_current, _ = prompt_config('profile.current', default=current_profile, field_type=str)
         profile, changed = prompt_profile(new_current, create=True)
         config.update_profile(new_current, profile)
@@ -182,7 +182,7 @@ def cmd_config_init(ctx, sections: list[str] = CONFIG_SECTIONS, non_interactive:
     if not noop:
         config.write()
     else:
-        logging.info(f'--noop passed, not writing to {config.runtime["config_file"]}!')
+        logging.info(f'--noop passed, not writing to {config.runtime.config_file}!')


 @cmd_config.command(name='set')
@@ -250,8 +250,8 @@ def cmd_profile():
 def cmd_profile_init(ctx, name: str, non_interactive: bool = False, noop: bool = False):
     """Create or edit a profile"""
     profile = deepcopy(PROFILE_EMPTY)
-    if name in config.file['profiles']:
-        profile |= config.file['profiles'][name]
+    if name in config.file.profiles:
+        profile |= config.file.profiles[name]

     if not non_interactive:
         profile, _changed = prompt_profile(name, create=True)
@@ -262,4 +262,4 @@ def cmd_profile_init(ctx, name: str, non_interactive: bool = False, noop: bool =
             return
         config.write()
     else:
-        logging.info(f'--noop passed, not writing to {config.runtime["config_file"]}!')
+        logging.info(f'--noop passed, not writing to {config.runtime.config_file}!')
@@ -204,7 +204,7 @@ class ConfigStateHolder:

     def try_load_file(self, config_file=None, base=CONFIG_DEFAULTS):
         config_file = config_file or CONFIG_DEFAULT_PATH
-        self.runtime['config_file'] = config_file
+        self.runtime.config_file = config_file
         self._profile_cache = None
         try:
             self.file = parse_file(config_file=config_file, base=base)
@@ -227,8 +227,8 @@ class ConfigStateHolder:
             raise ex

     def get_profile(self, name: Optional[str] = None) -> Profile:
-        name = name or self.file['profiles']['current']
-        self._profile_cache = resolve_profile(name=name, sparse_profiles=self.file['profiles'], resolved=self._profile_cache)
+        name = name or self.file.profiles.current
+        self._profile_cache = resolve_profile(name=name, sparse_profiles=self.file.profiles, resolved=self._profile_cache)
         return self._profile_cache[name]

     def enforce_profile_device_set(self, profile_name: Optional[str] = None, hint_or_set_arch: bool = False) -> Profile:
@@ -255,7 +255,7 @@ class ConfigStateHolder:
         return profile

     def get_path(self, path_name: str) -> str:
-        paths = self.file['paths']
+        paths = self.file.paths
         return resolve_path_template(paths[path_name], paths)

     def get_package_dir(self, arch: str):
@@ -268,7 +268,8 @@ class ConfigStateHolder:
     def write(self, path=None):
         """write toml representation of `self.file` to `path`"""
         if path is None:
-            path = self.runtime['config_file']
+            path = self.runtime.config_file
+        assert path
         os.makedirs(os.path.dirname(path), exist_ok=True)
         dump_file(path, self.file)
         logging.info(f'Created config file at {path}')
@@ -282,18 +283,18 @@ class ConfigStateHolder:
         merged = merge_configs(config_fragment, conf_base=self.file, warn_missing_defaultprofile=warn_missing_defaultprofile)
         changed = self.file != merged
         self.file.update(merged)
-        if changed and 'profiles' in config_fragment and self.file['profiles'] != config_fragment['profiles']:
+        if changed and 'profiles' in config_fragment and self.file.profiles != config_fragment['profiles']:
             self.invalidate_profile_cache()
         return changed

     def update_profile(self, name: str, profile: Profile, merge: bool = False, create: bool = True, prune: bool = True):
         new = {}
-        if name not in self.file['profiles']:
+        if name not in self.file.profiles:
             if not create:
                 raise Exception(f'Unknown profile: {name}')
         else:
             if merge:
-                new = deepcopy(self.file['profiles'][name])
+                new = deepcopy(self.file.profiles[name])

         logging.debug(f'new: {new}')
         logging.debug(f'profile: {profile}')
@@ -301,5 +302,5 @@ class ConfigStateHolder:

         if prune:
             new = {key: val for key, val in new.items() if val is not None}
-        self.file['profiles'][name] = new
+        self.file.profiles[name] = new
         self.invalidate_profile_cache()
@@ -53,7 +53,7 @@ def validate_ConfigStateHolder(c: ConfigStateHolder, should_load: Optional[bool]
 def test_fixture_configstate(conf_fixture: str, exists: bool, request):
     configstate = request.getfixturevalue(conf_fixture)
     assert 'config_file' in configstate.runtime
-    confpath = configstate.runtime['config_file']
+    confpath = configstate.runtime.config_file
     assert isinstance(confpath, str)
     assert confpath
     assert exists == os.path.exists(confpath)
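The test above keeps the membership check 'config_file' in configstate.runtime while switching the read to attribute access, so the runtime holder evidently still supports the mapping protocol alongside the new dotted syntax. A minimal sketch of such a dual interface, purely illustrative (RuntimeState and the example path are made up, not taken from this repository):

# Sketch of a mapping that also answers attribute reads, as the test implies.
from collections import UserDict


class RuntimeState(UserDict):

    def __getattr__(self, name: str):
        # UserDict keeps its storage in self.data; fall back to it for unknown attributes
        if name != 'data' and name in self.data:
            return self.data[name]
        raise AttributeError(name)


runtime = RuntimeState({'config_file': '/tmp/config.toml'})
assert 'config_file' in runtime                        # mapping protocol still works
assert runtime.config_file == runtime['config_file']   # and so does attribute access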
@@ -124,12 +124,13 @@ def load_toml_file(path) -> dict:


 def get_path_from_stateholder(c: ConfigStateHolder):
-    return c.runtime['config_file']
+    return c.runtime.config_file


 def test_config_save_nonexistant(configstate_nonexistant: ConfigStateHolder):
     c = configstate_nonexistant
-    confpath = c.runtime['config_file']
+    confpath = c.runtime.config_file
+    assert confpath
     assert not os.path.exists(confpath)
     c.write()
     assert confpath
@@ -75,7 +75,7 @@ _kupfer_local_chroots = dict[Arch, Distro]()
 def get_kupfer_https(arch: Arch, scan: bool = False) -> Distro:
     global _kupfer_https
     if arch not in _kupfer_https or not _kupfer_https[arch]:
-        _kupfer_https[arch] = get_kupfer(arch, KUPFER_HTTPS.replace('%branch%', config.file['pacman']['repo_branch']), scan)
+        _kupfer_https[arch] = get_kupfer(arch, KUPFER_HTTPS.replace('%branch%', config.file.pacman.repo_branch), scan)
     item = _kupfer_https[arch]
     if scan and not item.is_scanned():
         item.scan()
@@ -85,7 +85,8 @@ def get_kupfer_https(arch: Arch, scan: bool = False) -> Distro:
 def get_kupfer_local(arch: Optional[Arch] = None, in_chroot: bool = True, scan: bool = False) -> Distro:
     global _kupfer_local, _kupfer_local_chroots
     cache = _kupfer_local_chroots if in_chroot else _kupfer_local
-    arch = arch or config.runtime['arch']
+    arch = arch or config.runtime.arch
+    assert arch
     if arch not in cache or not cache[arch]:
         dir = CHROOT_PATHS['packages'] if in_chroot else config.get_path('packages')
         cache[arch] = get_kupfer(arch, f"file://{dir}/$arch/$repo")
@@ -7,7 +7,8 @@ def generate_makepkg_conf(arch: Arch, cross: bool = False, chroot: str = None) -
     Generate a makepkg.conf. For use with crosscompiling, specify `cross=True` and pass as `chroot`
     the relative path inside the native chroot where the foreign chroot will be mounted.
     """
-    hostspec = GCC_HOSTSPECS[config.runtime['arch'] if cross else arch][arch]
+    assert config.runtime.arch
+    hostspec = GCC_HOSTSPECS[config.runtime.arch if cross else arch][arch]
     cflags = CFLAGS_ARCHES[arch] + CFLAGS_GENERAL
     if cross and not chroot:
         raise Exception('Cross-compile makepkg conf requested but no chroot path given: "{chroot}"')
@@ -233,7 +234,7 @@ Color
 #NoProgressBar
 {'' if check_space else '#'}CheckSpace
 VerbosePkgLists
-ParallelDownloads = {config.file['pacman']['parallel_downloads']}
+ParallelDownloads = {config.file.pacman.parallel_downloads}

 # By default, pacman accepts packages signed by keys that its local keyring
 # trusts (see pacman-key and its man page), as well as unsigned packages.
image.py (4 changed lines)
@@ -399,7 +399,7 @@ def cmd_build(profile_name: str = None,

     packages = BASE_PACKAGES + DEVICES[device] + FLAVOURS[flavour]['packages'] + profile['pkgs_include']

-    if arch != config.runtime['arch']:
+    if arch != config.runtime.arch:
         build_enable_qemu_binfmt(arch)

     if local_repos and build_pkgs:
@@ -475,7 +475,7 @@ def cmd_inspect(profile: str = None, shell: bool = False):
     if shell:
         chroot.initialized = True
         chroot.activate()
-        if arch != config.runtime['arch']:
+        if arch != config.runtime.arch:
             logging.info('Installing requisites for foreign-arch shell')
             build_enable_qemu_binfmt(arch)
         logging.info('Starting inspection shell')
main.py (10 changed lines)
@@ -27,9 +27,9 @@ from ssh import cmd_ssh
 @nowrapper_option
 def cli(verbose: bool = False, config_file: str = None, wrapper_override: Optional[bool] = None, error_shell: bool = False):
     setup_logging(verbose)
-    config.runtime['verbose'] = verbose
-    config.runtime['no_wrap'] = wrapper_override is False
-    config.runtime['error_shell'] = error_shell
+    config.runtime.verbose = verbose
+    config.runtime.no_wrap = wrapper_override is False
+    config.runtime.error_shell = error_shell
     config.try_load_file(config_file)
     if wrapper_override:
         enforce_wrap()
@@ -39,11 +39,11 @@ def main():
     try:
         return cli(prog_name='kupferbootstrap')
     except Exception as ex:
-        if config.runtime['verbose']:
+        if config.runtime.verbose:
             logging.fatal(get_trace())
         else:
             logging.fatal(ex)
-        if config.runtime['error_shell']:
+        if config.runtime.error_shell:
             logging.info('Starting error shell. Type exit to quit.')
             subprocess.call('/bin/bash')
         exit(1)
@@ -36,7 +36,7 @@ pacman_cmd = [

 def get_makepkg_env(arch: Optional[Arch] = None):
     # has to be a function because calls to `config` must be done after config file was read
-    threads = config.file['build']['threads'] or multiprocessing.cpu_count()
+    threads = config.file.build.threads or multiprocessing.cpu_count()
     env = {key: val for key, val in os.environ.items() if not key.split('_', maxsplit=1)[0] in ['CI', 'GITLAB', 'FF']}
     env |= {
         'LANG': 'C',
@@ -333,7 +333,8 @@ def try_download_package(dest_file_path: str, package: Pkgbuild, arch: Arch) ->

 def check_package_version_built(package: Pkgbuild, arch: Arch, try_download: bool = False) -> bool:
     enforce_wrap()
-    native_chroot = setup_build_chroot(config.runtime['arch'])
+    assert config.runtime.arch
+    native_chroot = setup_build_chroot(config.runtime.arch)
     config_path = '/' + native_chroot.write_makepkg_conf(
         target_arch=arch,
         cross_chroot_relative=os.path.join('chroot', arch),
@@ -407,7 +408,8 @@ def setup_build_chroot(
     add_kupfer_repos: bool = True,
     clean_chroot: bool = False,
 ) -> BuildChroot:
-    if arch != config.runtime['arch']:
+    assert config.runtime.arch
+    if arch != config.runtime.arch:
         wrap_if_foreign_arch(arch)
         build_enable_qemu_binfmt(arch)
     init_prebuilts(arch)
@@ -467,15 +469,16 @@ def build_package(
     makepkg_compile_opts = ['--holdver']
     makepkg_conf_path = 'etc/makepkg.conf'
     repo_dir = repo_dir if repo_dir else config.get_path('pkgbuilds')
-    foreign_arch = config.runtime['arch'] != arch
+    foreign_arch = config.runtime.arch != arch
     deps = (list(set(package.depends) - set(package.names())))
     target_chroot = setup_build_chroot(
         arch=arch,
         extra_packages=deps,
         clean_chroot=clean_chroot,
     )
+    assert config.runtime.arch
     native_chroot = target_chroot if not foreign_arch else setup_build_chroot(
-        arch=config.runtime['arch'],
+        arch=config.runtime.arch,
         extra_packages=['base-devel'] + CROSSDIRECT_PKGS,
         clean_chroot=clean_chroot,
     )
@@ -635,7 +638,8 @@ def build_packages_by_paths(
     if isinstance(paths, str):
         paths = [paths]

-    for _arch in set([arch, config.runtime['arch']]):
+    assert config.runtime.arch
+    for _arch in set([arch, config.runtime.arch]):
         init_prebuilts(_arch)
     packages = filter_packages(paths, repo=repo, allow_empty_results=False)
     return build_packages(
@@ -661,7 +665,8 @@ def build_enable_qemu_binfmt(arch: Arch, repo: Optional[dict[str, Pkgbuild]] = N
     logging.info('Installing qemu-user (building if necessary)')
     if lazy and _qemu_enabled[arch]:
         return
-    native = config.runtime['arch']
+    native = config.runtime.arch
+    assert native
     if arch == native:
         return
     wrap_if_foreign_arch(arch)
@@ -735,10 +740,10 @@ def build(
         force=force,
         rebuild_dependants=rebuild_dependants,
         try_download=try_download,
-        enable_crosscompile=config.file['build']['crosscompile'],
-        enable_crossdirect=config.file['build']['crossdirect'],
-        enable_ccache=config.file['build']['ccache'],
-        clean_chroot=config.file['build']['clean_mode'],
+        enable_crosscompile=config.file.build.crosscompile,
+        enable_crossdirect=config.file.build.crossdirect,
+        enable_ccache=config.file.build.ccache,
+        clean_chroot=config.file.build.clean_mode,
     )


@@ -47,8 +47,8 @@ def clone_pkbuilds(pkgbuilds_dir: str, repo_url: str, branch: str, interactive=F

 def init_pkgbuilds(interactive=False):
     pkgbuilds_dir = config.get_path('pkgbuilds')
-    repo_url = config.file['pkgbuilds']['git_repo']
-    branch = config.file['pkgbuilds']['git_branch']
+    repo_url = config.file.pkgbuilds.git_repo
+    branch = config.file.pkgbuilds.git_branch
     clone_pkbuilds(pkgbuilds_dir, repo_url, branch, interactive=interactive, update=False)


@@ -193,7 +193,7 @@ def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] =
     global config
     if _config:
         config = _config
-    setup_logging(verbose=config.runtime['verbose'], log_setup=False)  # different thread needs log setup.
+    setup_logging(verbose=config.runtime.verbose, log_setup=False)  # different thread needs log setup.
     logging.info(f"Parsing PKGBUILD for {relative_pkg_dir}")
     pkgbuilds_dir = config.get_path('pkgbuilds')
     pkgdir = os.path.join(pkgbuilds_dir, relative_pkg_dir)
ssh.py (2 changed lines)
@@ -32,7 +32,7 @@ def run_ssh_command(cmd: list[str] = [],
     extra_args = []
     if len(keys) > 0:
         extra_args += ['-i', keys[0]]
-    if config.runtime['verbose']:
+    if config.runtime.verbose:
         extra_args += ['-v']
     if alloc_tty:
         extra_args += ['-t']
@@ -15,7 +15,7 @@ wrapper_impls: dict[str, Wrapper] = {


 def get_wrapper_type(wrapper_type: str = None):
-    return wrapper_type or config.file['wrapper']['type']
+    return wrapper_type or config.file.wrapper.type


 def get_wrapper_impl(wrapper_type: str = None) -> Wrapper:
@@ -34,7 +34,7 @@ def is_wrapped(wrapper_type: str = None):

 def enforce_wrap(no_wrapper=False):
     wrapper_type = get_wrapper_type()
-    if wrapper_type != 'none' and not is_wrapped(wrapper_type) and not config.runtime['no_wrap'] and not no_wrapper:
+    if wrapper_type != 'none' and not is_wrapped(wrapper_type) and not config.runtime.no_wrap and not no_wrapper:
         logging.info(f'Wrapping in {wrapper_type}')
         wrap()

@@ -22,7 +22,8 @@ class DockerWrapper(BaseWrapper):
     type: str = 'docker'

     def wrap(self):
-        script_path = config.runtime['script_source_dir']
+        script_path = config.runtime.script_source_dir
+        assert script_path
         with open(os.path.join(script_path, 'version.txt')) as version_file:
             version = version_file.read().replace('\n', '')
         tag = f'registry.gitlab.com/kupfer/kupferbootstrap:{version}'
@@ -34,7 +35,7 @@ class DockerWrapper(BaseWrapper):
             '.',
             '-t',
             tag,
-        ] + (['-q'] if not config.runtime['verbose'] else [])
+        ] + (['-q'] if not config.runtime.verbose else [])
         logging.debug('Running docker cmd: ' + ' '.join(cmd))
         result = subprocess.run(cmd, cwd=script_path, capture_output=True)
         if result.returncode != 0: