global: refactor to use config.{file,runtime}.$member instead of config.file["$member"]

InsanePrawn 2022-08-27 16:48:50 +02:00
parent 13ad63446e
commit bef0efc637
15 changed files with 65 additions and 54 deletions
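For readers skimming the diff below: every call site that read a config member with dict subscripts, e.g. config.file['build']['crossdirect'], now uses attribute access, e.g. config.file.build.crossdirect. The same data is reachable either way; attribute access is shorter and, if the members are declared on the config classes, lets a type checker attach per-member types. A minimal self-contained sketch of how a dict can expose its keys as attributes (illustrative only; the actual kupferbootstrap config classes are not shown in this commit):

class AttrDict(dict):
    # dict whose keys can also be read and written as attributes
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError as ex:
            raise AttributeError(name) from ex

    def __setattr__(self, name, value):
        self[name] = value

conf = AttrDict({'build': AttrDict({'crossdirect': True})})
assert conf['build']['crossdirect'] == conf.build.crossdirect  # both styles hit the same data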

View file

@@ -50,7 +50,7 @@ def cmd_chroot(type: str = 'build', arch: str = None, enable_crossdirect=True):
 build_chroot.initialize()
 build_chroot.initialized = True
 build_chroot.mount_pkgbuilds()
-if config.file['build']['crossdirect'] and enable_crossdirect:
+if config.file.build.crossdirect and enable_crossdirect:
 build_chroot.mount_crossdirect()
 else:
 raise Exception('Really weird bug')

View file

@@ -228,7 +228,8 @@ class Chroot(AbstractChroot):
 raise Exception(f'Chroot {self.name} is inactive, not running command! Hint: pass `fail_inactive=False`')
 if outer_env is None:
 outer_env = {}
-native = config.runtime['arch']
+native = config.runtime.arch
+assert native
 if self.arch != native and 'QEMU_LD_PREFIX' not in outer_env:
 outer_env = dict(outer_env) # copy dict for modification
 outer_env |= {'QEMU_LD_PREFIX': f'/usr/{GCC_HOSTSPECS[native][self.arch]}'}
@@ -285,7 +286,7 @@ class Chroot(AbstractChroot):
 user = None
 group = None
 if check_space is None:
-check_space = config.file['pacman']['check_space']
+check_space = config.file.pacman.check_space
 if not absolute_path:
 path = self.get_path('/etc')
 root_makedir(path)
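One recurring detail, first visible in the hunk above: whenever a runtime member that may still be unset (such as config.runtime.arch) is read through the new attribute syntax, an assert is added right after the read. The type annotations are not part of this diff, so treat this as an assumption: such members are presumably Optional, and the assert narrows the type for mypy while failing loudly at runtime if the value was never initialized. A small self-contained sketch of the pattern (the helper name is illustrative):

from typing import Optional

def require_arch(runtime_arch: Optional[str]) -> str:
    # assert narrows Optional[str] to str for the type checker
    # and raises AssertionError if the value was never set
    assert runtime_arch
    return runtime_arch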

View file

@@ -69,7 +69,8 @@ class BuildChroot(Chroot):
 """
 target_arch = self.arch
 if not native_chroot:
-native_chroot = get_build_chroot(config.runtime['arch'])
+assert config.runtime.arch
+native_chroot = get_build_chroot(config.runtime.arch)
 host_arch = native_chroot.arch
 hostspec = GCC_HOSTSPECS[host_arch][target_arch]
 cc = f'{hostspec}-cc'

View file

@@ -66,8 +66,8 @@ def prompt_profile(name: str, create: bool = True, defaults: Union[Profile, dict
 profile: Any = PROFILE_EMPTY | defaults
 # don't use get_profile() here because we need the sparse profile
-if name in config.file['profiles']:
-profile |= config.file['profiles'][name]
+if name in config.file.profiles:
+profile |= config.file.profiles[name]
 elif create:
 logging.info(f"Profile {name} doesn't exist yet, creating new profile.")
 else:
@@ -113,7 +113,7 @@ def prompt_for_save(retry_ctx: Optional[click.Context] = None):
 If `retry_ctx` is passed, the context's command will be reexecuted with the same arguments if the user chooses to retry.
 False will still be returned as the retry is expected to either save, perform another retry or arbort.
 """
-if click.confirm(f'Do you want to save your changes to {config.runtime["config_file"]}?', default=True):
+if click.confirm(f'Do you want to save your changes to {config.runtime.config_file}?', default=True):
 return True
 if retry_ctx:
 if click.confirm('Retry? ("n" to quit without saving)', default=True):
@@ -171,7 +171,7 @@ def cmd_config_init(ctx, sections: list[str] = CONFIG_SECTIONS, non_interactive:
 config.update(results)
 if 'profiles' in sections:
-current_profile = 'default' if 'current' not in config.file['profiles'] else config.file['profiles']['current']
+current_profile = 'default' if 'current' not in config.file.profiles else config.file.profiles.current
 new_current, _ = prompt_config('profile.current', default=current_profile, field_type=str)
 profile, changed = prompt_profile(new_current, create=True)
 config.update_profile(new_current, profile)
@@ -182,7 +182,7 @@ def cmd_config_init(ctx, sections: list[str] = CONFIG_SECTIONS, non_interactive:
 if not noop:
 config.write()
 else:
-logging.info(f'--noop passed, not writing to {config.runtime["config_file"]}!')
+logging.info(f'--noop passed, not writing to {config.runtime.config_file}!')
 @cmd_config.command(name='set')
@@ -250,8 +250,8 @@ def cmd_profile():
 def cmd_profile_init(ctx, name: str, non_interactive: bool = False, noop: bool = False):
 """Create or edit a profile"""
 profile = deepcopy(PROFILE_EMPTY)
-if name in config.file['profiles']:
-profile |= config.file['profiles'][name]
+if name in config.file.profiles:
+profile |= config.file.profiles[name]
 if not non_interactive:
 profile, _changed = prompt_profile(name, create=True)
@@ -262,4 +262,4 @@ def cmd_profile_init(ctx, name: str, non_interactive: bool = False, noop: bool =
 return
 config.write()
 else:
-logging.info(f'--noop passed, not writing to {config.runtime["config_file"]}!')
+logging.info(f'--noop passed, not writing to {config.runtime.config_file}!')

View file

@@ -204,7 +204,7 @@ class ConfigStateHolder:
 def try_load_file(self, config_file=None, base=CONFIG_DEFAULTS):
 config_file = config_file or CONFIG_DEFAULT_PATH
-self.runtime['config_file'] = config_file
+self.runtime.config_file = config_file
 self._profile_cache = None
 try:
 self.file = parse_file(config_file=config_file, base=base)
@@ -227,8 +227,8 @@ class ConfigStateHolder:
 raise ex
 def get_profile(self, name: Optional[str] = None) -> Profile:
-name = name or self.file['profiles']['current']
-self._profile_cache = resolve_profile(name=name, sparse_profiles=self.file['profiles'], resolved=self._profile_cache)
+name = name or self.file.profiles.current
+self._profile_cache = resolve_profile(name=name, sparse_profiles=self.file.profiles, resolved=self._profile_cache)
 return self._profile_cache[name]
 def enforce_profile_device_set(self, profile_name: Optional[str] = None, hint_or_set_arch: bool = False) -> Profile:
@@ -255,7 +255,7 @@ class ConfigStateHolder:
 return profile
 def get_path(self, path_name: str) -> str:
-paths = self.file['paths']
+paths = self.file.paths
 return resolve_path_template(paths[path_name], paths)
 def get_package_dir(self, arch: str):
@@ -268,7 +268,8 @@ class ConfigStateHolder:
 def write(self, path=None):
 """write toml representation of `self.file` to `path`"""
 if path is None:
-path = self.runtime['config_file']
+path = self.runtime.config_file
+assert path
 os.makedirs(os.path.dirname(path), exist_ok=True)
 dump_file(path, self.file)
 logging.info(f'Created config file at {path}')
@@ -282,18 +283,18 @@ class ConfigStateHolder:
 merged = merge_configs(config_fragment, conf_base=self.file, warn_missing_defaultprofile=warn_missing_defaultprofile)
 changed = self.file != merged
 self.file.update(merged)
-if changed and 'profiles' in config_fragment and self.file['profiles'] != config_fragment['profiles']:
+if changed and 'profiles' in config_fragment and self.file.profiles != config_fragment['profiles']:
 self.invalidate_profile_cache()
 return changed
 def update_profile(self, name: str, profile: Profile, merge: bool = False, create: bool = True, prune: bool = True):
 new = {}
-if name not in self.file['profiles']:
+if name not in self.file.profiles:
 if not create:
 raise Exception(f'Unknown profile: {name}')
 else:
 if merge:
-new = deepcopy(self.file['profiles'][name])
+new = deepcopy(self.file.profiles[name])
 logging.debug(f'new: {new}')
 logging.debug(f'profile: {profile}')
@@ -301,5 +302,5 @@ class ConfigStateHolder:
 if prune:
 new = {key: val for key, val in new.items() if val is not None}
-self.file['profiles'][name] = new
+self.file.profiles[name] = new
 self.invalidate_profile_cache()

View file

@@ -53,7 +53,7 @@ def validate_ConfigStateHolder(c: ConfigStateHolder, should_load: Optional[bool]
 def test_fixture_configstate(conf_fixture: str, exists: bool, request):
 configstate = request.getfixturevalue(conf_fixture)
 assert 'config_file' in configstate.runtime
-confpath = configstate.runtime['config_file']
+confpath = configstate.runtime.config_file
 assert isinstance(confpath, str)
 assert confpath
 assert exists == os.path.exists(confpath)
@@ -124,12 +124,13 @@ def load_toml_file(path) -> dict:
 def get_path_from_stateholder(c: ConfigStateHolder):
-return c.runtime['config_file']
+return c.runtime.config_file
 def test_config_save_nonexistant(configstate_nonexistant: ConfigStateHolder):
 c = configstate_nonexistant
-confpath = c.runtime['config_file']
+confpath = c.runtime.config_file
+assert confpath
 assert not os.path.exists(confpath)
 c.write()
 assert confpath

View file

@@ -75,7 +75,7 @@ _kupfer_local_chroots = dict[Arch, Distro]()
 def get_kupfer_https(arch: Arch, scan: bool = False) -> Distro:
 global _kupfer_https
 if arch not in _kupfer_https or not _kupfer_https[arch]:
-_kupfer_https[arch] = get_kupfer(arch, KUPFER_HTTPS.replace('%branch%', config.file['pacman']['repo_branch']), scan)
+_kupfer_https[arch] = get_kupfer(arch, KUPFER_HTTPS.replace('%branch%', config.file.pacman.repo_branch), scan)
 item = _kupfer_https[arch]
 if scan and not item.is_scanned():
 item.scan()
@@ -85,7 +85,8 @@ def get_kupfer_https(arch: Arch, scan: bool = False) -> Distro:
 def get_kupfer_local(arch: Optional[Arch] = None, in_chroot: bool = True, scan: bool = False) -> Distro:
 global _kupfer_local, _kupfer_local_chroots
 cache = _kupfer_local_chroots if in_chroot else _kupfer_local
-arch = arch or config.runtime['arch']
+arch = arch or config.runtime.arch
+assert arch
 if arch not in cache or not cache[arch]:
 dir = CHROOT_PATHS['packages'] if in_chroot else config.get_path('packages')
 cache[arch] = get_kupfer(arch, f"file://{dir}/$arch/$repo")

View file

@@ -7,7 +7,8 @@ def generate_makepkg_conf(arch: Arch, cross: bool = False, chroot: str = None) -
 Generate a makepkg.conf. For use with crosscompiling, specify `cross=True` and pass as `chroot`
 the relative path inside the native chroot where the foreign chroot will be mounted.
 """
-hostspec = GCC_HOSTSPECS[config.runtime['arch'] if cross else arch][arch]
+assert config.runtime.arch
+hostspec = GCC_HOSTSPECS[config.runtime.arch if cross else arch][arch]
 cflags = CFLAGS_ARCHES[arch] + CFLAGS_GENERAL
 if cross and not chroot:
 raise Exception('Cross-compile makepkg conf requested but no chroot path given: "{chroot}"')
@@ -233,7 +234,7 @@ Color
 #NoProgressBar
 {'' if check_space else '#'}CheckSpace
 VerbosePkgLists
-ParallelDownloads = {config.file['pacman']['parallel_downloads']}
+ParallelDownloads = {config.file.pacman.parallel_downloads}
 # By default, pacman accepts packages signed by keys that its local keyring
 # trusts (see pacman-key and its man page), as well as unsigned packages.

View file

@@ -399,7 +399,7 @@ def cmd_build(profile_name: str = None,
 packages = BASE_PACKAGES + DEVICES[device] + FLAVOURS[flavour]['packages'] + profile['pkgs_include']
-if arch != config.runtime['arch']:
+if arch != config.runtime.arch:
 build_enable_qemu_binfmt(arch)
 if local_repos and build_pkgs:
@@ -475,7 +475,7 @@ def cmd_inspect(profile: str = None, shell: bool = False):
 if shell:
 chroot.initialized = True
 chroot.activate()
-if arch != config.runtime['arch']:
+if arch != config.runtime.arch:
 logging.info('Installing requisites for foreign-arch shell')
 build_enable_qemu_binfmt(arch)
 logging.info('Starting inspection shell')

main.py (10 changed lines)
View file

@@ -27,9 +27,9 @@ from ssh import cmd_ssh
 @nowrapper_option
 def cli(verbose: bool = False, config_file: str = None, wrapper_override: Optional[bool] = None, error_shell: bool = False):
 setup_logging(verbose)
-config.runtime['verbose'] = verbose
-config.runtime['no_wrap'] = wrapper_override is False
-config.runtime['error_shell'] = error_shell
+config.runtime.verbose = verbose
+config.runtime.no_wrap = wrapper_override is False
+config.runtime.error_shell = error_shell
 config.try_load_file(config_file)
 if wrapper_override:
 enforce_wrap()
@@ -39,11 +39,11 @@ def main():
 try:
 return cli(prog_name='kupferbootstrap')
 except Exception as ex:
-if config.runtime['verbose']:
+if config.runtime.verbose:
 logging.fatal(get_trace())
 else:
 logging.fatal(ex)
-if config.runtime['error_shell']:
+if config.runtime.error_shell:
 logging.info('Starting error shell. Type exit to quit.')
 subprocess.call('/bin/bash')
 exit(1)

View file

@@ -36,7 +36,7 @@ pacman_cmd = [
 def get_makepkg_env(arch: Optional[Arch] = None):
 # has to be a function because calls to `config` must be done after config file was read
-threads = config.file['build']['threads'] or multiprocessing.cpu_count()
+threads = config.file.build.threads or multiprocessing.cpu_count()
 env = {key: val for key, val in os.environ.items() if not key.split('_', maxsplit=1)[0] in ['CI', 'GITLAB', 'FF']}
 env |= {
 'LANG': 'C',
@@ -333,7 +333,8 @@ def try_download_package(dest_file_path: str, package: Pkgbuild, arch: Arch) ->
 def check_package_version_built(package: Pkgbuild, arch: Arch, try_download: bool = False) -> bool:
 enforce_wrap()
-native_chroot = setup_build_chroot(config.runtime['arch'])
+assert config.runtime.arch
+native_chroot = setup_build_chroot(config.runtime.arch)
 config_path = '/' + native_chroot.write_makepkg_conf(
 target_arch=arch,
 cross_chroot_relative=os.path.join('chroot', arch),
@@ -407,7 +408,8 @@ def setup_build_chroot(
 add_kupfer_repos: bool = True,
 clean_chroot: bool = False,
 ) -> BuildChroot:
-if arch != config.runtime['arch']:
+assert config.runtime.arch
+if arch != config.runtime.arch:
 wrap_if_foreign_arch(arch)
 build_enable_qemu_binfmt(arch)
 init_prebuilts(arch)
@@ -467,15 +469,16 @@ def build_package(
 makepkg_compile_opts = ['--holdver']
 makepkg_conf_path = 'etc/makepkg.conf'
 repo_dir = repo_dir if repo_dir else config.get_path('pkgbuilds')
-foreign_arch = config.runtime['arch'] != arch
+foreign_arch = config.runtime.arch != arch
 deps = (list(set(package.depends) - set(package.names())))
 target_chroot = setup_build_chroot(
 arch=arch,
 extra_packages=deps,
 clean_chroot=clean_chroot,
 )
+assert config.runtime.arch
 native_chroot = target_chroot if not foreign_arch else setup_build_chroot(
-arch=config.runtime['arch'],
+arch=config.runtime.arch,
 extra_packages=['base-devel'] + CROSSDIRECT_PKGS,
 clean_chroot=clean_chroot,
 )
@@ -635,7 +638,8 @@ def build_packages_by_paths(
 if isinstance(paths, str):
 paths = [paths]
-for _arch in set([arch, config.runtime['arch']]):
+assert config.runtime.arch
+for _arch in set([arch, config.runtime.arch]):
 init_prebuilts(_arch)
 packages = filter_packages(paths, repo=repo, allow_empty_results=False)
 return build_packages(
@@ -661,7 +665,8 @@ def build_enable_qemu_binfmt(arch: Arch, repo: Optional[dict[str, Pkgbuild]] = N
 logging.info('Installing qemu-user (building if necessary)')
 if lazy and _qemu_enabled[arch]:
 return
-native = config.runtime['arch']
+native = config.runtime.arch
+assert native
 if arch == native:
 return
 wrap_if_foreign_arch(arch)
@@ -735,10 +740,10 @@ def build(
 force=force,
 rebuild_dependants=rebuild_dependants,
 try_download=try_download,
-enable_crosscompile=config.file['build']['crosscompile'],
-enable_crossdirect=config.file['build']['crossdirect'],
-enable_ccache=config.file['build']['ccache'],
-clean_chroot=config.file['build']['clean_mode'],
+enable_crosscompile=config.file.build.crosscompile,
+enable_crossdirect=config.file.build.crossdirect,
+enable_ccache=config.file.build.ccache,
+clean_chroot=config.file.build.clean_mode,
 )

View file

@@ -47,8 +47,8 @@ def clone_pkbuilds(pkgbuilds_dir: str, repo_url: str, branch: str, interactive=F
 def init_pkgbuilds(interactive=False):
 pkgbuilds_dir = config.get_path('pkgbuilds')
-repo_url = config.file['pkgbuilds']['git_repo']
-branch = config.file['pkgbuilds']['git_branch']
+repo_url = config.file.pkgbuilds.git_repo
+branch = config.file.pkgbuilds.git_branch
 clone_pkbuilds(pkgbuilds_dir, repo_url, branch, interactive=interactive, update=False)
@@ -193,7 +193,7 @@ def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] =
 global config
 if _config:
 config = _config
-setup_logging(verbose=config.runtime['verbose'], log_setup=False) # different thread needs log setup.
+setup_logging(verbose=config.runtime.verbose, log_setup=False) # different thread needs log setup.
 logging.info(f"Parsing PKGBUILD for {relative_pkg_dir}")
 pkgbuilds_dir = config.get_path('pkgbuilds')
 pkgdir = os.path.join(pkgbuilds_dir, relative_pkg_dir)

ssh.py (2 changed lines)
View file

@@ -32,7 +32,7 @@ def run_ssh_command(cmd: list[str] = [],
 extra_args = []
 if len(keys) > 0:
 extra_args += ['-i', keys[0]]
-if config.runtime['verbose']:
+if config.runtime.verbose:
 extra_args += ['-v']
 if alloc_tty:
 extra_args += ['-t']

View file

@@ -15,7 +15,7 @@ wrapper_impls: dict[str, Wrapper] = {
 def get_wrapper_type(wrapper_type: str = None):
-return wrapper_type or config.file['wrapper']['type']
+return wrapper_type or config.file.wrapper.type
 def get_wrapper_impl(wrapper_type: str = None) -> Wrapper:
@@ -34,7 +34,7 @@ def is_wrapped(wrapper_type: str = None):
 def enforce_wrap(no_wrapper=False):
 wrapper_type = get_wrapper_type()
-if wrapper_type != 'none' and not is_wrapped(wrapper_type) and not config.runtime['no_wrap'] and not no_wrapper:
+if wrapper_type != 'none' and not is_wrapped(wrapper_type) and not config.runtime.no_wrap and not no_wrapper:
 logging.info(f'Wrapping in {wrapper_type}')
 wrap()

View file

@@ -22,7 +22,7 @@ class DockerWrapper(BaseWrapper):
 type: str = 'docker'
 def wrap(self):
-script_path = config.runtime['script_source_dir']
+script_path = config.runtime.script_source_dir
 with open(os.path.join(script_path, 'version.txt')) as version_file:
 version = version_file.read().replace('\n', '')
 tag = f'registry.gitlab.com/kupfer/kupferbootstrap:{version}'
@@ -34,7 +34,7 @@ class DockerWrapper(BaseWrapper):
 '.',
 '-t',
 tag,
-] + (['-q'] if not config.runtime['verbose'] else [])
+] + (['-q'] if not config.runtime.verbose else [])
 logging.debug('Running docker cmd: ' + ' '.join(cmd))
 result = subprocess.run(cmd, cwd=script_path, capture_output=True)
 if result.returncode != 0: