config.get_path(), wrapper: remove --config/-C from wrapper args
parent faa855eda9
commit a3d85cda8c
6 changed files with 74 additions and 29 deletions
@@ -6,7 +6,7 @@ from distro import get_base_distros, RepoInfo


 def get_chroot_path(chroot_name, override_basepath: str = None) -> str:
-    base_path = config.file['paths']['chroots'] if not override_basepath else override_basepath
+    base_path = config.get_path('chroots') if not override_basepath else override_basepath
     return os.path.join(base_path, chroot_name)
config.py (33 changed lines)
@@ -5,7 +5,10 @@ import logging
 from copy import deepcopy
 import click

-CONFIG_DEFAULT_PATH = os.path.join(appdirs.user_config_dir('kupfer'), 'kupferbootstrap.toml')
+CONFIG_DIR = appdirs.user_config_dir('kupfer')
+CACHE_DIR = appdirs.user_cache_dir('kupfer')
+
+CONFIG_DEFAULT_PATH = os.path.join(CONFIG_DIR, 'kupferbootstrap.toml')

 Profile = dict[str, str]
@@ -26,10 +29,11 @@ CONFIG_DEFAULTS = {
         'threads': 0,
     },
     'paths': {
-        'chroots': os.path.join(appdirs.user_cache_dir('kupfer'), 'chroots'),
-        'pacman': os.path.join(appdirs.user_cache_dir('kupfer'), 'pacman'),
-        'jumpdrive': os.path.join(appdirs.user_cache_dir('kupfer'), 'jumpdrive'),
-        'packages': os.path.join(appdirs.user_cache_dir('kupfer'), 'packages'),
+        'cache_dir': CACHE_DIR,
+        'chroots': os.path.join('%cache_dir%', 'chroots'),
+        'pacman': os.path.join('%cache_dir%', 'pacman'),
+        'jumpdrive': os.path.join('%cache_dir%', 'jumpdrive'),
+        'packages': os.path.join('%cache_dir%', 'packages'),
         'pkgbuilds': os.path.abspath(os.getcwd()),
     },
     'profiles': {
@@ -46,6 +50,14 @@ CONFIG_RUNTIME_DEFAULTS = {
 }


+def resolve_path_template(path_template: str, paths: dict[str, str]) -> str:
+    terminator = '%'  # i'll be back
+    result = path_template
+    for path_name, path in paths.items():
+        result = result.replace(terminator + path_name + terminator, path)
+    return result
+
+
 def resolve_profile(
     name: str,
     sparse_profiles: dict[str, Profile],
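
For reference, a minimal self-contained sketch of what the new %name% templating resolves to. The helper below is equivalent to the one added above; the cache path value is illustrative, not read from a real config:

    import os

    def resolve_path_template(path_template: str, paths: dict[str, str]) -> str:
        # replace every %key% occurrence with the corresponding value from paths
        result = path_template
        for path_name, path in paths.items():
            result = result.replace('%' + path_name + '%', path)
        return result

    paths = {
        'cache_dir': '/home/user/.cache/kupfer',            # stands in for CACHE_DIR
        'chroots': os.path.join('%cache_dir%', 'chroots'),  # as in the new CONFIG_DEFAULTS
    }
    print(resolve_path_template(paths['chroots'], paths))   # -> /home/user/.cache/kupfer/chroots

In the new defaults only %cache_dir% is used as a placeholder, so a single substitution pass is enough.
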
@@ -70,6 +82,7 @@ def resolve_profile(
     if name in resolved:
         return resolved

+    logging.debug(f'Resolving profile {name}')
     _visited.append(name)
     sparse = sparse_profiles[name]
     full = deepcopy(sparse)
@@ -242,7 +255,7 @@ class ConfigStateHolder:
             self.file_state.exception = ex
         self.file_state.load_finished = True

-    def is_loaded(self):
+    def is_loaded(self) -> bool:
         return self.file_state.load_finished and self.file_state.exception is None

     def enforce_config_loaded(self):
@@ -255,12 +268,16 @@ class ConfigStateHolder:
                 msg = "File doesn't exist. Try running `kupferbootstrap config init` first?"
             raise ConfigLoadException(extra_msg=msg, inner_exception=ex)

-    def get_profile(self, name: str = None):
+    def get_profile(self, name: str = None) -> Profile:
         if not name:
             name = self.file['profiles']['current']
-        self._profile_cache = resolve_profile(name, self.file['profiles'], resolved=self._profile_cache)
+        self._profile_cache = resolve_profile(name=name, sparse_profiles=self.file['profiles'], resolved=self._profile_cache)
         return self._profile_cache[name]

+    def get_path(self, path_name: str) -> str:
+        paths = self.file['paths']
+        return resolve_path_template(paths[path_name], paths)
+
     def dump(self) -> str:
         dump_toml(self.file)
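
The rest of the commit is the corresponding call-site change. The pattern, taken straight from the hunks in this diff (and assuming the module-level config object, a ConfigStateHolder instance, is imported as elsewhere in the tree):

    # before: index the raw parsed file
    packages_dir = config.file['paths']['packages']
    # after: go through the accessor, which also expands %name% placeholders
    packages_dir = config.get_path('packages')
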
@@ -201,4 +201,4 @@ def get_kupfer_https(arch: str) -> Distro:


 def get_kupfer_local(arch: str) -> Distro:
-    return get_kupfer(arch, f"file://{config.file['paths']['packages']}/$repo")
+    return get_kupfer(arch, f"file://{config.get_path('packages')}/$repo")
image.py (11 changed lines)
@@ -11,7 +11,8 @@ from wrapper import enforce_wrap
 from signal import pause


-def get_device_and_flavour(profile=None) -> tuple[str, str]:
+def get_device_and_flavour(profile: str = None) -> tuple[str, str]:
+    #config.enforce_config_loaded()
     profile = config.get_profile(profile)
     if not profile['device']:
         raise Exception("Please set the device using 'kupferbootstrap config init ...'")
@@ -63,7 +64,7 @@ def dump_bootimg(image_name: str) -> str:
         f'dump /boot/boot.img {path}',
     ])
     if result.returncode != 0:
-        logging.fatal(f'Faild to dump boot.img')
+        logging.fatal('Failed to dump boot.img')
         exit(1)
     return path
@@ -80,7 +81,7 @@ def dump_lk2nd(image_name: str) -> str:
         f'dump /boot/lk2nd.img {path}',
     ])
     if result.returncode != 0:
-        logging.fatal(f'Faild to dump lk2nd.img')
+        logging.fatal('Failed to dump lk2nd.img')
         exit(1)
     return path
@@ -94,7 +95,7 @@ def dump_qhypstub(image_name: str) -> str:
         f'dump /boot/qhypstub.bin {path}',
     ])
     if result.returncode != 0:
-        logging.fatal('Faild to dump qhypstub.bin')
+        logging.fatal('Failed to dump qhypstub.bin')
         exit(1)
     return path
@@ -139,7 +140,7 @@ def cmd_build():
     rootfs_mount = get_chroot_path(chroot_name)
     mount_rootfs_image(image_name, rootfs_mount)

-    packages_dir = config.file['paths']['packages']
+    packages_dir = config.get_path('packages')
     if os.path.exists(os.path.join(packages_dir, 'main')):
         extra_repos = get_kupfer_local(arch).repos
     else:
packages.py (20 changed lines)
@@ -47,7 +47,7 @@ class Package:

     def __init__(self, path: str, dir: str = None) -> None:
         self.path = path
-        dir = dir if dir else config.file['paths']['pkgbuilds']
+        dir = dir if dir else config.get_path('pkgbuilds')
         self._loadinfo(dir)

     def _loadinfo(self, dir):
@@ -96,7 +96,7 @@ class Package:


 def check_prebuilts(dir: str = None):
-    prebuilts_dir = dir if dir else config.file['paths']['packages']
+    prebuilts_dir = dir if dir else config.get_path('packages')
     os.makedirs(prebuilts_dir, exist_ok=True)
     for repo in REPOSITORIES:
         os.makedirs(os.path.join(prebuilts_dir, repo), exist_ok=True)
@@ -119,7 +119,7 @@ def check_prebuilts(dir: str = None):


 def discover_packages(dir: str = None) -> dict[str, Package]:
-    dir = dir if dir else config.file['paths']['pkgbuilds']
+    dir = dir if dir else config.get_path('pkgbuilds')
     packages = {}
     paths = []
@@ -288,7 +288,7 @@ def check_package_version_built(package: Package) -> bool:
     for line in result.stdout.decode('utf-8').split('\n'):
         if line != "":
             file = os.path.basename(line)
-            if not os.path.exists(os.path.join(config.file['paths']['packages'], package.repo, file)):
+            if not os.path.exists(os.path.join(config.get_path('packages'), package.repo, file)):
                 built = False

     return built
@@ -343,7 +343,7 @@ def setup_dependencies_and_sources(package: Package, chroot: str, repo_dir: str
     To make cross-compilation work for almost every package, the host needs to have the dependencies installed
     so that the build tools can be used
     """
-    repo_dir = repo_dir if repo_dir else config.file['paths']['pkgbuilds']
+    repo_dir = repo_dir if repo_dir else config.get_path('pkgbuilds')
     makepkg_setup_args = [
         '--nobuild',
         '--holdver',
@@ -373,7 +373,7 @@ def build_package(package: Package, repo_dir: str = None, arch='aarch64', enable
         '--skipinteg',
         '--holdver',
     ]
-    repo_dir = repo_dir if repo_dir else config.file['paths']['pkgbuilds']
+    repo_dir = repo_dir if repo_dir else config.get_path('pkgbuilds')
     chroot = setup_build_chroot(arch=arch, extra_packages=package.depends)
     setup_dependencies_and_sources(package, chroot, enable_crosscompile=enable_crosscompile)
@@ -390,7 +390,7 @@ def build_package(package: Package, repo_dir: str = None, arch='aarch64', enable
         stderr=subprocess.DEVNULL,
     )

-    base_chroot = os.path.join(config.file['paths']['chroots'], f'base_{arch}')
+    base_chroot = os.path.join(config.get_path('chroots'), f'base_{arch}')
     result = subprocess.run([
         'mount',
         '-o',
@@ -417,7 +417,7 @@ def build_package(package: Package, repo_dir: str = None, arch='aarch64', enable
         'mount',
         '-o',
         'bind',
-        config.file['paths']['pkgbuilds'],
+        config.get_path('pkgbuilds'),
         f'{chroot}/src',
     ])
@@ -455,8 +455,8 @@ def build_package(package: Package, repo_dir: str = None, arch='aarch64', enable

 def add_package_to_repo(package: Package):
     logging.info(f'Adding {package.path} to repo')
-    binary_dir = os.path.join(config.file['paths']['packages'], package.repo)
-    pkgbuild_dir = os.path.join(config.file['paths']['pkgbuilds'], package.path)
+    binary_dir = os.path.join(config.get_path('packages'), package.repo)
+    pkgbuild_dir = os.path.join(config.get_path('pkgbuilds'), package.path)
     os.makedirs(binary_dir, exist_ok=True)

     for file in os.listdir(pkgbuild_dir):
wrapper.py (35 changed lines)
@@ -24,7 +24,30 @@ def wrap_docker():
     for source, destination in volume_mappings.items():
         result += ['-v', f'{source}:{destination}:z']
     return result


+def _filter_args(args):
+    """hack. filter out --config since it doesn't apply in docker"""
+    results = []
+    done = False
+    for i, arg in enumerate(args):
+        if done:
+            break
+        if arg[0] != '-':
+            results += args[i:]
+            done = True
+            break
+        for argname in ['--config', '-C']:
+            if arg.startswith(argname):
+                done = True
+                if arg != argname:  # arg is longer, assume --arg=value
+                    offset = 1
+                else:
+                    offset = 2
+                results += args[i + offset:]
+                break
+        if not done:
+            results.append(arg)
+    return results
+
+
 script_path = os.path.dirname(os.path.realpath(__file__))
 with open(os.path.join(script_path, 'version.txt')) as version_file:
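
To illustrate the new filter, a few hypothetical argument vectors and what _filter_args() from the hunk above returns for them. The wrapper mounts the wrapped config into the container at the default location (see the wrapped_config volume below), so a host-side --config/-C path would be meaningless inside Docker:

    # assuming _filter_args() as defined in the hunk above
    _filter_args(['--config', '/tmp/kupferbootstrap.toml', 'packages', 'build'])
    # -> ['packages', 'build']
    _filter_args(['-C=/tmp/kupferbootstrap.toml', 'packages', 'build'])
    # -> ['packages', 'build']
    _filter_args(['packages', 'build', '--force'])
    # -> ['packages', 'build', '--force']  (everything after the first non-option argument passes through)
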
@@ -84,10 +107,10 @@ def wrap_docker():
         os.getcwd(): '/src',
         wrapped_config: '/root/.config/kupfer/kupferbootstrap.toml',
     }
-    volumes |= dict({(config.file['paths'][vol_name], vol_dest) for vol_name, vol_dest in DOCKER_PATHS.items()})
+    volumes |= dict({config.get_path(vol_name): vol_dest for vol_name, vol_dest in DOCKER_PATHS.items()})
     if os.getenv('KUPFERBOOTSTRAP_PREBUILTS'):
         volumes |= {os.getenv("KUPFERBOOTSTRAP_PREBUILTS"): '/prebuilts'}
-    cmd = [
+    docker_cmd = [
         'docker',
         'run',
         '--name',
@@ -96,7 +119,11 @@ def wrap_docker():
         '--interactive',
         '--tty',
         '--privileged',
-    ] + _docker_volumes(volumes) + [tag, 'kupferbootstrap'] + sys.argv[1:]
+    ] + _docker_volumes(volumes) + [tag]
+
+    kupfer_cmd = ['kupferbootstrap'] + _filter_args(sys.argv[1:])
+
+    cmd = docker_cmd + kupfer_cmd
     logging.debug('Wrapping in docker:' + repr(cmd))
     result = subprocess.run(cmd)