Compare commits


9 commits

10 changed files with 251 additions and 116 deletions

File: (name not shown)

@@ -3,6 +3,7 @@
import os
import logging
from constants import Arch, QEMU_ARCHES
from exec.cmd import run_root_cmd
from utils import mount
@@ -38,11 +39,15 @@ def binfmt_info():
return full
def is_registered(arch: str) -> bool:
return os.path.exists("/proc/sys/fs/binfmt_misc/qemu-" + arch)
def is_registered(arch: Arch) -> bool:
qemu_arch = QEMU_ARCHES[arch]
return os.path.exists("/proc/sys/fs/binfmt_misc/qemu-" + qemu_arch)
def register(arch):
def register(arch: Arch):
if arch not in QEMU_ARCHES:
raise Exception(f'binfmt.register(): unknown arch {arch} (not in QEMU_ARCHES)')
qemu_arch = QEMU_ARCHES[arch]
if is_registered(arch):
return
@@ -51,7 +56,7 @@ def register(arch):
# Build registration string
# https://en.wikipedia.org/wiki/Binfmt_misc
# :name:type:offset:magic:mask:interpreter:flags
info = lines[arch]
info = lines[qemu_arch]
code = info['line']
binfmt = '/proc/sys/fs/binfmt_misc'
register = binfmt + '/register'
@@ -70,7 +75,10 @@ def register(arch):
def unregister(arch):
binfmt_file = "/proc/sys/fs/binfmt_misc/qemu-" + arch
if arch not in QEMU_ARCHES:
raise Exception(f'binfmt.unregister(): unknown arch {arch} (not in QEMU_ARCHES)')
qemu_arch = QEMU_ARCHES[arch]
binfmt_file = "/proc/sys/fs/binfmt_misc/qemu-" + qemu_arch
if not os.path.exists(binfmt_file):
return
logging.info(f"Unregistering qemu binfmt ({arch})")

File: (name not shown)

@@ -91,11 +91,20 @@ Arch: TypeAlias = str
ARCHES = [
'x86_64',
'aarch64',
'armv7h',
]
DistroArch: TypeAlias = Arch
TargetArch: TypeAlias = Arch
ALARM_REPOS = {
'core': 'http://mirror.archlinuxarm.org/$arch/$repo',
'extra': 'http://mirror.archlinuxarm.org/$arch/$repo',
'community': 'http://mirror.archlinuxarm.org/$arch/$repo',
'alarm': 'http://mirror.archlinuxarm.org/$arch/$repo',
'aur': 'http://mirror.archlinuxarm.org/$arch/$repo',
}
BASE_DISTROS: dict[DistroArch, dict[str, dict[str, str]]] = {
'x86_64': {
'repos': {
@@ -105,42 +114,58 @@ BASE_DISTROS: dict[DistroArch, dict[str, dict[str, str]]] = {
},
},
'aarch64': {
'repos': {
'core': 'http://mirror.archlinuxarm.org/$arch/$repo',
'extra': 'http://mirror.archlinuxarm.org/$arch/$repo',
'community': 'http://mirror.archlinuxarm.org/$arch/$repo',
'alarm': 'http://mirror.archlinuxarm.org/$arch/$repo',
'aur': 'http://mirror.archlinuxarm.org/$arch/$repo',
},
'repos': ALARM_REPOS,
},
'armv7h': {
'repos': ALARM_REPOS,
},
}
COMPILE_ARCHES: dict[Arch, str] = {
'x86_64': 'amd64',
'aarch64': 'arm64',
'armv7h': 'arm',
}
GCC_HOSTSPECS: dict[DistroArch, dict[TargetArch, str]] = {
'x86_64': {
'x86_64': 'x86_64-pc-linux-gnu',
'aarch64': 'aarch64-linux-gnu',
'armv7h': 'arm-unknown-linux-gnueabihf'
},
'aarch64': {
'aarch64': 'aarch64-unknown-linux-gnu',
}
},
'armv7h': {
'armv7h': 'armv7l-unknown-linux-gnueabihf'
},
}
CFLAGS_GENERAL = ['-O2', '-pipe', '-fstack-protector-strong']
CFLAGS_ALARM = [
'-fno-plt',
'-fexceptions',
'-Wp,-D_FORTIFY_SOURCE=2',
'-Wformat',
'-Werror=format-security',
'-fstack-clash-protection',
]
CFLAGS_ARCHES: dict[Arch, list[str]] = {
'x86_64': ['-march=x86-64', '-mtune=generic'],
'aarch64': [
'-march=armv8-a',
'-fexceptions',
'-Wp,-D_FORTIFY_SOURCE=2',
'-Wformat',
'-Werror=format-security',
'-fstack-clash-protection',
]
] + CFLAGS_ALARM,
'armv7h': [
'-march=armv7-a',
'-mfloat-abi=hard',
'-mfpu=neon',
] + CFLAGS_ALARM,
}
QEMU_ARCHES: dict[Arch, str] = {
'x86_64': 'x86_64',
'aarch64': 'aarch64',
'armv7h': 'arm',
}
QEMU_BINFMT_PKGS = ['qemu-user-static-bin', 'binfmt-qemu-static']
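To see how the deduplicated flag lists compose, here is a small illustrative helper (cflags_for is hypothetical, not project code) joining the general and per-arch flags the way a makepkg.conf generator might:

    def cflags_for(arch: str) -> str:
        # CFLAGS_GENERAL and CFLAGS_ARCHES as defined above; both ARM arches
        # now end with the shared CFLAGS_ALARM hardening flags exactly once.
        return ' '.join(CFLAGS_GENERAL + CFLAGS_ARCHES[arch])

    # cflags_for('armv7h') starts with
    # '-O2 -pipe -fstack-protector-strong -march=armv7-a -mfloat-abi=hard -mfpu=neon'
    # and ends with the CFLAGS_ALARM entries ('-fno-plt', '-fexceptions', ...).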

File: main.py (12 changed lines)

@@ -1,11 +1,13 @@
#!/usr/bin/env python3
import click
from traceback import format_exc as get_trace
import subprocess
from traceback import format_exc as get_trace
from typing import Optional
from logger import logging, setup_logging, verbose_option
from wrapper import nowrapper_option
from wrapper import nowrapper_option, enforce_wrap
from config import config, config_option, cmd_config
from forwarding import cmd_forwarding
from packages import cmd_packages
@@ -23,12 +25,14 @@ from ssh import cmd_ssh
@verbose_option
@config_option
@nowrapper_option
def cli(verbose: bool = False, config_file: str = None, no_wrapper: bool = False, error_shell: bool = False):
def cli(verbose: bool = False, config_file: str = None, wrapper_override: Optional[bool] = None, error_shell: bool = False):
setup_logging(verbose)
config.runtime['verbose'] = verbose
config.runtime['no_wrap'] = no_wrapper
config.runtime['no_wrap'] = wrapper_override is False
config.runtime['error_shell'] = error_shell
config.try_load_file(config_file)
if wrapper_override:
enforce_wrap()
def main():
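The switch to Optional[bool] makes the wrapper flag tri-state. A sketch of the resulting behaviour (wrapper_state is a hypothetical helper mirroring the two branches in cli() above):

    from typing import Optional

    def wrapper_state(wrapper_override: Optional[bool]) -> str:
        if wrapper_override is False:  # -W/--no-wrapper passed
            return 'disabled: config.runtime["no_wrap"] = True'
        if wrapper_override:           # -w/--force-wrapper passed
            return 'forced: enforce_wrap() is called'
        return 'autodetect: neither flag passed'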

File: (name not shown)

@@ -42,6 +42,7 @@ def get_makepkg_env(arch: Optional[Arch] = None):
'LANG': 'C',
'CARGO_BUILD_JOBS': str(threads),
'MAKEFLAGS': f"-j{threads}",
'HOME': '/root',
}
native = config.runtime.arch
assert native
@@ -423,6 +424,15 @@ def setup_build_chroot(
return chroot
def setup_git_insecure_paths(chroot: BuildChroot):
chroot.run_cmd(
["git", "config", "--global", "--add", "safe.directory", "'*'"],
inner_env={
'HOME': '/root'
},
).check_returncode() # type: ignore[union-attr]
def setup_sources(package: Pkgbuild, chroot: BuildChroot, makepkg_conf_path='/etc/makepkg.conf'):
makepkg_setup_args = [
'--config',
@@ -434,7 +444,12 @@ def setup_sources(package: Pkgbuild, chroot: BuildChroot, makepkg_conf_path='/et
]
logging.info(f'Setting up sources for {package.path} in {chroot.name}')
result = chroot.run_cmd(MAKEPKG_CMD + makepkg_setup_args, cwd=os.path.join(CHROOT_PATHS['pkgbuilds'], package.path))
setup_git_insecure_paths(chroot)
result = chroot.run_cmd(
MAKEPKG_CMD + makepkg_setup_args,
cwd=os.path.join(CHROOT_PATHS['pkgbuilds'], package.path),
inner_env=get_makepkg_env(chroot.arch),
)
assert isinstance(result, subprocess.CompletedProcess)
if result.returncode != 0:
raise Exception(f'Failed to check sources for {package.path}')
@@ -506,6 +521,7 @@ def build_package(
if failed_deps:
raise Exception(f'Dependencies failed to install: {failed_deps}')
setup_git_insecure_paths(build_root)
makepkg_conf_absolute = os.path.join('/', makepkg_conf_path)
setup_sources(package, build_root, makepkg_conf_path=makepkg_conf_absolute)
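Context for the HOME and git changes in this file: git 2.35.2+ refuses to operate on repositories owned by a different user unless they are marked safe, and inside the build chroot everything runs as root on bind-mounted PKGBUILD trees. A standalone sketch of what setup_git_insecure_paths() configures (plain subprocess assumed, chroot plumbing omitted):

    import os
    import subprocess

    def mark_all_repos_safe() -> None:
        # Equivalent of: git config --global --add safe.directory '*'
        # Pinning HOME makes the setting land in /root/.gitconfig, the same
        # file later git/makepkg calls (also run with HOME=/root) will read.
        subprocess.run(
            ['git', 'config', '--global', '--add', 'safe.directory', '*'],
            env={**os.environ, 'HOME': '/root'},
            check=True,
        )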

File: (name not shown)

@@ -6,7 +6,7 @@ from typing import Optional
from config import config
from constants import Arch, ARCHES
from config.scheme import DataClass, munchclass
from .pkgbuild import discover_pkgbuilds, _pkgbuilds_cache, Pkgbuild, parse_pkgbuild
from .pkgbuild import discover_pkgbuilds, get_pkgbuild_by_path, _pkgbuilds_cache, Pkgbuild
DEVICE_DEPRECATIONS = {
"oneplus-enchilada": "sdm845-oneplus-enchilada",
@@ -105,8 +105,7 @@ def get_device(name: str, pkgbuilds: Optional[dict[str, Pkgbuild]] = None, lazy:
else:
relative_path = os.path.join('device', pkgname)
assert os.path.exists(os.path.join(config.get_path('pkgbuilds'), relative_path))
pkgbuild = [p for p in parse_pkgbuild(relative_path, _config=config) if p.name == pkgname][0]
_pkgbuilds_cache[pkgname] = pkgbuild
pkgbuild = [p for p in get_pkgbuild_by_path(relative_path, lazy=lazy, _config=config) if p.name == pkgname][0]
device = parse_device_pkg(pkgbuild)
if lazy:
_device_cache[name] = device
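With this change the manual _pkgbuilds_cache bookkeeping moves into get_pkgbuild_by_path() (defined in pkgbuild.py below), so repeated lookups of the same device parse its PKGBUILD only once. A usage sketch (import path assumed):

    from packages.device import get_device  # assumed module path

    dev = get_device('sdm845-oneplus-enchilada')        # parses the PKGBUILD
    dev_again = get_device('sdm845-oneplus-enchilada')  # served from the cache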

File: (name not shown)

@@ -6,11 +6,11 @@ import multiprocessing
import os
import subprocess
from constants import REPOSITORIES
from joblib import Parallel, delayed
from typing import Optional, Sequence
from typing import Optional
from config import config, ConfigStateHolder
from constants import REPOSITORIES
from exec.cmd import run_cmd
from constants import Arch, MAKEPKG_CMD
from distro.package import PackageInfo
@@ -65,6 +65,7 @@ class Pkgbuild(PackageInfo):
path: str
pkgver: str
pkgrel: str
sources_refreshed: bool
def __init__(
self,
@@ -74,6 +75,7 @@ class Pkgbuild(PackageInfo):
provides: list[str] = [],
replaces: list[str] = [],
repo: Optional[str] = None,
sources_refreshed: bool = False,
) -> None:
"""
Create new Pkgbuild representation for file located at `{relative_path}/PKGBUILD`.
@@ -91,9 +93,15 @@ class Pkgbuild(PackageInfo):
self.path = relative_path
self.pkgver = ''
self.pkgrel = ''
self.sources_refreshed = sources_refreshed
def __repr__(self):
return f'Pkgbuild({self.name},{repr(self.path)},{self.version},{self.mode})'
return ','.join([
'Pkgbuild(' + self.name,
repr(self.path),
self.version + ("🔄" if self.sources_refreshed else ""),
self.mode + ')',
])
def names(self):
return list(set([self.name] + self.provides + self.replaces))
@@ -102,14 +110,62 @@ class Pkgbuild(PackageInfo):
"""updates `self.version` from `self.pkgver` and `self.pkgrel`"""
self.version = f'{self.pkgver}-{self.pkgrel}'
def update(self, pkg: Pkgbuild):
self.version = pkg.version
self.arches = list(pkg.arches)
self.depends = list(pkg.depends)
self.provides = list(pkg.provides)
self.replaces = list(pkg.replaces)
self.local_depends = list(pkg.local_depends)
self.repo = pkg.repo
self.mode = pkg.mode
self.path = pkg.path
self.pkgver = pkg.pkgver
self.pkgrel = pkg.pkgrel
self.sources_refreshed = self.sources_refreshed or pkg.sources_refreshed
self.update_version()
def refresh_sources(self):
raise NotImplementedError()
class Pkgbase(Pkgbuild):
subpackages: Sequence[SubPkgbuild]
subpackages: list[SubPkgbuild]
def __init__(self, relative_path: str, subpackages: Sequence[SubPkgbuild] = [], **args):
def __init__(self, relative_path: str, subpackages: list[SubPkgbuild] = [], **args):
self.subpackages = list(subpackages)
super().__init__(relative_path, **args)
def update(self, pkg: Pkgbuild):
if not isinstance(pkg, Pkgbase):
raise Exception(f"Tried to update pkgbase {self.name} with non-base pkg {pkg}")
Pkgbuild.update(self, pkg)
sub_dict = {p.name: p for p in self.subpackages}
self.subpackages.clear()
for new_pkg in pkg.subpackages:
name = new_pkg.name
if name not in sub_dict:
sub_dict[name] = new_pkg
else:
sub_dict[name].update(new_pkg)
updated = sub_dict[name]
updated.sources_refreshed = self.sources_refreshed
self.subpackages.append(updated)
def refresh_sources(self, lazy: bool = True):
'''
Reloads the pkgbuild from disk.
Does **NOT** actually perform the makepkg action to refresh the pkgver() first!
'''
if lazy and self.sources_refreshed:
return
parsed = parse_pkgbuild(self.path, sources_refreshed=True)
basepkgs = [p for p in parsed if isinstance(p, Pkgbase)]
if not len(basepkgs) == 1:
raise Exception(f"error refreshing {self.name}: wrong number of base packages found: {basepkgs}")
self.sources_refreshed = True
self.update(basepkgs[0])
class SubPkgbuild(Pkgbuild):
pkgbase: Pkgbase
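The subpackage merge in Pkgbase.update() is worth a note: it updates existing SubPkgbuild objects in place rather than replacing them, so references held elsewhere (e.g. in _pkgbuilds_cache) see the refreshed data. A toy sketch of the same merge pattern, with simplified stand-in classes:

    class Sub:
        def __init__(self, name: str, version: str):
            self.name, self.version = name, version

        def update(self, other: 'Sub') -> None:
            self.version = other.version  # mutate in place, keep identity

    def merge_subpackages(old: list[Sub], new: list[Sub]) -> list[Sub]:
        by_name = {s.name: s for s in old}
        merged = []
        for incoming in new:
            if incoming.name in by_name:
                by_name[incoming.name].update(incoming)  # old object survives
                merged.append(by_name[incoming.name])
            else:
                merged.append(incoming)
        return merged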
@@ -119,21 +175,18 @@ class SubPkgbuild(Pkgbuild):
self.name = name
self.pkgbase = pkgbase
self.version = pkgbase.version
self.arches = pkgbase.arches
self.depends = list(pkgbase.depends)
self.sources_refreshed = False
self.update(pkgbase)
self.provides = []
self.replaces = []
self.local_depends = list(pkgbase.local_depends)
self.repo = pkgbase.repo
self.mode = pkgbase.mode
self.path = pkgbase.path
self.pkgver = pkgbase.pkgver
self.pkgrel = pkgbase.pkgrel
self.update_version()
def refresh_sources(self, lazy: bool = True):
assert self.pkgbase
self.pkgbase.refresh_sources(lazy=lazy)
def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] = None) -> Sequence[Pkgbuild]:
def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] = None, sources_refreshed: bool = False) -> list[Pkgbuild]:
"""
Since this function may run in a different subprocess, the config must be passed in as a parameter
"""
@@ -156,7 +209,7 @@ def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] =
raise Exception((f'{relative_pkg_dir}/PKGBUILD has {"no" if mode is None else "an invalid"} mode configured') +
(f': "{mode}"' if mode is not None else ''))
base_package = Pkgbase(relative_pkg_dir)
base_package = Pkgbase(relative_pkg_dir, sources_refreshed=sources_refreshed)
base_package.mode = mode
base_package.repo = relative_pkg_dir.split('/')[0]
srcinfo = run_cmd(
@@ -197,7 +250,7 @@ def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] =
elif line.startswith('depends') or line.startswith('makedepends') or line.startswith('checkdepends') or line.startswith('optdepends'):
current.depends.append(splits[1].split('=')[0].split(': ')[0])
results: Sequence[Pkgbuild] = list(base_package.subpackages)
results: list[Pkgbuild] = list(base_package.subpackages)
if len(results) > 1:
logging.debug(f" Split package detected: {base_package.name}: {results}")
base_package.update_version()
@@ -214,9 +267,21 @@ def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] =
_pkgbuilds_cache = dict[str, Pkgbuild]()
_pkgbuilds_paths = dict[str, list[Pkgbuild]]()
_pkgbuilds_scanned: bool = False
def get_pkgbuild_by_path(relative_path: str, lazy: bool = True, _config: Optional[ConfigStateHolder] = None) -> list[Pkgbuild]:
global _pkgbuilds_cache, _pkgbuilds_paths
if lazy and relative_path in _pkgbuilds_paths:
return _pkgbuilds_paths[relative_path]
parsed = parse_pkgbuild(relative_path, _config=_config)
_pkgbuilds_paths[relative_path] = parsed
for pkg in parsed:
_pkgbuilds_cache[pkg.name] = pkg
return parsed
def discover_pkgbuilds(parallel: bool = True, lazy: bool = True) -> dict[str, Pkgbuild]:
global _pkgbuilds_cache, _pkgbuilds_scanned
if lazy and _pkgbuilds_scanned:
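One subtlety the parallel branch below works around: the module-level caches are per-process, so PKGBUILDs parsed inside joblib workers never populate the parent's _pkgbuilds_paths. Hence cached paths are filtered out up front and the cache is rebuilt from the returned chunks. A condensed sketch of the pattern (stand-in parse function, joblib as used above):

    from joblib import Parallel, delayed

    cache: dict[str, list] = {}  # per-process; empty inside each worker

    def parse(path: str) -> list:
        return [f'parsed:{path}']  # stand-in for get_pkgbuild_by_path()

    def discover(paths: list[str]) -> list:
        results, todo = [], []
        for p in paths:
            if p in cache:
                results.extend(cache[p])  # hit: skip re-parsing
            else:
                todo.append(p)
        chunks = Parallel(n_jobs=4)(delayed(parse)(p) for p in todo)
        for p, pkgs in zip(todo, chunks):
            cache[p] = pkgs  # rebuild parent cache from worker results
            results.extend(pkgs)
        return results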
@@ -230,17 +295,29 @@ def discover_pkgbuilds(parallel: bool = True, lazy: bool = True) -> dict[str, Pk
for dir in os.listdir(os.path.join(pkgbuilds_dir, repo)):
paths.append(os.path.join(repo, dir))
results = []
logging.info("Parsing PKGBUILDs")
logging.debug(f"About to parse pkgbuilds. verbosity: {config.runtime['verbose']}")
results = []
if parallel:
chunks = (Parallel(n_jobs=multiprocessing.cpu_count() * 4)(delayed(parse_pkgbuild)(path, config) for path in paths))
paths_filtered = paths
if lazy:
# filter out cached packages as the caches don't cross process boundaries
paths_filtered = []
for p in paths:
if p in _pkgbuilds_paths:
# use cache
results += _pkgbuilds_paths[p]
else:
paths_filtered += [p]
chunks = (Parallel(n_jobs=multiprocessing.cpu_count() * 4)(
delayed(get_pkgbuild_by_path)(path, lazy=lazy, _config=config) for path in paths_filtered))
else:
chunks = (parse_pkgbuild(path) for path in paths)
chunks = (get_pkgbuild_by_path(path, lazy=lazy) for path in paths)
_pkgbuilds_paths.clear()
# one list of packages per path
for pkglist in chunks:
_pkgbuilds_paths[pkglist[0].path] = pkglist
results += pkglist
logging.debug('Building package dictionary!')
@@ -255,11 +332,11 @@ def discover_pkgbuilds(parallel: bool = True, lazy: bool = True) -> dict[str, Pk
package.local_depends = package.depends.copy()
for dep in package.depends.copy():
found = dep in packages
for p in packages.values():
for pkg in packages.values():
if found:
break
if dep in p.names():
logging.debug(f'Found {p.name} that provides {dep}')
if dep in pkg.names():
logging.debug(f'Found {pkg.name} that provides {dep}')
found = True
break
if not found:

File: (name not shown)

@@ -42,7 +42,7 @@ ONEPLUS_ENCHILADA_PKG = f'device-{ONEPLUS_ENCHILADA}'
def enchilada_pkgbuild(initialise_pkgbuilds_dir: ConfigStateHolder):
config = initialise_pkgbuilds_dir
config.try_load_file()
return parse_pkgbuild(os.path.join('device', ONEPLUS_ENCHILADA_PKG), config)[0]
return parse_pkgbuild(os.path.join('device', ONEPLUS_ENCHILADA_PKG), _config=config)[0]
def validate_oneplus_enchilada(d: Device):

File: (name not shown)

@@ -1,2 +1,2 @@
#!/bin/bash
git ls-files \*.py | sort -u | xargs mypy --pretty --install-types --ignore-missing-imports "$@"
git ls-files \*.py | sort -u | xargs mypy --pretty --show-error-codes --install-types --ignore-missing-imports "$@"
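--show-error-codes makes mypy print the bracketed code for each error, which is what enables the narrow suppression added in build.py above. Roughly what that looks like (exact message wording varies by mypy version):

    from typing import Optional
    import subprocess

    def maybe_run() -> Optional['subprocess.CompletedProcess[bytes]']:
        return subprocess.run(['true'], capture_output=True)

    # mypy --show-error-codes reports something like:
    #   error: Item "None" of "Optional[CompletedProcess[bytes]]" has no
    #   attribute "check_returncode"  [union-attr]
    # which can then be silenced precisely:
    maybe_run().check_returncode()  # type: ignore[union-attr]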

File: (name not shown)

@@ -51,10 +51,10 @@ def wrap_if_foreign_arch(arch: Arch):
nowrapper_option = click.option(
'-W',
'--no-wrapper',
'no_wrapper',
'-w/-W',
'--force-wrapper/--no-wrapper',
'wrapper_override',
is_flag=True,
default=False,
help='Disable the docker wrapper. Defaults to autodetection.',
default=None,
help='Force or disable the docker wrapper. Defaults to autodetection.',
)
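For reference, this uses click's paired-flag syntax: a slash in the declaration creates an on/off switch pair, and default=None leaves the value unset when neither switch is given. A minimal self-contained demo (not project code):

    import click
    from typing import Optional

    @click.command()
    @click.option('-w/-W', '--force-wrapper/--no-wrapper', 'wrapper_override',
                  is_flag=True, default=None,
                  help='Force or disable the wrapper. Defaults to autodetection.')
    def demo(wrapper_override: Optional[bool]):
        # None: no flag given; True: -w/--force-wrapper; False: -W/--no-wrapper
        states = {None: 'autodetect', True: 'forced', False: 'disabled'}
        click.echo(states[wrapper_override])

    if __name__ == '__main__':
        demo()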

File: (name not shown)

@@ -6,6 +6,8 @@ import sys
from config import config
from constants import CHROOT_PATHS
from exec.file import makedir
from .wrapper import BaseWrapper
DOCKER_PATHS = CHROOT_PATHS.copy()
@@ -25,66 +27,70 @@ class DockerWrapper(BaseWrapper):
script_path = config.runtime['script_source_dir']
with open(os.path.join(script_path, 'version.txt')) as version_file:
version = version_file.read().replace('\n', '')
tag = f'registry.gitlab.com/kupfer/kupferbootstrap:{version}'
if version == 'dev':
logging.info(f'Building docker image "{tag}"')
cmd = [
'docker',
'build',
'.',
'-t',
tag,
] + (['-q'] if not config.runtime['verbose'] else [])
logging.debug('Running docker cmd: ' + ' '.join(cmd))
result = subprocess.run(cmd, cwd=script_path, capture_output=True)
if result.returncode != 0:
logging.fatal('Failed to build docker image:\n' + result.stderr.decode())
exit(1)
else:
# Check if the image for the version already exists
result = subprocess.run(
[
'docker',
'images',
'-q',
tag,
],
capture_output=True,
)
if result.stdout == b'':
logging.info(f'Pulling kupferbootstrap docker image version \'{version}\'')
subprocess.run([
'docker',
'pull',
tag,
])
container_name = f'kupferbootstrap-{self.uuid}'
wrapped_config = self.generate_wrapper_config()
ssh_dir = os.path.join(pathlib.Path.home(), '.ssh')
if not os.path.exists(ssh_dir):
os.makedirs(ssh_dir, mode=0o700)
volumes = self.get_bind_mounts_default(wrapped_config)
volumes |= dict({config.get_path(vol_name): vol_dest for vol_name, vol_dest in DOCKER_PATHS.items()})
docker_cmd = [
tag = f'registry.gitlab.com/kupfer/kupferbootstrap:{version}'
if version == 'dev':
logging.info(f'Building docker image "{tag}"')
cmd = [
'docker',
'run',
'--name',
container_name,
'--rm',
'--interactive',
'--tty',
'--privileged',
] + docker_volumes_args(volumes) + [tag]
'build',
'.',
'-t',
tag,
] + (['-q'] if not config.runtime['verbose'] else [])
logging.debug('Running docker cmd: ' + ' '.join(cmd))
result = subprocess.run(cmd, cwd=script_path, capture_output=True)
if result.returncode != 0:
logging.fatal('Failed to build docker image:\n' + result.stderr.decode())
exit(1)
else:
# Check if the image for the version already exists
result = subprocess.run(
[
'docker',
'images',
'-q',
tag,
],
capture_output=True,
)
if result.stdout == b'':
logging.info(f'Pulling kupferbootstrap docker image version \'{version}\'')
subprocess.run([
'docker',
'pull',
tag,
])
container_name = f'kupferbootstrap-{self.uuid}'
kupfer_cmd = ['kupferbootstrap', '--config', '/root/.config/kupfer/kupferbootstrap.toml'] + self.filter_args_wrapper(sys.argv[1:])
wrapped_config = self.generate_wrapper_config()
cmd = docker_cmd + kupfer_cmd
logging.debug('Wrapping in docker:' + repr(cmd))
result = subprocess.run(cmd)
ssh_dir = os.path.join(pathlib.Path.home(), '.ssh')
if not os.path.exists(ssh_dir):
os.makedirs(ssh_dir, mode=0o700)
exit(result.returncode)
volumes = self.get_bind_mounts_default(wrapped_config)
for vol_name, vol_dest in DOCKER_PATHS.items():
vol_src = config.get_path(vol_name)
makedir(vol_src)
volumes[vol_src] = vol_dest
docker_cmd = [
'docker',
'run',
'--name',
container_name,
'--rm',
'--interactive',
'--tty',
'--privileged',
] + docker_volumes_args(volumes) + [tag]
kupfer_cmd = ['kupferbootstrap', '--config', '/root/.config/kupfer/kupferbootstrap.toml'] + self.filter_args_wrapper(sys.argv[1:])
cmd = docker_cmd + kupfer_cmd
logging.debug('Wrapping in docker:' + repr(cmd))
result = subprocess.run(cmd)
exit(result.returncode)
def stop(self):
subprocess.run(
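Aside from flattening the conditional (hence the large re-indented block above), the functional change here is makedir(vol_src) before the volume list is handed to docker: if a bind-mount source is missing on the host, the docker daemon creates it as a root-owned directory, which then breaks unprivileged access later. A sketch of the volume assembly (docker_volumes_args shape assumed from its usage above):

    import os

    def docker_volumes_args(volumes: dict[str, str]) -> list[str]:
        # Assumed helper shape: {src: dest} -> ['-v', 'src:dest', ...]
        args: list[str] = []
        for src, dest in volumes.items():
            args += ['-v', f'{src}:{dest}']
        return args

    def assemble_volumes(docker_paths: dict[str, str], get_path) -> dict[str, str]:
        volumes: dict[str, str] = {}
        for vol_name, vol_dest in docker_paths.items():
            vol_src = get_path(vol_name)
            os.makedirs(vol_src, exist_ok=True)  # create on host, with caller's ownership
            volumes[vol_src] = vol_dest
        return volumes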