distro: refactor into multiple files, packages: factor out distro.Pkgbuild class
parent 7359b447e7
commit 0c3592065c
7 changed files with 196 additions and 145 deletions
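In practice the refactor means callers stop importing from the flat distro module and use the qualified paths of the distro package instead; the Distro implementation now lives in distro/distro.py, and the PKGBUILD parsing that used to be packages.py's Package class moves into the new distro/pkgbuild.py. A minimal sketch of the import change, taken from the hunks below:

# before this commit
from distro import get_base_distro, get_kupfer_local, RepoInfo

# after this commit
from distro.distro import get_base_distro, get_kupfer_local, RepoInfo
from distro.pkgbuild import Pkgbuild, parse_pkgbuild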

@@ -5,14 +5,14 @@ import os
import atexit
from glob import glob
from shutil import rmtree
from shlex import quote as shell_quote

from config import config
from distro import get_base_distro, get_kupfer_local, RepoInfo
from shlex import quote as shell_quote
from utils import mount, umount, check_findmnt, log_or_exception
from distro.distro import get_base_distro, get_kupfer_local, RepoInfo
from wrapper import enforce_wrap
from constants import Arch, GCC_HOSTSPECS, CROSSDIRECT_PKGS, BASE_PACKAGES, CHROOT_PATHS
from generator import generate_makepkg_conf
from utils import mount, umount, check_findmnt, log_or_exception

BIND_BUILD_DIRS = 'BINDBUILDDIRS'
BASE_CHROOT_PREFIX = 'base_'

@@ -156,3 +156,10 @@ WRAPPER_TYPES = [
    'none',
    'docker',
]

MAKEPKG_CMD = [
    'makepkg',
    '--noconfirm',
    '--ignorearch',
    '--needed',
]

@@ -1,64 +1,2 @@
from constants import ARCHES, BASE_DISTROS, REPOSITORIES, KUPFER_HTTPS, CHROOT_PATHS
from generator import generate_pacman_conf_body
from config import config

from .package import PackageInfo
from .repo import RepoInfo, Repo


class Distro:
    repos: dict[str, Repo]
    arch: str

    def __init__(self, arch: str, repo_infos: dict[str, RepoInfo], scan=False):
        assert (arch in ARCHES)
        self.arch = arch
        self.repos = dict[str, Repo]()
        for repo_name, repo_info in repo_infos.items():
            self.repos[repo_name] = Repo(
                name=repo_name,
                arch=arch,
                url_template=repo_info.url_template,
                options=repo_info.options,
                scan=scan,
            )

    def get_packages(self):
        """ get packages from all repos, semantically overlaying them"""
        results = dict[str, PackageInfo]()
        for repo in self.repos.values().reverse():
            assert (repo.packages is not None)
            for package in repo.packages:
                results[package.name] = package

    def repos_config_snippet(self, extra_repos: dict[str, RepoInfo] = {}) -> str:
        extras = [Repo(name, url_template=info.url_template, arch=self.arch, options=info.options, scan=False) for name, info in extra_repos.items()]
        return '\n\n'.join(repo.config_snippet() for repo in (list(self.repos.values()) + extras))

    def get_pacman_conf(self, extra_repos: dict[str, RepoInfo] = {}, check_space: bool = True):
        body = generate_pacman_conf_body(self.arch, check_space=check_space)
        return body + self.repos_config_snippet(extra_repos)


def get_base_distro(arch: str) -> Distro:
    repos = {name: RepoInfo(url_template=url) for name, url in BASE_DISTROS[arch]['repos'].items()}
    return Distro(arch=arch, repo_infos=repos, scan=False)


def get_kupfer(arch: str, url_template: str) -> Distro:
    repos = {name: RepoInfo(url_template=url_template, options={'SigLevel': 'Never'}) for name in REPOSITORIES}
    return Distro(
        arch=arch,
        repo_infos=repos,
    )


def get_kupfer_https(arch: str) -> Distro:
    return get_kupfer(arch, KUPFER_HTTPS)


def get_kupfer_local(arch: str = None, in_chroot: bool = True) -> Distro:
    if not arch:
        arch = config.runtime['arch']
    dir = CHROOT_PATHS['packages'] if in_chroot else config.get_path('packages')
    return get_kupfer(arch, f"file://{dir}/$arch/$repo")

distro/distro.py (new file, 64 lines)

@@ -0,0 +1,64 @@
from constants import ARCHES, BASE_DISTROS, REPOSITORIES, KUPFER_HTTPS, CHROOT_PATHS
from generator import generate_pacman_conf_body
from config import config

from .package import PackageInfo
from .repo import RepoInfo, Repo


class Distro:
    repos: dict[str, Repo]
    arch: str

    def __init__(self, arch: str, repo_infos: dict[str, RepoInfo], scan=False):
        assert (arch in ARCHES)
        self.arch = arch
        self.repos = dict[str, Repo]()
        for repo_name, repo_info in repo_infos.items():
            self.repos[repo_name] = Repo(
                name=repo_name,
                arch=arch,
                url_template=repo_info.url_template,
                options=repo_info.options,
                scan=scan,
            )

    def get_packages(self):
        """ get packages from all repos, semantically overlaying them"""
        results = dict[str, PackageInfo]()
        for repo in self.repos.values().reverse():
            assert (repo.packages is not None)
            for package in repo.packages:
                results[package.name] = package

    def repos_config_snippet(self, extra_repos: dict[str, RepoInfo] = {}) -> str:
        extras = [Repo(name, url_template=info.url_template, arch=self.arch, options=info.options, scan=False) for name, info in extra_repos.items()]
        return '\n\n'.join(repo.config_snippet() for repo in (list(self.repos.values()) + extras))

    def get_pacman_conf(self, extra_repos: dict[str, RepoInfo] = {}, check_space: bool = True):
        body = generate_pacman_conf_body(self.arch, check_space=check_space)
        return body + self.repos_config_snippet(extra_repos)


def get_base_distro(arch: str) -> Distro:
    repos = {name: RepoInfo(url_template=url) for name, url in BASE_DISTROS[arch]['repos'].items()}
    return Distro(arch=arch, repo_infos=repos, scan=False)


def get_kupfer(arch: str, url_template: str) -> Distro:
    repos = {name: RepoInfo(url_template=url_template, options={'SigLevel': 'Never'}) for name in REPOSITORIES}
    return Distro(
        arch=arch,
        repo_infos=repos,
    )


def get_kupfer_https(arch: str) -> Distro:
    return get_kupfer(arch, KUPFER_HTTPS)


def get_kupfer_local(arch: str = None, in_chroot: bool = True) -> Distro:
    if not arch:
        arch = config.runtime['arch']
    dir = CHROOT_PATHS['packages'] if in_chroot else config.get_path('packages')
    return get_kupfer(arch, f"file://{dir}/$arch/$repo")
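
For orientation, a rough usage sketch of the module above (not part of the commit; the 'aarch64' value is only illustrative): get_base_distro() wraps the upstream repos from BASE_DISTROS, get_kupfer_local() builds file:// repo URLs under the packages directory, and get_pacman_conf() renders a pacman.conf body followed by the repo sections.

# illustrative sketch, not part of the diff
from distro.distro import get_base_distro, get_kupfer_local

base = get_base_distro('aarch64')                      # upstream repos from BASE_DISTROS
local = get_kupfer_local('aarch64', in_chroot=False)   # file:// repos under config.get_path('packages')
print(base.get_pacman_conf(check_space=False))         # conf body plus one section per repo
print(local.repos_config_snippet())                    # just the repo sections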

distro/pkgbuild.py (new file, 98 lines)

@@ -0,0 +1,98 @@
from copy import deepcopy
import logging
import os
import subprocess
from typing import Generator

from chroot import Chroot
from constants import CHROOT_PATHS, MAKEPKG_CMD

from .package import PackageInfo


class Pkgbuild(PackageInfo):
    depends: list[str] = None
    provides: list[str] = None
    local_depends: list[PackageInfo] = None
    subpackages: list[PackageInfo] = None
    repo = ''
    mode = ''
    path = ''
    pkgver = ''
    pkgrel = ''

    def __init__(
        self,
        relative_path: str,
        depends: list[str] = [],
        provides: list[str] = [],
        subpackages: list[PackageInfo] = [],
    ) -> None:
        self.version = None
        self.path = relative_path
        self.depends = deepcopy(depends)
        self.provides = deepcopy(provides)
        self.subpackages = deepcopy(subpackages)

    def __repr__(self):
        return f'Package({self.name},{repr(self.path)},{self.version},{self.mode})'

    def names(self):
        return [self.name] + self.provides


def parse_pkgbuild(relative_pkg_dir: str, native_chroot: Chroot) -> list[Pkgbuild]:
    mode = None
    with open(os.path.join(native_chroot.get_path(CHROOT_PATHS['pkgbuilds']), relative_pkg_dir, 'PKGBUILD'), 'r') as file:
        for line in file.read().split('\n'):
            if line.startswith('_mode='):
                mode = line.split('=')[1]
                break
    if mode not in ['host', 'cross']:
        raise Exception((f'{relative_pkg_dir}/PKGBUILD has {"no" if mode is None else "an invalid"} mode configured') +
                        (f': "{mode}"' if mode is not None else ''))

    base_package = Pkgbuild(relative_pkg_dir)
    base_package.mode = mode
    base_package.repo = relative_pkg_dir.split('/')[0]
    srcinfo = native_chroot.run_cmd(
        MAKEPKG_CMD + ['--printsrcinfo'],
        cwd=os.path.join(CHROOT_PATHS['pkgbuilds'], base_package.path),
        stdout=subprocess.PIPE,
    )
    lines = srcinfo.stdout.decode('utf-8').split('\n')

    current = base_package
    multi_pkgs = False
    for line_raw in lines:
        line = line_raw.strip()
        if not line:
            continue
        splits = line.split(' = ')
        if line.startswith('pkgbase'):
            base_package.name = splits[1]
            multi_pkgs = True
        elif line.startswith('pkgname'):
            if multi_pkgs:
                if current is not base_package:
                    base_package.subpackages.append(current)
                current = deepcopy(base_package)
            current.name = splits[1]
        elif line.startswith('pkgver'):
            current.pkgver = splits[1]
        elif line.startswith('pkgrel'):
            current.pkgrel = splits[1]
        elif line.startswith('provides'):
            current.provides.append(splits[1])
        elif line.startswith('depends') or line.startswith('makedepends') or line.startswith('checkdepends') or line.startswith('optdepends'):
            current.depends.append(splits[1].split('=')[0].split(': ')[0])
    current.depends = list(set(current.depends))

    results = base_package.subpackages or [base_package]
    for pkg in results:
        pkg.mode = mode
        pkg.version = f'{pkg.pkgver}-{pkg.pkgrel}'
        if not (pkg.pkgver == base_package.pkgver and pkg.pkgrel == base_package.pkgrel):
            raise Exception('subpackage malformed! pkgver differs!')

    return results
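
A rough sketch of how this helper is consumed (packages.py below calls it once per PKGBUILD directory; the directory name here is made up, and native_chroot would come from setup_build_chroot() as in packages.py):

# illustrative sketch, not part of the diff
pkgs = parse_pkgbuild('main/hello-world', native_chroot)   # returns the base package or its subpackages
for pkg in pkgs:
    print(pkg.name, pkg.version, pkg.mode, pkg.names())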

image.py (2 changed lines)

@@ -12,7 +12,7 @@ from subprocess import run, CompletedProcess
from chroot import Chroot, get_device_chroot
from constants import BASE_PACKAGES, DEVICES, FLAVOURS
from config import config
from distro import get_base_distro, get_kupfer_https, get_kupfer_local
from distro.distro import get_base_distro, get_kupfer_https, get_kupfer_local
from packages import build_enable_qemu_binfmt, discover_packages, build_packages
from ssh import copy_ssh_keys
from wrapper import enforce_wrap

packages.py (102 changed lines)

@@ -9,20 +9,14 @@ from joblib import Parallel, delayed
from glob import glob
from shutil import rmtree

from constants import REPOSITORIES, CROSSDIRECT_PKGS, QEMU_BINFMT_PKGS, GCC_HOSTSPECS, ARCHES, Arch, CHROOT_PATHS
from constants import REPOSITORIES, CROSSDIRECT_PKGS, QEMU_BINFMT_PKGS, GCC_HOSTSPECS, ARCHES, Arch, CHROOT_PATHS, MAKEPKG_CMD
from config import config
from chroot import get_build_chroot, Chroot
from ssh import run_ssh_command, scp_put_files
from wrapper import enforce_wrap
from utils import git
from binfmt import register as binfmt_register

makepkg_cmd = [
    'makepkg',
    '--noconfirm',
    '--ignorearch',
    '--needed',
]
from distro.pkgbuild import Pkgbuild as Package, parse_pkgbuild

pacman_cmd = [
    'pacman',

@@ -44,68 +38,6 @@ def get_makepkg_env():
    }


class Package:
    name = ''
    names: list[str] = []
    depends: list[str] = []
    local_depends = None
    repo = ''
    mode = ''

    def __init__(
        self,
        path: str,
        native_chroot: Chroot,
    ) -> None:
        self.path = path
        self._loadinfo(native_chroot)

    def _loadinfo(self, native_chroot: Chroot):
        result = native_chroot.run_cmd(
            makepkg_cmd + ['--printsrcinfo'],
            cwd=os.path.join(CHROOT_PATHS['pkgbuilds'], self.path),
            stdout=subprocess.PIPE,
        )
        lines = result.stdout.decode('utf-8').split('\n')
        names = []
        depends = []
        multi_pkgs = False

        for line_raw in lines:
            line = line_raw.lstrip()
            if line.startswith('pkgbase'):
                self.name = line.split(' = ')[1]
                names.append(self.name)
                multi_pkgs = True
            if line.startswith('pkgname'):
                names.append(line.split(' = ')[1])
                if not multi_pkgs:
                    self.name = line.split(' = ')[1]
            if line.startswith('pkgbase') or line.startswith('provides'):
                names.append(line.split(' = ')[1])
            if line.startswith('depends') or line.startswith('makedepends') or line.startswith('checkdepends') or line.startswith('optdepends'):
                depends.append(line.split(' = ')[1].split('=')[0].split(': ')[0])
        self.names = list(set(names))
        self.depends = list(set(depends))

        self.repo = self.path.split('/')[0]

        mode = ''
        logging.debug(config)
        with open(os.path.join(native_chroot.get_path(CHROOT_PATHS['pkgbuilds']), self.path, 'PKGBUILD'), 'r') as file:
            for line in file.read().split('\n'):
                if line.startswith('_mode='):
                    mode = line.split('=')[1]
                    break
        if mode not in ['host', 'cross']:
            logging.fatal(f'Package {self.path} has an invalid mode configured: \'{mode}\'')
            exit(1)
        self.mode = mode

    def __repr__(self):
        return f'Package({self.name},{repr(self.names)},{repr(self.path)})'


def clone_pkbuilds(pkgbuilds_dir: str, repo_url: str, branch: str, interactive=False, update=True):
    git_dir = os.path.join(pkgbuilds_dir, '.git')
    if not os.path.exists(git_dir):

@@ -162,7 +94,7 @@ def init_prebuilts(arch: Arch, dir: str = None):
        exit(1)


def discover_packages() -> dict[str, Package]:
def discover_packages(parallel: bool = True) -> dict[str, Package]:
    pkgbuilds_dir = config.get_path('pkgbuilds')
    packages = {}
    paths = []

@@ -172,8 +104,20 @@ def discover_packages() -> dict[str, Package]:
            paths.append(os.path.join(repo, dir))

    native_chroot = setup_build_chroot(config.runtime['arch'], add_kupfer_repos=False)
    results = Parallel(n_jobs=multiprocessing.cpu_count() * 4)(delayed(Package)(path, native_chroot) for path in paths)
    results = []

    if parallel:
        chunks = (Parallel(n_jobs=multiprocessing.cpu_count() * 4)(delayed(parse_pkgbuild)(path, native_chroot) for path in paths))
    else:
        chunks = (parse_pkgbuild(path, native_chroot) for path in paths)

    for pkglist in chunks:
        results += pkglist

    logging.debug('Building package dictionary!')
    for package in results:
        if package.name in packages:
            logging.warn(f'Overriding {packages[package.name]} with {package}')
        packages[package.name] = package

    # This filters the deps to only include the ones that are provided in this repo
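
Because parse_pkgbuild() returns a list of Pkgbuild objects per directory, the parallel branch above fans out one joblib job per PKGBUILD directory and flattens the per-directory lists afterwards. A self-contained sketch of that Parallel/delayed pattern with a toy stand-in for parse_pkgbuild:

# illustrative sketch of the fan-out/flatten pattern used above
import multiprocessing
from joblib import Parallel, delayed

def parse(path: str) -> list[str]:
    # toy stand-in: one PKGBUILD directory may yield several (sub)packages
    return [f'{path}:pkg1', f'{path}:pkg2']

paths = ['main/foo', 'cross/bar']
chunks = Parallel(n_jobs=multiprocessing.cpu_count())(delayed(parse)(p) for p in paths)
results = [pkg for chunk in chunks for pkg in chunk]  # flatten the per-directory lists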

@@ -184,7 +128,7 @@ def discover_packages() -> dict[str, Package]:
            for p in packages.values():
                if found:
                    break
                for name in p.names:
                for name in p.names():
                    if dep == name:
                        logging.debug(f'Found {p.name} that provides {dep}')
                        found = True

@@ -221,7 +165,7 @@ def generate_dependency_chain(package_repo: dict[str, Package], to_build: list[P

    def visit(package: Package, visited=visited, visited_names=visited_names):
        visited.add(package)
        visited_names.update(package.names)
        visited_names.update(package.names())

    def join_levels(levels: list[set[Package]]) -> dict[Package, int]:
        result = dict[Package, int]()

@@ -285,7 +229,7 @@ def generate_dependency_chain(package_repo: dict[str, Package], to_build: list[P
                    if type(other_pkg) != Package:
                        raise Exception('Not a Package object:' + repr(other_pkg))
                    for dep_name in other_pkg.depends:
                        if dep_name in pkg.names:
                        if dep_name in pkg.names():
                            dep_levels[level].remove(pkg)
                            dep_levels[level + 1].add(pkg)
                            logging.debug(f'Moving {pkg.name} to level {level+1} because {other_pkg.name} depends on it as {dep_name}')

@@ -380,7 +324,7 @@ def check_package_version_built(package: Package, arch: Arch) -> bool:
        cross=True,
    )

    cmd = ['cd', os.path.join(CHROOT_PATHS['pkgbuilds'], package.path), '&&'] + makepkg_cmd + [
    cmd = ['cd', os.path.join(CHROOT_PATHS['pkgbuilds'], package.path), '&&'] + MAKEPKG_CMD + [
        '--config',
        config_path,
        '--nobuild',

@@ -440,7 +384,7 @@ def setup_sources(package: Package, chroot: Chroot, makepkg_conf_path='/etc/make
    ]

    logging.info(f'Setting up sources for {package.path} in {chroot.name}')
    result = chroot.run_cmd(makepkg_cmd + makepkg_setup_args, cwd=os.path.join(CHROOT_PATHS['pkgbuilds'], package.path))
    result = chroot.run_cmd(MAKEPKG_CMD + makepkg_setup_args, cwd=os.path.join(CHROOT_PATHS['pkgbuilds'], package.path))
    if result.returncode != 0:
        raise Exception(f'Failed to check sources for {package.path}')

@@ -458,7 +402,7 @@ def build_package(
    makepkg_conf_path = 'etc/makepkg.conf'
    repo_dir = repo_dir if repo_dir else config.get_path('pkgbuilds')
    foreign_arch = config.runtime['arch'] != arch
    deps = (list(set(package.depends) - set(package.names)))
    deps = (list(set(package.depends) - set(package.names())))
    target_chroot = setup_build_chroot(
        arch=arch,
        extra_packages=deps,

@@ -531,7 +475,7 @@ def get_unbuilt_package_levels(repo: dict[str, Package], packages: list[Package]
            if ((not check_package_version_built(package, arch)) or set.intersection(set(package.depends), set(build_names)) or
                    (force and package in packages)):
                level.add(package)
                build_names.update(package.names)
                build_names.update(package.names())
        if level:
            build_levels.append(level)
            logging.debug(f'Adding to level {i}:' + '\n' + ('\n'.join([p.name for p in level])))