2022-08-14 04:14:47 +02:00
|
|
|
from __future__ import annotations
|
|
|
|
|
2022-08-19 17:11:56 +02:00
|
|
|
import click
|
|
|
|
import logging
|
|
|
|
import multiprocessing
|
2022-02-17 05:55:35 +01:00
|
|
|
import os
|
|
|
|
import subprocess
|
|
|
|
|
2022-08-19 17:11:56 +02:00
|
|
|
from joblib import Parallel, delayed
|
2022-08-30 00:20:04 +02:00
|
|
|
from typing import Iterable, Optional
|
2022-02-17 05:55:35 +01:00
|
|
|
|
2022-08-19 16:55:17 +02:00
|
|
|
from config import config, ConfigStateHolder
|
2022-08-26 03:07:34 +02:00
|
|
|
from constants import REPOSITORIES
|
2022-08-15 17:41:23 +02:00
|
|
|
from exec.cmd import run_cmd
|
2022-08-15 05:51:34 +02:00
|
|
|
from constants import Arch, MAKEPKG_CMD
|
2022-02-20 20:20:34 +01:00
|
|
|
from distro.package import PackageInfo
|
2022-08-19 17:11:56 +02:00
|
|
|
from logger import setup_logging
|
|
|
|
from utils import git
|
2022-08-20 03:48:15 +02:00
|
|
|
from wrapper import check_programs_wrap
|
2022-08-19 17:11:56 +02:00
|
|
|
|
|
|
|
|
|
|
|
def clone_pkbuilds(pkgbuilds_dir: str, repo_url: str, branch: str, interactive=False, update=True):
    """
    Clone the PKGBUILDs git repo from `repo_url` (branch `branch`) into `pkgbuilds_dir`
    if it isn't checked out there yet, and optionally pull updates.

    If a checkout already exists but is on a different branch, a warning is logged
    and (in interactive mode) the user is offered a branch switch.

    Raises an Exception when cloning, switching branches or pulling fails.
    """
    check_programs_wrap(['git'])
    git_dir = os.path.join(pkgbuilds_dir, '.git')
    if not os.path.exists(git_dir):
        # fix: this message was missing the f-prefix and referenced a nonexistent
        # `repo` variable, so it logged the literal placeholder text
        logging.info(f'Cloning branch {branch} from {repo_url}')
        result = git(['clone', '-b', branch, repo_url, pkgbuilds_dir])
        if result.returncode != 0:
            raise Exception('Error cloning pkgbuilds')
    else:
        result = git(['--git-dir', git_dir, 'branch', '--show-current'], capture_output=True)
        current_branch = result.stdout.decode().strip()
        if current_branch != branch:
            logging.warning(f'pkgbuilds repository is on the wrong branch: {current_branch}, requested: {branch}')
            if interactive and click.confirm('Would you like to switch branches?', default=False):
                result = git(['switch', branch], dir=pkgbuilds_dir)
                if result.returncode != 0:
                    raise Exception('failed switching branches')
    if update:
        if interactive:
            if not click.confirm('Would you like to try updating the PKGBUILDs repo?'):
                return
        result = git(['pull'], pkgbuilds_dir)
        if result.returncode != 0:
            raise Exception('failed to update pkgbuilds')
|
|
|
|
|
|
|
|
|
2022-09-08 01:45:34 +02:00
|
|
|
_pkgbuilds_initialised: bool = False
|
|
|
|
|
|
|
|
|
|
|
|
def init_pkgbuilds(interactive=False, lazy: bool = True):
    """Ensure the PKGBUILDs repo is checked out locally; no-op when `lazy` and already initialised."""
    global _pkgbuilds_initialised
    if _pkgbuilds_initialised and lazy:
        return
    pkgbuilds_conf = config.file.pkgbuilds
    clone_pkbuilds(
        config.get_path('pkgbuilds'),
        pkgbuilds_conf.git_repo,
        pkgbuilds_conf.git_branch,
        interactive=interactive,
        update=False,
    )
    _pkgbuilds_initialised = True
|
2022-02-17 05:55:35 +01:00
|
|
|
|
|
|
|
|
|
|
|
class Pkgbuild(PackageInfo):
    """In-memory representation of a PKGBUILD (or one package of a split PKGBUILD)."""

    name: str
    version: str  # full version string: '{pkgver}-{pkgrel}', see update_version()
    arches: list[Arch]
    depends: list[str]
    provides: list[str]
    replaces: list[str]
    local_depends: list[str]  # subset of `depends` resolvable within our own repos
    repo: str
    mode: str  # build mode from the PKGBUILD's `_mode=` line; 'host' or 'cross'
    path: str  # path relative to the pkgbuilds dir, e.g. 'cross/mypkg'
    pkgver: str
    pkgrel: str
    description: str
    sources_refreshed: bool

    def __init__(
        self,
        relative_path: str,
        arches: Iterable[Arch] = (),
        depends: Iterable[str] = (),
        provides: Iterable[str] = (),
        replaces: Iterable[str] = (),
        repo: Optional[str] = None,
        sources_refreshed: bool = False,
    ) -> None:
        """
        Create new Pkgbuild representation for file located at `{relative_path}/PKGBUILD`.
        `relative_path` will be stored in `self.path`.
        """
        # NOTE(review): the defaults used to be mutable lists (`[]`); they were
        # always copied below so no sharing bug manifested, but immutable tuple
        # defaults remove the footgun entirely while accepting the same calls.
        self.name = os.path.basename(relative_path)
        self.version = ''
        self.arches = list(arches)
        self.depends = list(depends)
        self.provides = list(provides)
        self.replaces = list(replaces)
        self.local_depends = []
        self.repo = repo or ''
        self.mode = ''
        self.path = relative_path
        self.pkgver = ''
        self.pkgrel = ''
        self.description = ''
        self.sources_refreshed = sources_refreshed

    def __repr__(self):
        return ','.join([
            'Pkgbuild(' + self.name,
            repr(self.path),
            self.version + ("🔄" if self.sources_refreshed else ""),
            self.mode + ')',
        ])

    def names(self):
        """Return all names this package can be referred to by (own name + provides + replaces)."""
        return list(set([self.name] + self.provides + self.replaces))

    def update_version(self):
        """updates `self.version` from `self.pkgver` and `self.pkgrel`"""
        self.version = f'{self.pkgver}-{self.pkgrel}'

    def update(self, pkg: Pkgbuild):
        """Copy all fields from `pkg` into `self`; list fields are copied, not shared."""
        self.version = pkg.version
        self.arches = list(pkg.arches)
        self.depends = list(pkg.depends)
        self.provides = list(pkg.provides)
        self.replaces = list(pkg.replaces)
        self.local_depends = list(pkg.local_depends)
        self.repo = pkg.repo
        self.mode = pkg.mode
        self.path = pkg.path
        self.pkgver = pkg.pkgver
        self.pkgrel = pkg.pkgrel
        self.description = pkg.description
        # once refreshed, a package stays refreshed
        self.sources_refreshed = self.sources_refreshed or pkg.sources_refreshed
        self.update_version()

    def refresh_sources(self):
        # implemented by subclasses (Pkgbase re-parses; SubPkgbuild delegates)
        raise NotImplementedError()

    def get_filename(self, arch: Arch):
        """Return the built package's file name for `arch` ('any' for arch-independent pkgs)."""
        if not self.version:
            self.update_version()
        # NOTE(review): assumes `self.arches` is non-empty — raises IndexError otherwise
        if self.arches[0] == 'any':
            arch = 'any'
        return f'{self.name}-{self.version}-{arch}.pkg.tar.zst'
|
|
|
|
|
2022-08-26 03:28:17 +02:00
|
|
|
|
2022-02-17 19:34:58 +01:00
|
|
|
class Pkgbase(Pkgbuild):
    """The pkgbase entry of a PKGBUILD; owns the list of its subpackages."""

    subpackages: list[SubPkgbuild]

    def __init__(self, relative_path: str, subpackages: list[SubPkgbuild] = [], **args):
        self.subpackages = list(subpackages)
        super().__init__(relative_path, **args)

    def update(self, pkg: Pkgbuild):
        """Copy all fields (including subpackages) from `pkg`, merging subpackages by name."""
        if not isinstance(pkg, Pkgbase):
            raise Exception(f"Tried to update pkgbase {self.name} with non-base pkg {pkg}")
        Pkgbuild.update(self, pkg)
        known = {sub.name: sub for sub in self.subpackages}
        self.subpackages.clear()
        for incoming in pkg.subpackages:
            existing = known.get(incoming.name)
            if existing is None:
                # brand-new subpackage: adopt the incoming object as-is
                known[incoming.name] = incoming
                merged = incoming
            else:
                # already known: update the existing object in place
                existing.update(incoming)
                merged = existing
            merged.sources_refreshed = self.sources_refreshed
            self.subpackages.append(merged)

    def refresh_sources(self, lazy: bool = True):
        '''
        Reloads the pkgbuild from disk.
        Does **NOT** actually perform the makepkg action to refresh the pkgver() first!
        '''
        if lazy and self.sources_refreshed:
            return
        parsed = parse_pkgbuild(self.path, sources_refreshed=True)
        basepkgs = [p for p in parsed if isinstance(p, Pkgbase)]
        if len(basepkgs) != 1:
            raise Exception(f"error refreshing {self.name}: wrong number of base packages found: {basepkgs}")
        self.sources_refreshed = True
        self.update(basepkgs[0])
|
|
|
|
|
2022-02-17 19:34:58 +01:00
|
|
|
|
2022-08-14 04:14:47 +02:00
|
|
|
class SubPkgbuild(Pkgbuild):
    """One package of a split PKGBUILD; source refreshes are delegated to its pkgbase."""

    pkgbase: Pkgbase

    def __init__(self, name: str, pkgbase: Pkgbase):
        # Deliberately does not call Pkgbuild.__init__(): every field is
        # inherited from the owning pkgbase via update() instead.
        self.pkgbase = pkgbase
        self.name = name
        self.sources_refreshed = False
        self.update(pkgbase)
        # provides/replaces are per-subpackage and must not be inherited
        self.provides = []
        self.replaces = []

    def refresh_sources(self, lazy: bool = True):
        """Refresh by re-parsing the owning pkgbase (which recreates its subpackages)."""
        assert self.pkgbase
        self.pkgbase.refresh_sources(lazy=lazy)
|
|
|
|
|
2022-08-14 04:14:47 +02:00
|
|
|
|
2022-08-26 03:28:17 +02:00
|
|
|
def parse_pkgbuild(relative_pkg_dir: str, _config: Optional[ConfigStateHolder] = None, sources_refreshed: bool = False) -> list[Pkgbuild]:
    """
    Parse `{relative_pkg_dir}/PKGBUILD` into a list of Pkgbuild objects,
    one per (sub)package, using `makepkg --printsrcinfo`.

    Since function may run in a different subprocess, we need to be passed the config via parameter

    Raises an Exception when the PKGBUILD has no valid `_mode=` line or when a
    subpackage's version disagrees with the base package's.
    """
    global config
    if _config:
        config = _config
    setup_logging(verbose=config.runtime.verbose, log_setup=False)  # different thread needs log setup.
    logging.info(f"Parsing PKGBUILD for {relative_pkg_dir}")
    pkgbuilds_dir = config.get_path('pkgbuilds')
    pkgdir = os.path.join(pkgbuilds_dir, relative_pkg_dir)
    filename = os.path.join(pkgdir, 'PKGBUILD')
    # fix: this debug line used to log a constant placeholder instead of the file being parsed
    logging.debug(f"Parsing {filename}")
    # read the build mode ('host' or 'cross') from the PKGBUILD's `_mode=` line
    mode = None
    with open(filename, 'r') as file:
        for line in file.read().split('\n'):
            if line.startswith('_mode='):
                mode = line.split('=')[1]
                break
    if mode not in ['host', 'cross']:
        raise Exception((f'{relative_pkg_dir}/PKGBUILD has {"no" if mode is None else "an invalid"} mode configured') +
                        (f': "{mode}"' if mode is not None else ''))

    base_package = Pkgbase(relative_pkg_dir, sources_refreshed=sources_refreshed)
    base_package.mode = mode
    base_package.repo = relative_pkg_dir.split('/')[0]
    # let makepkg expand the PKGBUILD into machine-readable .SRCINFO key/value lines
    srcinfo = run_cmd(
        MAKEPKG_CMD + ['--printsrcinfo'],
        cwd=pkgdir,
        stdout=subprocess.PIPE,
    )
    assert (isinstance(srcinfo, subprocess.CompletedProcess))
    lines = srcinfo.stdout.decode('utf-8').split('\n')

    # walk the srcinfo: a `pkgname` line starts a new subpackage; all other
    # keys apply to whichever package is "current"
    current: Pkgbuild = base_package
    multi_pkgs = False
    for line_raw in lines:
        line = line_raw.strip()
        if not line:
            continue
        splits = line.split(' = ')
        if line.startswith('pkgbase'):
            base_package.name = splits[1]
        elif line.startswith('pkgname'):
            current = SubPkgbuild(splits[1], base_package)
            assert isinstance(base_package.subpackages, list)
            base_package.subpackages.append(current)
            if current.name != base_package.name:
                multi_pkgs = True
        elif line.startswith('pkgver'):
            current.pkgver = splits[1]
        elif line.startswith('pkgrel'):
            current.pkgrel = splits[1]
        elif line.startswith('pkgdesc'):
            current.description = splits[1]
        elif line.startswith('arch'):
            current.arches.append(splits[1])
        elif line.startswith('provides'):
            current.provides.append(splits[1])
        elif line.startswith('replaces'):
            current.replaces.append(splits[1])
        elif line.startswith('depends') or line.startswith('makedepends') or line.startswith('checkdepends') or line.startswith('optdepends'):
            # strip version constraints ("=...") and optdepends descriptions (": ...")
            current.depends.append(splits[1].split('=')[0].split(': ')[0])

    results: list[Pkgbuild] = list(base_package.subpackages)
    if multi_pkgs:
        logging.debug(f" Split package detected: {base_package.name}: {results}")

    base_package.update_version()
    for pkg in results:
        assert isinstance(pkg, Pkgbuild)
        pkg.depends = list(set(pkg.depends))  # deduplicate dependencies
        pkg.update_version()
        if not (pkg.version == base_package.version):
            raise Exception(f'Subpackage malformed! Versions differ! base: {base_package}, subpackage: {pkg}')
    return results
|
2022-08-19 17:11:56 +02:00
|
|
|
|
|
|
|
|
|
|
|
# module-level caches populated by get_pkgbuild_by_path() / discover_pkgbuilds():
# package name -> Pkgbuild
_pkgbuilds_cache = dict[str, Pkgbuild]()
# relative PKGBUILD path -> all Pkgbuilds parsed from it
_pkgbuilds_paths = dict[str, list[Pkgbuild]]()
# True once discover_pkgbuilds() has scanned all repositories
_pkgbuilds_scanned: bool = False
|
|
|
|
|
|
|
|
|
2022-08-26 03:28:17 +02:00
|
|
|
def get_pkgbuild_by_path(relative_path: str, lazy: bool = True, _config: Optional[ConfigStateHolder] = None) -> list[Pkgbuild]:
    """Parse the PKGBUILD at `relative_path`, reusing the module cache when `lazy`."""
    global _pkgbuilds_cache, _pkgbuilds_paths
    if lazy:
        cached = _pkgbuilds_paths.get(relative_path)
        if cached is not None:
            return cached
    parsed = parse_pkgbuild(relative_path, _config=_config)
    _pkgbuilds_paths[relative_path] = parsed
    _pkgbuilds_cache.update({pkg.name: pkg for pkg in parsed})
    return parsed
|
|
|
|
|
|
|
|
|
2022-09-08 01:48:08 +02:00
|
|
|
def get_pkgbuild_by_name(name: str, lazy: bool = True):
    """
    Look up a single Pkgbuild by package name, scanning all PKGBUILDs if needed.

    Raises an Exception when no PKGBUILD provides `name`.
    """
    if lazy and name in _pkgbuilds_cache:
        return _pkgbuilds_cache[name]
    if _pkgbuilds_scanned and lazy:
        raise Exception(f"couldn't find PKGBUILD for package with name {name}")
    packages = discover_pkgbuilds(lazy=lazy)
    assert _pkgbuilds_scanned
    # fix: this used to tail-recurse with unchanged arguments; with lazy=False
    # the cache shortcut and the scanned-guard never fire, so a missing (or even
    # present) name recursed without bound. Look the result up directly instead.
    if name not in packages:
        raise Exception(f"couldn't find PKGBUILD for package with name {name}")
    return packages[name]
|
|
|
|
|
|
|
|
|
2022-08-19 17:11:56 +02:00
|
|
|
def discover_pkgbuilds(parallel: bool = True, lazy: bool = True) -> dict[str, Pkgbuild]:
    """
    Scan every repository dir for PKGBUILDs, parse them (in parallel by default)
    and return a dict mapping package names (including `replaces` names) to Pkgbuilds.

    Results are stored in module-level caches; `lazy` reuses them when possible.
    """
    global _pkgbuilds_cache, _pkgbuilds_scanned
    if lazy and _pkgbuilds_scanned:
        logging.debug("Reusing cached pkgbuilds repo")
        return _pkgbuilds_cache.copy()
    pkgbuilds_dir = config.get_path('pkgbuilds')
    packages: dict[str, Pkgbuild] = {}
    paths = []
    init_pkgbuilds(interactive=False)
    for repo in REPOSITORIES:
        for dir in os.listdir(os.path.join(pkgbuilds_dir, repo)):
            p = os.path.join(repo, dir)
            if not os.path.exists(os.path.join(pkgbuilds_dir, p, 'PKGBUILD')):
                logging.warning(f"{p} doesn't include a PKGBUILD file; skipping")
                continue
            paths.append(p)

    logging.info("Parsing PKGBUILDs")

    results = []
    if parallel:
        paths_filtered = paths
        if lazy:
            # filter out cached packages as the caches don't cross process boundaries
            paths_filtered = []
            for p in paths:
                if p in _pkgbuilds_paths:
                    # use cache
                    results += _pkgbuilds_paths[p]
                else:
                    paths_filtered += [p]
        chunks = (Parallel(n_jobs=multiprocessing.cpu_count() * 4)(
            delayed(get_pkgbuild_by_path)(path, lazy=lazy, _config=config) for path in paths_filtered))
    else:
        chunks = (get_pkgbuild_by_path(path, lazy=lazy) for path in paths)

    _pkgbuilds_paths.clear()
    # one list of packages per path
    for pkglist in chunks:
        _pkgbuilds_paths[pkglist[0].path] = pkglist
        results += pkglist

    logging.info('Building package dictionary')
    for package in results:
        for name in [package.name] + package.replaces:
            if name in packages:
                # fix: this used to log `packages[package.name]`, which names the
                # wrong package and can KeyError when `name` comes from `replaces`
                logging.warning(f'Overriding {packages[name]} with {package}')
            packages[name] = package

    # This filters the deps to only include the ones that are provided in this repo
    for package in packages.values():
        package.local_depends = package.depends.copy()
        for dep in package.depends.copy():
            found = dep in packages
            for pkg in packages.values():
                if found:
                    break
                if dep in pkg.names():
                    logging.debug(f'Found {pkg.name} that provides {dep}')
                    found = True
                    break
            if not found:
                logging.debug(f'Removing {dep} from dependencies')
                package.local_depends.remove(dep)

    _pkgbuilds_cache.clear()
    _pkgbuilds_cache.update(packages)
    _pkgbuilds_scanned = True
    return packages
|
2022-08-30 00:20:04 +02:00
|
|
|
|
|
|
|
|
|
|
|
def filter_pkgbuilds(
    paths: Iterable[str],
    repo: Optional[dict[str, Pkgbuild]] = None,
    arch: Optional[Arch] = None,
    allow_empty_results=True,
    use_paths=True,
    use_names=True,
) -> Iterable[Pkgbuild]:
    """
    Select Pkgbuilds from `repo` (defaults to discover_pkgbuilds()) whose
    relative path and/or name matches one of the queries in `paths`.

    The special query 'all' selects every package, optionally filtered by `arch`.
    Raises an Exception when matching is impossible or (with
    `allow_empty_results=False`) when the query or result set is empty.
    """
    if not (use_names or use_paths):
        raise Exception('Error: filter_packages instructed to match neither by names nor paths; impossible!')
    if not allow_empty_results and not paths:
        raise Exception("Can't search for packages: no query given")
    repo = repo or discover_pkgbuilds()
    if 'all' in paths:
        all_pkgs = list(repo.values())
        if arch:
            all_pkgs = [pkg for pkg in all_pkgs if set([arch, 'any']).intersection(pkg.arches)]
        return all_pkgs
    result = []
    for pkg in repo.values():
        comparison = set()
        if use_paths:
            comparison.add(pkg.path)
        if use_names:
            comparison.add(pkg.name)
        matches = list(comparison.intersection(paths))
        if matches:
            assert pkg.arches
            if arch and not set([arch, 'any']).intersection(pkg.arches):
                # fix: logging.warn() is a deprecated alias of logging.warning()
                logging.warning(f"Pkg {pkg.name} matches query {matches[0]} but isn't available for architecture {arch}: {pkg.arches}")
                continue
            result += [pkg]

    if not allow_empty_results and not result:
        raise Exception('No packages matched by paths: ' + ', '.join([f'"{p}"' for p in paths]))
    return result
|