Compare commits
15 Commits
prawn/pack...prawn/imag
| SHA1 |
|---|
| d2fe124d6f |
| e758e3c343 |
| 7955842da9 |
| c7084895d6 |
| dc59378243 |
| cec828553d |
| 45eba305cb |
| 60d8cb77ea |
| 6fa717ce64 |
| 795878cfeb |
| 0693792791 |
| 9e81fbf345 |
| 12b414fe79 |
| 61a9b53c5f |
| db4fbc083a |
.gitignore (vendored): 1 line changed
```diff
@@ -1,3 +1,4 @@
 venv/
 __pycache__/
 .coverage*
+*.xml
```
```diff
@@ -70,24 +70,66 @@ push_docker:
   except:
 
 .docs:
-  image: "${CI_REGISTRY_IMAGE}:dev"
-  before_script:
+  image: "registry.gitlab.com/kupfer/kupferbootstrap:dev"
+  variables:
+    DOCS_SPHINXARGS: '-W'
+    DOCS_MAKE_TARGET: "html"
+    DOCS_MAKE_THREADS: 6
+  before_script: &docs_before_script
     - pip install -r requirements.txt -r docs/requirements.txt
-  script:
-    - (cd docs && SPHINXARGS='-W' make)
-    - mv docs/html public
+  script: &docs_script
+    - make -C docs -j$DOCS_MAKE_THREADS SPHINXARGS="$DOCS_SPHINXARGS" $DOCS_MAKE_TARGET
+    - mv "docs/$DOCS_MAKE_TARGET" public
+    - if [[ -e docs/archived ]]; then cp -r docs/archived public/ ; fi
+    - rm -vf docs/archived/{main,dev,"$CI_COMMIT_REF_NAME"}.tar.gz # we want to cache only old tags as they won't change
   after_script:
   artifacts:
     paths:
       - public
+  cache:
+    key: docs
+    paths:
+      - docs/archived/*.tar.gz
 
 build_docs:
   stage: build
   extends: .docs
   except:
-    - main
-    - dev
+    refs:
+      - main
+      - dev
+      - docs
+    variables:
+      - '$CI_COMMIT_MESSAGE =~ /ci-kbs-docs-build-full/'
+      - '$KBS_DOCS_FULL_BUILD == "1"'
+
+build_docs_all:
+  stage: build
+  extends: pages
+  resource_group: $CI_COMMIT_SHA
+  script:
+    - (cd docs && make SPHINXARGS="$DOCS_SPHINXARGS -D 'version=$CI_COMMIT_REF_NAME'" && mkdir -p versions && cp -r html versions/$CI_COMMIT_REF_SLUG)
+    - *docs_script
+  only:
+    refs:
+      - branches
+    variables:
+      - '$CI_COMMIT_MESSAGE =~ /ci-kbs-docs-build-full/'
+      - '$KBS_DOCS_FULL_BUILD == "1"'
+      - '$CI_COMMIT_REF_NAME == "docs"'
+  except:
+    - main
+    - dev
 
 pages:
   stage: deploy
   extends: .docs
   only:
     - main
     - dev
+  variables:
+    DOCS_MAKE_TARGET: versions
+  resource_group: docs
+  before_script:
+    - git remote update
+    - *docs_before_script
```
```diff
@@ -37,7 +37,8 @@ REPOSITORIES = [
 ]
 
 DEFAULT_PACKAGE_BRANCH = 'dev'
-KUPFER_HTTPS = 'https://gitlab.com/kupfer/packages/prebuilts/-/raw/%branch%/$arch/$repo'
+KUPFER_HTTPS_BASE = 'https://gitlab.com/kupfer/packages/prebuilts/-/raw/%branch%'
+KUPFER_HTTPS = KUPFER_HTTPS_BASE + '/$arch/$repo'
 
 Arch: TypeAlias = str
 ARCHES = [
@@ -160,5 +161,8 @@ SRCINFO_FILE = '.SRCINFO'
 SRCINFO_METADATA_FILE = '.srcinfo_meta.json'
 SRCINFO_INITIALISED_FILE = ".srcinfo_initialised.json"
 
+SRCINFO_TARBALL_FILE = "srcinfos.tar.gz"
+SRCINFO_TARBALL_URL = f'{KUPFER_HTTPS_BASE}/{SRCINFO_TARBALL_FILE}'
+
 FLAVOUR_INFO_FILE = 'flavourinfo.json'
 FLAVOUR_DESCRIPTION_PREFIX = 'kupfer flavour:'
```
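The two new tarball constants compose with the base URL rather than the fully templated `KUPFER_HTTPS`. A quick sanity check of that composition, with the f-string from the hunk above expanded by hand:

```python
# Constants copied from the hunk above; the assertion spells out the result.
KUPFER_HTTPS_BASE = 'https://gitlab.com/kupfer/packages/prebuilts/-/raw/%branch%'
SRCINFO_TARBALL_FILE = "srcinfos.tar.gz"
SRCINFO_TARBALL_URL = f'{KUPFER_HTTPS_BASE}/{SRCINFO_TARBALL_FILE}'

# the %branch% placeholder is still unresolved at this point; get_kupfer_url()
# (added in the next hunk) substitutes it later
assert SRCINFO_TARBALL_URL == 'https://gitlab.com/kupfer/packages/prebuilts/-/raw/%branch%/srcinfos.tar.gz'
```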
```diff
@@ -95,10 +95,16 @@ _kupfer_local = dict[Arch, LocalDistro]()
 _kupfer_local_chroots = dict[Arch, LocalDistro]()
 
 
+def get_kupfer_url(url: str = KUPFER_HTTPS, branch: Optional[str] = None) -> str:
+    """gets the repo URL for `branch`, getting branch from config if `None` is passed."""
+    branch = config.file.pacman.repo_branch if branch is None else branch
+    return url.replace('%branch%', branch)
+
+
 def get_kupfer_https(arch: Arch, scan: bool = False) -> RemoteDistro:
     global _kupfer_https
     if arch not in _kupfer_https or not _kupfer_https[arch]:
-        kupfer = get_kupfer(arch, KUPFER_HTTPS.replace('%branch%', config.file.pacman.repo_branch), scan)
+        kupfer = get_kupfer(arch, get_kupfer_url(), scan)
         assert isinstance(kupfer, RemoteDistro)
         _kupfer_https[arch] = kupfer
     item = _kupfer_https[arch]
```
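A standalone sketch of the `%branch%` substitution that `get_kupfer_url()` centralises; the config lookup is replaced here by the `DEFAULT_PACKAGE_BRANCH` value (`'dev'`) so the snippet runs on its own:

```python
from typing import Optional

KUPFER_HTTPS_BASE = 'https://gitlab.com/kupfer/packages/prebuilts/-/raw/%branch%'
KUPFER_HTTPS = KUPFER_HTTPS_BASE + '/$arch/$repo'
DEFAULT_PACKAGE_BRANCH = 'dev'

def get_kupfer_url(url: str = KUPFER_HTTPS, branch: Optional[str] = None) -> str:
    # stand-in for the config.file.pacman.repo_branch lookup in the real function
    branch = DEFAULT_PACKAGE_BRANCH if branch is None else branch
    return url.replace('%branch%', branch)

assert get_kupfer_url(branch='main') == \
    'https://gitlab.com/kupfer/packages/prebuilts/-/raw/main/$arch/$repo'
```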
docs/.gitignore (vendored): 3 lines changed
```diff
@@ -2,3 +2,6 @@
 .doctrees
 html
 source/cli
+checkouts
+versions
+archived
```
```diff
@@ -1,16 +1,72 @@
-buildargs := -b dirhtml -aE source html
+buildargs := -b dirhtml -aE source
 
-.PHONY: cleanbuild clean
+.PHONY: cleanbuild clean serve serve_versions versions versions_git versions_index
+.NOTINTERMEDIATE:
+.PRECIOUS: versions/index.html versions/%/index.html checkouts/%/docs/html/index.html archived/%.tar.gz
+
+BRANCHES := main dev
+TAGS := $(shell git tag)
+FILTERTED_TAGS := $(foreach tag,$(TAGS),$(shell if [[ -n "$$(git log --max-count=1 --oneline "$(tag)" -- .)" ]]; then echo "$(tag)"; fi))
+VERSIONS := $(BRANCHES) $(FILTERTED_TAGS)
 
 cleanbuild:
-	@make clean
-	@make html
+	@$(MAKE) clean
+	@$(MAKE) html
 
 clean:
-	rm -rf html source/cli .buildinfo .doctrees
+	rm -rf html source/cli .buildinfo .doctrees versions checkouts
 
 html:
-	sphinx-build $(SPHINXARGS) $(buildargs)
+	sphinx-build $(SPHINXARGS) $(buildargs) html
 
 serve: html
-	(cd html && python -m http.server 9999)
+	cd html && python -m http.server 9999
+
+checkouts/%/docs/html/index.html:
+	@mkdir -p checkouts
+	@# use backslashed multi-line cmd because otherwise variables will be lost
+	@branch="$$(echo "$(@D)" | sed 's|^checkouts/||g;s|/docs/html$$||g')" && \
+	ref="$$branch" && \
+	if ! git log --max-count=1 --oneline "$$branch" >/dev/null 2>/dev/null ; then \
+		commit="$$(git ls-remote origin refs/{tags,heads}/"$$branch" | cut -f 1)" ; \
+		[[ -n "$$commit" ]] && echo "found commit $$commit for $$branch" >&2 && \
+		ref="$$commit" && git branch -f "$$branch" "$$ref" ; \
+	fi && \
+	[[ -n "$$(git log --max-count=1 --oneline "$$ref" -- .)" ]] || \
+	(echo "ERROR: branch '$$branch' seems to have no docs/ dir, checked ref '$$ref'" >&2 && exit 1) && \
+	checkout="checkouts/$$branch" && \
+	ver="$$(echo "$$branch" | sed 's|^v\([0-9]\)|\1|g')" && \
+	set -x && \
+	([[ -e "$$checkout/.git" ]] || git clone .. "$$checkout" ) && \
+	(! [[ -e "$$checkout/docs/source/conf.py" ]] || echo "version = '$$ver'" >> "$$checkout/docs/source/conf.py") && \
+	$(MAKE) -C "$$checkout/docs" SPHINXARGS="-D version=$$ver"
+
+archived/%.tar.gz: checkouts/%/docs/html/index.html
+	mkdir -p archived
+	tar -C "checkouts/$*/docs/html" -czf "$@" .
+
+versions/%/index.html: archived/%.tar.gz
+	@mkdir -p "$(@D)"
+	@echo "working on version '$*'"
+	tar -xf "archived/$*.tar.gz" -C "$(@D)"
+	@# ensure index file exists and update its timestamp for Make's dependency detection
+	[[ -e "$(@)" ]] && touch "$(@)"
+
+versions/versions.css: versjon/versions.css
+	@mkdir -p versions
+	cp versjon/versions.css versions/
+
+versions_git:
+	@$(MAKE) $(patsubst %, versions/%/index.html, $(VERSIONS))
+
+versions/index.html: $(sort $(wildcard versions/*/index.html))
+	rm -rf versions/stable
+	@cd versions && set -x && versjon --stable-version main --user_templates ../versjon
+	@# ensure the global index.html exists and is newer than each version's index.html
+	[[ -e "$(@)" ]] && touch "$(@)"
+
+versions: versions_git versions/versions.css
+	@$(MAKE) versions/index.html
+
+serve_versions: versions/index.html
+	cd versions && python -m http.server 9888
```
```diff
@@ -2,3 +2,4 @@ sphinx-click
 myst-parser
 # furo sphinx theme
 furo
+versjon<=2.3.0
```
docs/versjon/footer.html: new file, 58 lines
```diff
@@ -0,0 +1,58 @@
+{# FORMAT_VERSION #}
+
+{% macro format_version(version) %}
+    {% if page in version.html_files %}
+        {% set version_path = page_root + docs_path[version.name] + "/" + page %}
+    {% else %}
+        {% set version_path = page_root + docs_path[version.name] %}
+    {% endif %}
+    {% if current == version.name %}
+        <strong>
+            <dd><a href="{{ version_path }}">{{ version.name }}</a></dd>
+        </strong>
+    {% else %}
+        <dd><a href="{{ version_path }}">{{ version.name }}</a></dd>
+    {% endif %}
+{% endmacro %}
+
+<div id="versjon-overlay">
+    <button type="button" class="versjon">
+        <svg xmlns="http://www.w3.org/2000/svg" id="branch-icon" class="ionicon" viewBox="0 0 512 512">
+            <!-- Taken from Ionic, MIT licensed. Copyright (c) 2015-present Ionic (http://ionic.io/) -->
+            <title>Git Branch</title><circle cx="160" cy="96" r="48" fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="32"/><circle cx="160" cy="416" r="48" fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="32"/><path fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M160 368V144"/><circle cx="352" cy="160" r="48" fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="32"/><path d="M352 208c0 128-192 48-192 160" fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="32"/>
+        </svg>
+        Version: {{current}}
+    </button>
+    <div class="versjon-content">
+        <div class="versjon-content-inner">
+            <dl>
+                <dl>
+                    <dt>Branches</dt>
+                    {% for version in other %}
+                        {{ format_version(version) | indent(16) }}
+                    {% endfor %}
+                </dl>
+                <dt>Versions</dt>
+                {% for version in semver %}
+                    {{ format_version(version) | indent(16) }}
+                {% endfor %}
+            </dl>
+        </div>
+    </div>
+</div>
+<script>
+    var coll = document.getElementsByClassName("versjon");
+    var i;
+
+    for (i = 0; i < coll.length; i++) {
+        coll[i].addEventListener("click", function () {
+            this.classList.toggle("active");
+            var content = this.nextElementSibling;
+            if (content.style.maxHeight) {
+                content.style.maxHeight = null;
+            } else {
+                content.style.maxHeight = content.scrollHeight + "px";
+            }
+        });
+    }
+</script>
```
docs/versjon/head.html: new file, 1 line
```diff
@@ -0,0 +1 @@
+<link href="{{ page_root }}versions.css" rel="stylesheet" type="text/css">
```
docs/versjon/header.html: new file, 11 lines
```diff
@@ -0,0 +1,11 @@
+{% if stable and (stable.name|default("")) != current %}
+    {% if page in stable.html_files %}
+        {% set stable_path = page_root + docs_path[stable.name] + "/" + page %}
+    {% else %}
+        {% set stable_path = page_root + docs_path[stable.name] %}
+    {% endif %}
+    <p class="versjon-{% if is_semver %}old{% else %}dev{% endif %}-warning">
+        <strong>Warning:</strong> These docs are for version <b>{{current}}</b>. The docs for the latest stable version are at
+        <b> <a href="{{ stable_path }}">{{ stable.name }}</a> </b>.
+    </p>
+{% endif %}
```
docs/versjon/versions.css: new file, 99 lines
```diff
@@ -0,0 +1,99 @@
+.versjon {
+    cursor: pointer;
+    padding: 10px;
+    width: 100%;
+    border: none;
+    text-align: left;
+    outline: none;
+    font-size: 15px;
+    background: var(--color-code-background);
+    color: var(--color-code-foreground);
+    transition: background-color 0.1s linear;
+}
+
+.versjon:hover {
+    background-color: var(--color-highlighted-background);
+}
+
+.versjon:after {
+    content: '\002B';
+    font-weight: bold;
+    float: right;
+    margin-left: 5px;
+}
+
+.versjon:active:after {
+    content: "\2212";
+}
+
+.versjon-content {
+    max-height: 0;
+    overflow: hidden;
+    transition: max-height 0.2s ease-out;
+}
+
+.versjon-content-inner {
+    padding: 10px 18px
+}
+
+#versjon-overlay {
+    position: fixed;
+    z-index: 100;
+    bottom: 0px;
+    right: 0px;
+    width: 250px;
+    background: var(--color-code-background);
+    max-height: 100%;
+    overflow: scroll;
+}
+
+p.versjon-old-warning {
+    margin: 10px 0;
+    padding: 5px 10px;
+    border-radius: 4px;
+
+    letter-spacing: 1px;
+    color: #fff;
+    text-shadow: 0 0 2px #000;
+    text-align: center;
+
+    background: #d40 repeating-linear-gradient(135deg,
+            transparent,
+            transparent 56px,
+            rgba(255, 255, 255, 0.2) 56px,
+            rgba(255, 255, 255, 0.2) 112px);
+}
+
+p.versjon-old-warning a {
+    color: #fff;
+    border-color: #fff;
+}
+
+p.versjon-dev-warning {
+    margin: 10px 0;
+    padding: 5px 10px;
+    border-radius: 4px;
+
+    letter-spacing: 1px;
+    color: #fff;
+    text-shadow: 0 0 2px #000;
+    text-align: center;
+
+    background: #E67300 repeating-linear-gradient(135deg,
+            transparent,
+            transparent 56px,
+            rgba(255, 255, 255, 0.2) 56px,
+            rgba(255, 255, 255, 0.2) 112px);
+}
+
+p.versjon-dev-warning a {
+    color: #fff;
+    border-color: #fff;
+}
+
+#branch-icon {
+    width: 1em;
+    height: 1em;
+    background-size: contain;
+    background-repeat: no-repeat;
+}
```
```diff
@@ -3,6 +3,7 @@ import os
 import pwd
 import subprocess
 
+from subprocess import CompletedProcess # make it easy for users of this module
 from shlex import quote as shell_quote
 from typing import Optional, Union, TypeAlias
```
```diff
@@ -90,7 +91,7 @@ def run_cmd(
     elevation_method: Optional[ElevationMethod] = None,
     stdout: Optional[int] = None,
     stderr=None,
-) -> Union[subprocess.CompletedProcess, int]:
+) -> Union[CompletedProcess, int]:
     "execute `script` as `switch_user`, elevating and su'ing as necessary"
     kwargs: dict = {}
     env_cmd = []
```
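The re-export matters because callers type-check `run_cmd()` results; a usage sketch matching the pattern used elsewhere in this diff (module path as in this repo, the command itself made up):

```python
from exec.cmd import run_cmd, CompletedProcess

res = run_cmd(['tar', 'xf', 'archive.tar.gz'])  # hypothetical command
assert isinstance(res, CompletedProcess)  # no `import subprocess` needed by the caller
if res.returncode:
    raise Exception(f"command failed with exit code {res.returncode}")
```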
```diff
@@ -452,10 +452,10 @@ def cmd_build(profile_name: Optional[str] = None,
 
 
 @cmd_image.command(name='inspect')
-@click.option('--shell', '-s', is_flag=True)
+@click.option('--shell', '-s', help="Open a shell in the image's rootfs", is_flag=True)
 @click.argument('profile', required=False)
 def cmd_inspect(profile: Optional[str] = None, shell: bool = False):
-    """Open a shell in a device image"""
+    """Loop-mount the device image for inspection."""
     config.enforce_profile_device_set()
     config.enforce_profile_flavour_set()
     enforce_wrap()
```
```diff
@@ -10,13 +10,16 @@ from constants import SRCINFO_METADATA_FILE
 from exec.cmd import run_cmd
 from exec.file import get_temp_dir
 from logger import setup_logging
-from packages.cli import cmd_build, cmd_clean, cmd_update
+from packages.cli import SRCINFO_CACHE_FILES, cmd_build, cmd_clean, cmd_init, cmd_update
 from utils import git_get_branch
 
 tempdir = None
 config.try_load_file()
 setup_logging(True)
 
+PKG_TEST_PATH = 'device/device-sdm845-oneplus-enchilada'
+PKG_TEST_NAME = 'device-sdm845-xiaomi-beryllium-ebbg'
+
 
 @pytest.fixture()
 def ctx() -> click.Context:
@@ -54,7 +57,7 @@ def test_packages_update(ctx: click.Context):
     for branch, may_fail in branches.items():
         config.file.pkgbuilds.git_branch = branch
         try:
-            ctx.invoke(cmd_update, non_interactive=True, switch_branch=True, discard_changes=True)
+            ctx.invoke(cmd_init, update=True, non_interactive=True, switch_branch=True, discard_changes=True, init_caches=False)
         except Exception as ex:
             print(f'may_fail: {may_fail}; Exception: {ex}')
             if not may_fail:
@@ -73,15 +76,20 @@ def test_packages_clean(ctx: click.Context):
     ctx.invoke(cmd_clean, what=['git'], force=True)
 
 
-def build_pkgs(_ctx: click.Context, query: list[str], arch: str = 'aarch64'):
-    _ctx.invoke(cmd_build, paths=query, arch=arch)
+def test_packages_cache_init(ctx: click.Context):
+    ctx.invoke(cmd_update, non_interactive=True, switch_branch=False, discard_changes=False, init_caches=True)
+
+    for f in SRCINFO_CACHE_FILES:
+        assert os.path.exists(os.path.join(config.get_path('pkgbuilds'), PKG_TEST_PATH, f))
+
+
+def build_pkgs(_ctx: click.Context, query: list[str], arch: str = 'aarch64', **kwargs):
+    _ctx.invoke(cmd_build, paths=query, arch=arch, **kwargs)
 
 
 def test_packages_build_by_path(ctx: click.Context):
-    name = 'device/device-sdm845-oneplus-enchilada'
-    build_pkgs(ctx, [name])
+    build_pkgs(ctx, [PKG_TEST_PATH], force=True)
 
 
 def test_split_package_build_by_name(ctx: click.Context):
-    name = 'device-sdm845-xiaomi-beryllium-ebbg'
-    build_pkgs(ctx, [name])
+    build_pkgs(ctx, [PKG_TEST_NAME])
```
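For reference, a minimal sketch of the `click.Context` fixture pattern these tests rely on, with a made-up `cmd_demo` command standing in for the real CLI entry points (`ctx.invoke()` calls a command's callback with keyword arguments, bypassing argument parsing):

```python
import click
import pytest

@click.command()
@click.option('--non-interactive', is_flag=True)
def cmd_demo(non_interactive: bool = False):
    click.echo(f'non_interactive={non_interactive}')

@pytest.fixture()
def ctx() -> click.Context:
    return click.Context(cmd_demo)

def test_demo(ctx: click.Context):
    # keyword arguments map straight onto the command's parameters
    ctx.invoke(cmd_demo, non_interactive=True)
```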
```diff
@@ -460,13 +460,12 @@ def setup_sources(package: Pkgbuild, lazy: bool = True):
     if cache.validate_checksums():
         logging.info(f"{package.path}: Sources already set up.")
         return
-    makepkg_setup = ' '.join(MAKEPKG_CMD + [
+    makepkg_setup = MAKEPKG_CMD + [
         '--nodeps',
         '--nobuild',
         '--noprepare',
         '--skippgpcheck',
-    ])
-    msg = "makepkg sources setup failed; retrying without --holdver"
+    ]
 
     logging.info(f'{package.path}: Getting build chroot for source setup')
     # we need to use a chroot here because makepkg symlinks sources into src/ via an absolute path
@@ -474,11 +473,7 @@ def setup_sources(package: Pkgbuild, lazy: bool = True):
     assert config.runtime.arch
     chroot = setup_build_chroot(config.runtime.arch)
     logging.info(f'{package.path}: Setting up sources with makepkg')
-    result = chroot.run_cmd(
-        f"{makepkg_setup} --holdver || ( echo '{package.path}: {msg}' ; {makepkg_setup} )",
-        cwd=dir,
-        switch_user='kupfer',
-    )
+    result = chroot.run_cmd(makepkg_setup, cwd=dir, switch_user='kupfer')
     assert isinstance(result, subprocess.CompletedProcess)
     if result.returncode != 0:
         raise Exception(f'{package.path}: Failed to setup sources, exit code: {result.returncode}')
@@ -703,7 +698,7 @@ def build_packages(
     for package in need_build:
         base = package.pkgbase if isinstance(package, SubPkgbuild) else package
         assert isinstance(base, Pkgbase)
-        if package.is_built():
+        if package.is_built(arch):
            logging.info(f"Skipping building {package.name} since it was already built this run as part of pkgbase {base.name}")
             continue
         build_package(
@@ -716,7 +711,9 @@ def build_packages(
         )
         files += add_package_to_repo(package, arch)
         updated_repos.add(package.repo)
-        base._is_built = True
+        for _arch in ['any', arch]:
+            if _arch in base.arches:
+                base._built_for.add(_arch)
     # rescan affected repos
     local_repos = get_kupfer_local(arch, in_chroot=False, scan=False)
     for repo_name in updated_repos:
```
packages/cli.py: 143 lines changed
```diff
@@ -1,4 +1,5 @@
 import click
+import json
 import logging
 import os
 
@@ -6,17 +7,20 @@ from glob import glob
 from typing import Iterable, Optional
 
 from config.state import config
-from constants import Arch, ARCHES, REPOSITORIES, SRCINFO_INITIALISED_FILE
-from exec.file import remove_file
+from constants import Arch, ARCHES, REPOSITORIES, SRCINFO_FILE, SRCINFO_INITIALISED_FILE, SRCINFO_METADATA_FILE, SRCINFO_TARBALL_FILE, SRCINFO_TARBALL_URL
+from exec.cmd import run_cmd, shell_quote, CompletedProcess
+from exec.file import get_temp_dir, makedir, remove_file
 from devices.device import get_profile_device
-from distro.distro import get_kupfer_local
+from distro.distro import get_kupfer_local, get_kupfer_url
 from distro.package import LocalPackage
 from net.ssh import run_ssh_command, scp_put_files
-from utils import git
+from utils import download_file, git, sha256sum
 from wrapper import check_programs_wrap, enforce_wrap
 
 from .build import build_packages_by_paths
-from .pkgbuild import discover_pkgbuilds, filter_pkgbuilds, init_pkgbuilds
+from .pkgbuild import discover_pkgbuilds, filter_pkgbuilds, get_pkgbuild_dirs, init_pkgbuilds
+
+SRCINFO_CACHE_FILES = [SRCINFO_FILE, SRCINFO_INITIALISED_FILE, SRCINFO_METADATA_FILE]
 
 
 def build(
@@ -46,31 +50,140 @@ def build(
     )
 
 
+def init_pkgbuild_caches(clean_src_dirs: bool = True, remote_branch: Optional[str] = None):
+
+    def read_srcinitialised_checksum(src_initialised):
+        with open(src_initialised) as fd:
+            d = json.load(fd)
+            if isinstance(d, dict):
+                return d.get('PKGBUILD', '!!!ERROR!!!')
+            raise Exception("JSON content not a dictionary!")
+
+    # get_kupfer_url() resolves repo branch variable in url
+    url = get_kupfer_url(url=SRCINFO_TARBALL_URL, branch=remote_branch)
+    cachetar = os.path.join(config.get_path('packages'), SRCINFO_TARBALL_FILE)
+    makedir(os.path.dirname(cachetar))
+    logging.info(f"Updating PKGBUILD caches from {url}" + (", pruning outdated src/ directories" if clean_src_dirs else ""))
+    updated = download_file(cachetar, url)
+    logging.info("Cache tarball was " + ('downloaded successfully' if updated else 'already up to date'))
+    tmpdir = get_temp_dir()
+    logging.debug(f"Extracting {cachetar} to {tmpdir}")
+    res = run_cmd(['tar', 'xf', cachetar], cwd=tmpdir)
+    assert isinstance(res, CompletedProcess)
+    if res.returncode:
+        raise Exception(f"failed to extract srcinfo cache archive '{cachetar}'")
+    pkgbuild_dirs = get_pkgbuild_dirs()
+    for pkg in pkgbuild_dirs:
+        logging.info(f"{pkg}: analyzing cache")
+        pkgdir = os.path.join(config.get_path('pkgbuilds'), pkg)
+        srcdir = os.path.join(pkgdir, 'src')
+        src_initialised = os.path.join(pkgdir, SRCINFO_INITIALISED_FILE)
+        cachedir = os.path.join(tmpdir, pkg)
+        pkgbuild_checksum = sha256sum(os.path.join(pkgdir, 'PKGBUILD'))
+        copy_files: set[str] = set(SRCINFO_CACHE_FILES)
+        if os.path.exists(src_initialised):
+            try:
+                if read_srcinitialised_checksum(src_initialised) == pkgbuild_checksum:
+                    copy_files.remove(SRCINFO_INITIALISED_FILE)
+                    for f in copy_files.copy():
+                        fpath = os.path.join(pkgdir, f)
+                        if os.path.exists(fpath):
+                            copy_files.remove(f)
+                    if not copy_files:
+                        logging.info(f"{pkg}: SRCINFO cache already up to date")
+                        continue
+            except Exception as ex:
+                logging.warning(f"{pkg}: Something went wrong parsing {SRCINFO_INITIALISED_FILE}, treating as outdated!:\n{ex}")
+            if clean_src_dirs and os.path.exists(srcdir):
+                logging.info(f"{pkg}: outdated src/ detected, removing")
+                remove_file(srcdir, recursive=True)
+            remove_file(src_initialised)
+        if not os.path.exists(cachedir):
+            logging.info(f"{pkg}: not found in remote repo cache, skipping")
+            continue
+        cache_initialised = os.path.join(cachedir, SRCINFO_INITIALISED_FILE)
+        try:
+            if read_srcinitialised_checksum(cache_initialised) != pkgbuild_checksum:
+                logging.info(f"{pkg}: PKGBUILD checksum differs from remote repo cache, skipping")
+                continue
+        except Exception as ex:
+            logging.warning(f"{pkg}: Failed to parse the remote repo's cached {SRCINFO_INITIALISED_FILE}, skipping!:\n{ex}")
+            continue
+        if not copy_files:
+            continue
+        logging.info(f"{pkg}: Copying srcinfo cache from remote repo")
+        logging.debug(f'{pkg}: copying {copy_files}')
+        copy_files_list = [shell_quote(os.path.join(cachedir, f)) for f in copy_files]
+        res = run_cmd(f"cp {' '.join(copy_files_list)} {shell_quote(pkgdir)}/")
+        assert isinstance(res, CompletedProcess)
+        if res.returncode:
+            raise Exception(f"{pkg}: failed to copy cache contents from {cachedir}")
+
+
+non_interactive_flag = click.option('--non-interactive', is_flag=True)
+init_caches_flag = click.option(
+    '--init-caches/--no-init-caches',
+    is_flag=True,
+    default=True,
+    show_default=True,
+    help="Fill PKGBUILDs caches from HTTPS repo where checksums match",
+)
+remove_outdated_src_flag = click.option(
+    '--clean-src-dirs/--no-clean-src-dirs',
+    is_flag=True,
+    default=True,
+    show_default=True,
+    help="Remove outdated src/ directories to avoid problems",
+)
+switch_branch_flag = click.option('--switch-branch', is_flag=True, help="Force the branch to be corrected even in non-interactive mode")
+discard_changes_flag = click.option('--discard-changes', is_flag=True, help="When switching branches, discard any locally changed conflicting files")
+
+
 @click.group(name='packages')
 def cmd_packages():
     """Build and manage packages and PKGBUILDs"""
 
 
-non_interactive_flag = click.option('--non-interactive', is_flag=True)
-
-
 @cmd_packages.command(name='update')
 @non_interactive_flag
-@click.option('--switch-branch', is_flag=True, help="Force the branch to be corrected even in non-interactive mode")
-@click.option('--discard-changes', is_flag=True, help="When switching branches, discard any locally changed conflicting files")
-def cmd_update(non_interactive: bool = False, switch_branch: bool = False, discard_changes: bool = False):
+@init_caches_flag
+@switch_branch_flag
+@discard_changes_flag
+@remove_outdated_src_flag
+def cmd_update(
+    non_interactive: bool = False,
+    init_caches: bool = False,
+    clean_src_dirs: bool = True,
+    switch_branch: bool = False,
+    discard_changes: bool = False,
+):
     """Update PKGBUILDs git repo"""
     init_pkgbuilds(interactive=not non_interactive, lazy=False, update=True, switch_branch=switch_branch, discard_changes=discard_changes)
-    logging.info("Refreshing SRCINFO caches")
+    if init_caches:
+        init_pkgbuild_caches(clean_src_dirs=clean_src_dirs)
+    logging.info("Refreshing outdated SRCINFO caches")
     discover_pkgbuilds(lazy=False)
 
 
 @cmd_packages.command(name='init')
 @non_interactive_flag
+@init_caches_flag
+@switch_branch_flag
+@discard_changes_flag
+@remove_outdated_src_flag
 @click.option('-u', '--update', is_flag=True, help='Use git pull to update the PKGBUILDs')
-def cmd_init(non_interactive: bool = False, update: bool = False):
+def cmd_init(
+    non_interactive: bool = False,
+    init_caches: bool = True,
+    clean_src_dirs: bool = True,
+    switch_branch: bool = False,
+    discard_changes: bool = False,
+    update: bool = False,
+):
     "Ensure PKGBUILDs git repo is checked out locally"
-    init_pkgbuilds(interactive=not non_interactive, lazy=False, update=update, switch_branch=False)
+    init_pkgbuilds(interactive=not non_interactive, lazy=False, update=update, switch_branch=switch_branch, discard_changes=discard_changes)
+    if init_caches:
+        init_pkgbuild_caches(clean_src_dirs=clean_src_dirs)
 
 
 @cmd_packages.command(name='build')
```
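`read_srcinitialised_checksum()` implies the cache marker is a small JSON object keyed by `'PKGBUILD'`; a sketch of the assumed file shape (the hash shown is just the sha256 of empty input, used as a placeholder):

```python
import json

# assumed shape of .srcinfo_initialised.json, inferred from the reader above
marker_text = '{"PKGBUILD": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"}'
d = json.loads(marker_text)
assert isinstance(d, dict)
checksum = d.get('PKGBUILD', '!!!ERROR!!!')
# the cache is reusable only when this equals sha256sum('<pkgdir>/PKGBUILD')
```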
```diff
@@ -122,7 +235,7 @@ def cmd_sideload(paths: Iterable[str], arch: Optional[Arch] = None, no_build: bo
                     alloc_tty=True).check_returncode()
 
 
-CLEAN_LOCATIONS = ['src', 'pkg', SRCINFO_INITIALISED_FILE]
+CLEAN_LOCATIONS = ['src', 'pkg', *SRCINFO_CACHE_FILES]
 
 
 @cmd_packages.command(name='clean')
```
```diff
@@ -239,16 +239,16 @@ class Pkgbuild(PackageInfo):
             arch = 'any'
         return f'{self.name}-{self.version}-{arch}.pkg.tar.zst'
 
-    def is_built(self) -> bool:
+    def is_built(self, arch: Arch, tolerate_archless: bool = True) -> bool:
         raise NotImplementedError()
 
 
 class Pkgbase(Pkgbuild):
     subpackages: list[SubPkgbuild]
-    _is_built: bool
+    _built_for: set[Arch]
 
     def __init__(self, relative_path: str, subpackages: list[SubPkgbuild] = [], **args):
-        self._is_built = False
+        self._built_for = set()
         self.subpackages = list(subpackages)
         super().__init__(relative_path, **args)
 
@@ -256,7 +256,7 @@ class Pkgbase(Pkgbuild):
         if not isinstance(pkg, Pkgbase):
             raise Exception(f"Tried to update pkgbase {self.name} with non-base pkg {pkg}")
         Pkgbuild.update(self, pkg)
-        self._is_built = pkg._is_built or self._is_built
+        self._built_for.update(pkg._built_for)
         sub_dict = {p.name: p for p in self.subpackages}
         self.subpackages.clear()
         for new_pkg in pkg.subpackages:
@@ -290,8 +290,11 @@ class Pkgbase(Pkgbuild):
             names.update(pkg.names())
         return list(names)
 
-    def is_built(self) -> bool:
-        return self._is_built
+    def is_built(self, arch: Arch, tolerate_archless: bool = True) -> bool:
+        arches = {arch}
+        if tolerate_archless:
+            arches.add('any')
+        return bool(self._built_for.intersection(arches))
 
 
 class SubPkgbuild(Pkgbuild):
@@ -313,8 +316,8 @@ class SubPkgbuild(Pkgbuild):
         assert self.pkgbase
         self.pkgbase.refresh_sources(lazy=lazy)
 
-    def is_built(self) -> bool:
-        return self.pkgbase.is_built()
+    def is_built(self, arch: Arch, tolerate_archless: bool = True) -> bool:
+        return self.pkgbase.is_built(arch)
 
 
 def parse_pkgbuild(
```
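The switch from a boolean `_is_built` to a `_built_for` set is the core of these hunks; a self-contained illustration of the lookup semantics, lifted from the methods above (`Arch` is a plain `str` alias, and `'any'` satisfies every arch when `tolerate_archless` is set):

```python
Arch = str

class Pkgbase:
    def __init__(self) -> None:
        self._built_for: set[Arch] = set()

    def is_built(self, arch: Arch, tolerate_archless: bool = True) -> bool:
        arches = {arch}
        if tolerate_archless:
            arches.add('any')
        return bool(self._built_for.intersection(arches))

base = Pkgbase()
base._built_for.add('any')       # an arch-independent package was built
assert base.is_built('aarch64')  # 'any' counts for every architecture
assert not base.is_built('aarch64', tolerate_archless=False)
```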
```diff
@@ -432,24 +435,30 @@ def get_pkgbuild_by_name(name: str, lazy: bool = True):
     return get_pkgbuild_by_name(name=name, lazy=lazy)
 
 
+def get_pkgbuild_dirs(quiet: bool = True, repositories: Optional[list[str]] = None) -> list[str]:
+    """Gets the relative paths to directories containing PKGBUILDs, optionally warns about dirs without a PKGBUILD"""
+    pkgbuilds_dir = config.get_path('pkgbuilds')
+    paths = []
+    for repo in repositories or REPOSITORIES:
+        for dir in os.listdir(os.path.join(pkgbuilds_dir, repo)):
+            p = os.path.join(repo, dir)
+            if not os.path.exists(os.path.join(pkgbuilds_dir, p, 'PKGBUILD')):
+                if not quiet:
+                    logging.warning(f"{p} doesn't include a PKGBUILD file; skipping")
+                continue
+            paths.append(p)
+    return paths
+
+
 def discover_pkgbuilds(parallel: bool = True, lazy: bool = True, repositories: Optional[list[str]] = None) -> dict[str, Pkgbuild]:
     global _pkgbuilds_cache, _pkgbuilds_scanned
     if lazy and _pkgbuilds_scanned:
         logging.debug("Reusing cached pkgbuilds repo")
         return _pkgbuilds_cache.copy()
     check_programs_wrap(['makepkg'])
-    pkgbuilds_dir = config.get_path('pkgbuilds')
     packages: dict[str, Pkgbuild] = {}
-    paths = []
     init_pkgbuilds(interactive=False)
-    for repo in repositories or REPOSITORIES:
-        for dir in os.listdir(os.path.join(pkgbuilds_dir, repo)):
-            p = os.path.join(repo, dir)
-            if not os.path.exists(os.path.join(pkgbuilds_dir, p, 'PKGBUILD')):
-                logging.warning(f"{p} doesn't include a PKGBUILD file; skipping")
-                continue
-            paths.append(p)
-
+    paths = get_pkgbuild_dirs(quiet=False, repositories=repositories)
     logging.info(f"Discovering PKGBUILDs{f' in repositories: {repositories}' if repositories else ''}")
 
     results = []
```
```diff
@@ -6,3 +6,5 @@ typing_extensions
 coloredlogs
 munch
 setuptools # required by munch
+requests
+python-dateutil
```
utils.py: 26 lines changed
```diff
@@ -1,12 +1,15 @@
 import atexit
+import datetime
 import grp
 import hashlib
 import logging
 import os
 import pwd
+import requests
 import subprocess
 import tarfile
 
+from dateutil.parser import parse as parsedate
 from shutil import which
 from typing import Generator, IO, Optional, Union, Sequence
```
```diff
@@ -134,6 +137,29 @@ def read_files_from_tar(tar_file: str, files: Sequence[str]) -> Generator[tuple[
         yield path, fd
 
 
+def download_file(path: str, url: str, update: bool = True):
+    """Download a file over http[s]. With `update`, tries to use mtime timestamps to download only changed files."""
+    url_time = None
+    if os.path.exists(path) and update:
+        headers = requests.head(url).headers
+        if 'last-modified' in headers:
+            url_time = parsedate(headers['last-modified']).astimezone()
+            file_time = datetime.datetime.fromtimestamp(os.path.getmtime(path)).astimezone()
+            if url_time == file_time:
+                logging.debug(f"{path} seems already up to date")
+                return False
+    user_agent = {"User-agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:46.0) Gecko/20100101 Firefox/46.0"}
+    download = requests.get(url, headers=user_agent)
+    with open(path, 'wb') as fd:
+        for chunk in download.iter_content(4096):
+            fd.write(chunk)
+    if 'last-modified' in download.headers:
+        url_time = parsedate(download.headers['last-modified']).astimezone()
+        os.utime(path, (datetime.datetime.now().timestamp(), url_time.timestamp()))
+    logging.debug(f"{path} downloaded!")
+    return True
+
+
 # stackoverflow magic from https://stackoverflow.com/a/44873382
 def sha256sum(filename):
     h = hashlib.sha256()
```
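A hypothetical call to the new helper, fetching the SRCINFO cache tarball for the `dev` branch (the local path is made up; the URL is the tarball constant with `%branch%` resolved):

```python
from utils import download_file

url = 'https://gitlab.com/kupfer/packages/prebuilts/-/raw/dev/srcinfos.tar.gz'
# returns False when the server's Last-Modified matches the local file's mtime
updated = download_file('/tmp/srcinfos.tar.gz', url)
print('downloaded' if updated else 'already up to date')
```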