Init, adding corrade,magnum,magnum-plugins,magnum-integration,magnum-extras, and magnum-examples 2025.47_1

This commit is contained in:
2025-11-19 13:35:26 +02:00
parent 6031ef978f
commit 17b687c5c1
346 changed files with 28087 additions and 12 deletions

View File

@@ -0,0 +1,22 @@
# Cycle detector for void-packages
This script enumerates dependencies for packages in a
[void-packages repository](https://github.com/void-linux/void-packages)
and identifies build-time dependency cycles.
For command syntax, run `xbps-cycles.py -h`. Often, it may be sufficient to run
`xbps-cycles.py` with no arguments. By default, the script will look for a
repository at `$XBPS_DISTDIR`; if that variable is not defined, the current
directory is used instead. To override this behavior, use the `-d` option to
provide the path to your desired void-packages clone.
The standard behavior will be to spawn multiple processes, one per CPU, to
enumerate package dependencies. This is by far the most time-consuming part of
the execution. To override the degree of parallelism, use the `-j` option.
Dependencies can be cached on disk, one file per package, in the directory
passed with the `-c` option. On subsequent runs with the same option,
dependencies are read from those files rather than recomputed.
Failures should be harmless but, at this early stage, are unlikely to produce
pretty or even helpful output.

View File

@@ -0,0 +1,9 @@
#!/bin/bash
# Flag packages whose templates still mark a license as "custom:" even
# though it appears in common/travis/license.lst (i.e. no longer custom).
mapfile -t licenses < common/travis/license.lst
for license in "${licenses[@]}"; do
	grep -rl "custom:$license"'[,"]' --include=template srcpkgs |
		cut -d/ -f2 |
		xargs -rn1 printf "%s: $license is no longer custom\n"
done

66
common/scripts/lint-commits Executable file
View File

@@ -0,0 +1,66 @@
#!/bin/sh
die() {
	# Print all arguments as a single line on stderr, then abort.
	printf '%s\n' "$*" 1>&2
	exit 1
}
# Bail out early: everything below shells out to git.
command -v git >/dev/null 2>&1 ||
die "neither chroot-git nor git could be found!"
rev_parse() {
	# With a non-empty first argument, verify exactly that revision
	# (errors go to stderr). Otherwise try each remaining candidate in
	# order, printing the first one that resolves; fail if none do.
	if [ -n "$1" ]; then
		git rev-parse --verify "$1"
		return
	fi
	shift
	for candidate
	do
		git rev-parse --verify "$candidate" 2>/dev/null && return
	done
	return 1
}
# Determine the commit range to lint: explicit arguments win, otherwise
# fall back to refs commonly set by CI fetches.
base=$(rev_parse "$1" FETCH_HEAD ORIG_HEAD) || die "base commit not found"
tip=$(rev_parse "$2" HEAD) || die "tip commit not found"
status=0
for cmt in $(git rev-list --abbrev-commit "$base..$tip")
do
	# Raw commit object: header lines, one empty line, then the message.
	git cat-file commit "$cmt" |
	awk -vC="$cmt" '
	# skip header
	/^$/ && !msg { msg = 1; next }
	/^author .*noreply/ && !msg {
		print "::error title=Commit Lint::" C ": authored by noreply email";
		ret=1;
	}
	/^author .*(localhost|localdomain|[(]none[)])/ && !msg {
		print "::error title=Commit Lint::" C ": authored by localhost email";
		ret=1;
	}
	!msg { next }
	# 3: long-line-is-banned-except-footnote-like-this-for-url
	(NF > 2) && (length > 80) {
		print "::error title=Commit Lint::" C ": long line: " $0
		ret = 1
	}
	# first line of the message: subject checks
	!subject {
		if (length > 50) { print "::warning title=Commit Lint::" C ": subject is a bit long" }
		if (!($0 ~ ":" || $0 ~ "^Take over maintainership " || $0 ~ "^Orphan ")) { print "::error title=Commit Lint::" C ": subject does not follow CONTRIBUTING.md guidelines"; exit 1 }
		# Below check is too noisy?
		# if (!($0 ~ "^New package:" || $0 ~ ".*: update to")) {
		#	print "::warning title=Commit Lint::" C ": not new package/update/removal?"
		# }
		subject = 1; next
	}
	/^$/ { body = 1; next }
	!body {
		print "::error title=Commit Lint::" C ": second line must be blank"
		ret = 1
	}
	END { exit ret }
	' || status=1
done
exit $status

200
common/scripts/lint-conflicts Executable file
View File

@@ -0,0 +1,200 @@
#!/usr/bin/env bash
# Report packages installing same file and not marked with
# conflicts or replaces.
# Without argument, find conflicts between packages in local
# repository at hostdir/binpkgs and packages indexed in xlocate.
# With single path as argument, read that local repository.
# With -a flag, find conflicts between packages indexed in xlocate.
set -e
# Argument handling: choose the local repository, or -a for "all" mode.
if [ "$#" = 0 ]; then
	binpkgs="$PWD/hostdir/binpkgs"
elif [ "$1" = -a ]; then
	# non-empty $all switches the helpers below into full (xlocate) mode
	all=1
elif [ -d "$1" ]; then
	binpkgs="$1"
else
	echo "Usage:"
	echo "$0"
	echo " check packages in ./hostdir/binpkgs"
	echo "$0 path/to/hostdir/binpkgs"
	echo " check packages there"
	echo "$0 -a"
	echo " check all packages indexed in xlocate"
	exit 1
fi
# Shared state: newly_built marks local packages; providers_cache and
# conflicts_cache memoize xbps-query lookups; owners maps a file path to
# its owning packages; pairs collects the offending package pairs.
declare -A newly_built conflicts_cache providers_cache pairs owners
# repository arguments passed to every xbps-query invocation
repositories=("--repository=${binpkgs}/bootstrap" "--repository=${binpkgs}" "--repository=${binpkgs}/nonfree")
rv=0
template_exists() {
	# True when the package still has a template in this checkout.
	test -f "srcpkgs/$1/template"
}
partial_check() {
	# True unless -a (check everything in xlocate) was requested.
	test -z "$all"
}
providers_of() {
	# print the pkgname and packages that `provides` it
	# Results are memoized in providers_cache, so xbps-query runs at
	# most once per pkgname.
	local pkgname=$1
	if [ "${providers_cache[$pkgname]}" = '' ]; then
		local line provider_pkgver provided_pkgver provider_pkgname provided_pkgname
		local -A providers
		# a package always provides itself
		providers[$pkgname]=$pkgname
		# parse "provider-pkgver: provided-pkgver (...)" lines
		while read -r line; do
			line=${line%%'('*}
			provider_pkgver=${line%': '*}
			provided_pkgver=${line#*': '}
			provider_pkgname=${provider_pkgver%-*}
			provided_pkgname=${provided_pkgver%-*}
			# comes from $(xbps-query -s $pkgname), so $pkgname can be substring
			if [ "$provided_pkgname" = "$pkgname" ]; then
				providers[$provider_pkgname]=$provider_pkgname
			fi
		done < <(xbps-query "${repositories[@]}" -p provides -R -s "$pkgname")
		# leading space ensures ${[]} != ''
		providers_cache[$pkgname]=" ${providers[*]}"
	fi
	echo ${providers_cache[$pkgname]}
}
conflicts_of() {
	# print list of packages that are _marked_ as conflicting with given one
	# Each conflicts/replaces pattern is expanded to every package that
	# provides it; results are memoized in conflicts_cache.
	local pkgname=$1
	if [ "${conflicts_cache[$pkgname]}" = '' ]; then
		local in_conflict provider
		local -A all
		while read -r in_conflict; do
			# strip version-constraint suffixes like "<1.0" or ">=2"
			in_conflict=${in_conflict%'<'*}
			in_conflict=${in_conflict%'>'*}
			providers_of "$in_conflict" > /dev/null # executing in same process to fill cache
			for provider in $(providers_of "$in_conflict"); do
				all[$provider]=$provider
			done
		done < <(xbps-query "${repositories[@]}" -p conflicts,replaces -R "$pkgname")
		# leading space ensures ${[]} != ''
		conflicts_cache[$pkgname]=" ${all[*]}"
	fi
	echo ${conflicts_cache[$pkgname]}
}
conflict_between() {
	# Succeed iff either package is marked as conflicting with the other.
	local first=$1 second=$2
	conflicts_of "$first" > /dev/null # run in current shell so the cache fills
	case " $(conflicts_of "$first") " in
		*" $second "*) return 0 ;;
	esac
	conflicts_of "$second" > /dev/null # run in current shell so the cache fills
	case " $(conflicts_of "$second") " in
		*" $first "*) return 0 ;;
	esac
	return 1
}
list_newly_built_files() {
	# print one line per file in newly built packages
	# each line contains pkgname and file path
	local pkgver pkgname
	while read -r pkgver; do
		pkgname=${pkgver%-*}
		# drop symlink targets ("path -> target"), prefix with pkgname
		xbps-query "${repositories[@]}" -i -f "$pkgname" | sed s'/ -> .*//;'" s/^/$pkgname /"
	done < <(xbps-query "${repositories[@]}" -i -R -s '' | cut -d' ' -f 2)
}
list_interesting_files() {
	# list files potentially contained in more than one package
	# each line contains pkgver/pkgname and file path
	if partial_check; then
		list_newly_built_files
	else
		# full mode: keep only paths that xlocate reports more than once
		xlocate / | sed s'/ -> .*//' | grep -F -f <(xlocate / | sed 's/[^[:space:]]*[[:space:]]*//' | sed s'/ -> .*//' | sort | uniq -d)
	fi
}
group_by_file_full() {
	# create associative array `owners` mapping file to list of packages,
	# covering every duplicated file indexed in xlocate (-a mode);
	# packages without a srcpkgs template are skipped
	local pkgver file pkgname
	while read -r pkgver file; do
		pkgname=${pkgver%-*}
		if template_exists "$pkgname"; then
			owners[$file]+=" $pkgname"
		fi
	done < <(list_interesting_files)
}
group_by_file_partial() {
	# create associative array `owners` mapping file to list of packages,
	# restricted to files installed by the newly built packages
	# NOTE(review): pkgver is read below but not declared local here
	local pkgname file
	## newly built packages
	# remember which packages come from the local repository
	while read -r pkgver; do
		pkgname=${pkgver%-*}
		newly_built[$pkgname]=$pkgname
	done < <(xbps-query "${repositories[@]}" -i -R -s '' | cut -d' ' -f 2)
	while read -r pkgname file; do
		owners[$file]+=" $pkgname"
	done < <(list_newly_built_files)
	## rest of repository
	# add xlocate-indexed packages shipping any of the same file paths
	while read -r pkgver file; do
		pkgname=${pkgver%-*}
		if [ -z "${newly_built[$pkgname]}" ] && template_exists "$pkgname"; then
			owners[$file]+=" $pkgname"
		fi
	done < <(xlocate / | sed s'/ -> .*//' | grep -F -f <(list_newly_built_files | cut -d ' ' -f 2-))
}
group_by_pair() {
	# find package pairs owning same file and not marked as conflicting
	# NOTE(review): pkg (the first field) is read but never used
	local pkg file a b
	while read -r pkg file; do
		# examine every ordered pair (a < b) of owners of this file
		for a in ${owners[$file]}; do
			for b in ${owners[$file]}; do
				if ! [ "$a" "<" "$b" ]; then
					continue
				fi
				# in partial mode only pairs involving a newly built package matter
				if partial_check && [ -z "${newly_built[$a]}" ] && [ -z "${newly_built[$b]}" ]; then
					continue
				fi
				if ! conflict_between "$a" "$b"; then
					# pairs["a b"] holds a `declare`-style dump (@A) of
					# pair_files, the set of files both packages install;
					# eval restores it so the new file can be merged in
					unset pair_files
					local -A pair_files
					eval "${pairs["$a $b"]}"
					pair_files[$file]="$file"
					pairs["$a $b"]="${pair_files[@]@A}"
				fi
			done
		done
	done < <(list_interesting_files)
}
print_out() {
	# Report each conflicting pair and the files they share; sets rv=1
	# when anything is found.
	local pair file
	if [ "${#pairs[@]}" = 0 ]; then
		echo 1>&2 "No conflicts found in" "${repositories[@]#*=}"
		exit 0
	fi
	while read -r pair; do
		rv=1
		echo "${pair% *} and ${pair#* } conflict for"
		# restore the associative array dumped by group_by_pair
		unset pair_files
		eval "${pairs[$pair]}"
		for file in "${pair_files[@]}"; do
			echo " $file"
		done | sort
	done < <(printf '%s\n' "${!pairs[@]}" | sort)
}
# Build the file -> owners map, then report unmarked conflicting pairs.
if partial_check; then
	group_by_file_partial
else
	group_by_file_full
fi
group_by_pair
print_out
exit $rv

View File

@@ -0,0 +1,113 @@
#!/bin/sh
# Lint version/revision/reverts changes of one template between two
# git revisions.
# when $3 is empty, template is read from working directory
template="$1" base_rev="$2" tip_rev="$3"
# overall exit status; any failed check sets it to 1
status=0
# print a message on stderr and abort
die() {
	printf '%s\n' "$*" >&2
	exit 1
}
if ! [ "$base_rev" ]; then
	die "usage: $0 TEMPLATE BASE-REVISION [TIP-REVISION]"
fi
# everything below needs git to read template blobs
if ! command -v git >/dev/null 2>&1; then
	die "neither chroot-git nor git could be found"
fi
scan() {
	# Grep the template for regex $1 and print each hit annotated with
	# message $2, skipping matches on commented-out lines.
	rx="$1" msg="$2"
	template_path=$template
	maybe_git=
	if [ "$tip_rev" ]; then
		# read the blob at the tip revision instead of the working tree;
		# revspec strips the "rev:" prefix git grep adds to hits
		template_path="${tip_rev}:${template}"
		maybe_git="git"
		revspec="[^:]*:"
	fi
	$maybe_git grep -P -Hn -e "$rx" "$template_path" |
	grep -v -P -e "[^:]*:[^:]*:\s*#" |
	sed "s/^${revspec}\([^:]*:[^:]*:\)\(.*\)/\1 $msg/"
}
show_template() {
	# Print the template: from the given git revision, or from the
	# working directory when no revision is supplied. Silent on failure.
	rev="$1"
	case "$rev" in
	'')
		cat "${template}" 2>/dev/null
		;;
	*)
		git cat-file blob "${rev}:${template}" 2>/dev/null
		;;
	esac
}
show_template_var() {
	# Evaluate the template in a throwaway bash and print variable $2
	# as it would be set at revision $1 (working tree when $1 is empty).
	rev="$1" var="$2"
	{
		show_template "$rev"
		printf 'printf "%%s\\n" "$%s"\n' "$var"
	} | bash 2>/dev/null
}
revision_reset() {
	# Complain when revision was not reset to 1 on a version update or
	# on package creation; reverted versions are exempt.
	[ "${revision}" = 1 ] && return
	if [ -z "$prev_version" ]; then
		action=creation
	else
		# reverting to this version is allowed to keep its revision
		case "$prev_reverts" in
		*"$version"_*) return ;;
		esac
		action=update
	fi
	scan "^revision=" "revision should be set to 1 on ${action}"
	status=1
}
reverts_on_downgrade() {
	# A downgrade must carry a reverts= entry for the previous version.
	[ -n "$version" ] || return
	case "$reverts" in
	*"$prev_version"_*)
		# previous version is reverted; nothing to report
		;;
	*)
		scan "^version=" "missing reverts= for downgrade $prev_version to $version"
		status=1
		;;
	esac
}
check_revert() {
	# Validate $reverts entries against the current version.
	# NOTE(review): assumes xbps-uhelper cmpver exits 0 when equal and 1
	# when the first version is newer — confirm against xbps docs.
	for vr in $reverts; do
		xbps-uhelper cmpver "${version}" "${vr%_*}"
		case "$?" in
		0 | 1)
			# current version is equal to or newer than the reverted
			# one, so the revert entry is stale
			scan '^version=' "remove $vr from \$reverts"
			status=1
			;;
		esac
	done
	# re-reverting the same version must bump revision past the old one
	for vr in $prev_reverts; do
		# only relevant when the current version equals a reverted one
		if ! xbps-uhelper cmpver "$version" "${vr%_*}"; then
			continue
		fi
		if [ $revision -gt "${vr##*_}" ]; then
			continue
		fi
		scan '^revision=' "undo a revert with same revision as before"
		status=1
	done
}
version_change() {
	# Compare template variables between base and tip and dispatch the
	# appropriate checks.
	version="$(show_template_var "$tip_rev" version)"
	revision="$(show_template_var "$tip_rev" revision)"
	reverts="$(show_template_var "$tip_rev" reverts)"
	prev_version="$(show_template_var "$base_rev" version)"
	prev_reverts="$(show_template_var "$base_rev" reverts)"
	xbps-uhelper cmpver "$version" "$prev_version"
	case "$?" in
	# NOTE(review): "$?" is always 0-255, so the -1 arm can never match;
	# 255 is presumably how cmpver reports "older" — confirm
	1) revision_reset;;
	-1|255) reverts_on_downgrade;;
	esac
	check_revert
}
version_change
exit $status

View File

@@ -0,0 +1,11 @@
# Converts xlint/etc format lints into GH Actions annotations
# The original line is printed alongside the annotation command
{
	# a[1] is "file[:lineno]"; msg is everything after the first ": "
	split($0, a, ": ")
	split(a[1], b, ":")
	msg = substr($0, index($0, ": ") + 2)
	# reset: awk variables persist across records, so a record without a
	# line number must not inherit the previous record's ",line=N"
	line = ""
	if (b[2]) {
		line = ",line=" b[2]
	}
	printf "::error title=Template Lint,file=%s%s::%s\n", b[1], line, msg
}

View File

@@ -0,0 +1,214 @@
#!/usr/bin/python3
# vim: set ts=4 sw=4 et:
"""
Usage:
./parse-py-metadata.py -S "$DESTDIR/$py3_sitelib" provides -v "$version"
extract the names of top-level packages from:
- $DESTDIR/$py3_sitelib/*.dist-info/METADATA
- $DESTDIR/$py3_sitelib/*.egg-info/PKG-INFO
./parse-py-metadata.py -S "$DESTDIR/$py3_sitelib" [-s] [-C] depends -e "extra1 extra2 ..."
-D "$XBPS_STATEDIR/$pkgname-rdeps" -V <( xbps-query -R -p provides -s "py3:" )
check that the dependencies of a package match what's listed in the python
package metadata, using the virtual package provides entries generated by
`parse-py-metadata.py provides`.
This script requires python3-packaging-bootstrap to be installed in the chroot
to run (which should be taken care of by the python3-module and python3-pep517
build styles).
"""
import argparse
from pathlib import Path
from sys import stderr
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from packaging.metadata import Metadata
from packaging.requirements import Requirement
from packaging.utils import canonicalize_name
# packages to always ignore
# (requirements resolved to these are reported as "(ignored)" and never
# demanded in depends; see parse_depends)
global_ignore = ["tzdata"]
def msg_err(msg: str, *, nocolor: bool = False, strict: bool = False):
if nocolor:
print(msg, flush=True)
else:
color = "31" if strict else "33"
print(f"\033[1m\033[{color}m{msg}\033[m", file=stderr, flush=True)
def vpkgname(val: "str | Requirement", *, version: str | None = None) -> str:
    """Build the ``py3:`` virtual package name for a requirement or a raw
    distribution name, optionally suffixed with ``-<version>``."""
    name = val.name if isinstance(val, Requirement) else val
    suffix = "" if version is None else f"-{version}"
    return f"py3:{canonicalize_name(name)}{suffix}"
def getpkgname(pkgver: str) -> str:
    """Strip the trailing ``-<version>_<revision>`` from a pkgver string."""
    name, _, _ = pkgver.rpartition("-")
    return name
def getpkgversion(pkgver: str) -> str:
    """Return the trailing ``<version>_<revision>`` part of a pkgver string."""
    return pkgver.rsplit("-", 1)[-1]
def getpkgdepname(pkgdep: str) -> str:
    """Extract the package name from a dependency pattern.

    Handles version-constraint forms (``name<...`` / ``name>...``) as
    well as plain pkgver strings (``name-version``).
    """
    for sep in ("<", ">"):
        if sep in pkgdep:
            return pkgdep.split(sep, 1)[0]
    return pkgdep.rpartition("-")[0]
def match_markers(req: "Requirement", extras: set[str]) -> bool:
    """Return True when the requirement applies: either unconditionally,
    with no extra selected, or for one of the requested extras."""
    marker = req.marker
    if marker is None:
        # unconditional requirement
        return True
    if marker.evaluate():
        return True
    return any(marker.evaluate({"extra": extra}) for extra in extras)
def find_metadata_files(sitepkgs: Path) -> list[Path]:
    """Collect METADATA/PKG-INFO files from dist-info and egg-info
    directories under the given site-packages path."""
    found: list[Path] = []
    for pattern in ("*.dist-info/METADATA", "*.egg-info/PKG-INFO"):
        found.extend(sitepkgs.glob(pattern))
    return found
def parse_provides(args):
    """Print the ``py3:`` virtual package names this package provides,
    derived from its installed Python metadata."""
    version = getpkgversion(args.pkgver)
    names = set()
    for metafile in find_metadata_files(args.sitepkgs):
        with metafile.open() as f:
            meta = Metadata.from_email(f.read(), validate=False)
        names.add(vpkgname(meta.name, version=version))
        if meta.provides_dist is not None:
            names.update(vpkgname(n, version=version) for n in meta.provides_dist)
        # deprecated but may be used
        if meta.provides is not None:
            names.update(vpkgname(n, version=version) for n in meta.provides)
    print("\n".join(names), flush=True)
def parse_depends(args):
    """Check that the package's run-time depends cover its Python
    requirements; report (and optionally fail on) anything missing.
    """
    depends = dict()  # ordered set of required py3: vpkgnames
    vpkgs = dict()    # py3: vpkgname -> list of real providing packages
    extras = set(args.extras.split())
    # parse the -V file; lines look like "pkgver: vpkgver ... (...)"
    with args.vpkgs.open() as f:
        for ln in f.readlines():
            if not ln.strip():
                continue
            pkgver, _, rest = ln.partition(":")
            vpkgvers, _, _ = rest.strip().partition("(")
            pkg = getpkgname(pkgver)
            vpkg = map(getpkgname, vpkgvers.split())
            for v in vpkg:
                if pkg not in vpkgs.setdefault(v, []):
                    vpkgs[v].append(pkg)
    # run-time dependency names recorded by xbps-src, if present
    if args.rdeps.exists():
        with args.rdeps.open() as f:
            rdeps = list(map(getpkgdepname, f.read().split()))
    else:
        rdeps = []
    # collect requirements from the installed metadata, filtered by
    # environment markers / requested extras
    for metafile in find_metadata_files(args.sitepkgs):
        with metafile.open() as f:
            raw = f.read()
        meta = Metadata.from_email(raw, validate=False)
        if meta.requires_dist is not None:
            depends.update(map(lambda p: (vpkgname(p), None),
                               filter(lambda r: match_markers(r, extras), meta.requires_dist)))
        # deprecated but may be used
        if meta.requires is not None:
            depends.update(map(lambda p: (vpkgname(p), None), meta.requires))
    err = False
    unknown = False
    missing = []
    for k in depends.keys():
        if k in vpkgs.keys():
            # satisfied when any provider is in rdeps or globally ignored
            for pkgname in vpkgs[k]:
                if pkgname in rdeps:
                    print(f" PYTHON: {k} <-> {pkgname}", flush=True)
                    break
                elif pkgname in global_ignore:
                    print(f" PYTHON: {k} <-> {pkgname} (ignored)", flush=True)
                    break
            else:
                # for/else: no provider satisfied the requirement
                pkgname = " OR ".join(vpkgs[k])
                if len(vpkgs[k]) > 1:
                    pkgname = "(" + pkgname + ")"
                msg_err(f" PYTHON: {k} <-> {pkgname} NOT IN depends PLEASE FIX!",
                        nocolor=args.nocolor, strict=args.strict)
                missing.append(pkgname)
                err = True
        else:
            # requirement maps to no known package at all
            msg_err(f" PYTHON: {k} <-> UNKNOWN PKG PLEASE FIX!",
                    nocolor=args.nocolor, strict=args.strict)
            unknown = True
            err = True
    if missing or unknown:
        msg_err(f"=> {args.pkgver}: missing dependencies detected!",
                nocolor=args.nocolor, strict=args.strict)
    if missing:
        msg_err(f"=> {args.pkgver}: please add these packages to depends: {' '.join(sorted(missing))}",
                nocolor=args.nocolor, strict=args.strict)
    # only -s (strict) turns findings into a failing exit status
    if err and args.strict:
        exit(1)
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # -S: site-packages dir to scan; -v: full pkgver of the package
    parser.add_argument("-S", dest="sitepkgs", type=Path)
    parser.add_argument("-v", dest="pkgver")
    # -s: fail on problems; -C: plain, uncolored output
    parser.add_argument("-s", dest="strict", action="store_true")
    parser.add_argument("-C", dest="nocolor", action="store_true")
    subparsers = parser.add_subparsers()
    prov_parser = subparsers.add_parser("provides")
    prov_parser.set_defaults(func=parse_provides)
    deps_parser = subparsers.add_parser("depends")
    # -e: space-separated extras to evaluate markers against
    deps_parser.add_argument("-e", dest="extras", default="")
    # -V: virtual-package provides listing; -D: recorded run-time deps
    deps_parser.add_argument("-V", dest="vpkgs", type=Path)
    deps_parser.add_argument("-D", dest="rdeps", type=Path)
    deps_parser.set_defaults(func=parse_depends)
    args = parser.parse_args()
    # imported here so a missing packaging module degrades to a warning
    # instead of breaking the build check entirely
    try:
        from packaging.metadata import Metadata
        from packaging.requirements import Requirement
        from packaging.utils import canonicalize_name
    except ImportError:
        msg_err(f"=> WARNING: {args.pkgver}: missing packaging module!\n"
                f"=> WARNING: {args.pkgver}: please add python3-packaging-bootstrap to hostmakedepends to run this check",
                nocolor=args.nocolor)
        exit(0)
    args.func(args)

126
common/scripts/xbps-cycles.py Executable file
View File

@@ -0,0 +1,126 @@
#!/usr/bin/env python3
import os
import sys
import glob
import subprocess
import multiprocessing
from argparse import ArgumentParser
import networkx as nx
def enum_depends(pkg, xbpsdir, cachedir):
    '''
    Return a pair (pkg, [dependencies]), where [dependencies] is the list
    of dependencies for the given package pkg. The argument xbpsdir should
    be a path to a void-packages repository. Dependencies will be
    determined by invoking

        <xbpsdir>/xbps-src show-build-deps <pkg>

    unless the file <cachedir>/deps-<pkg> exists, in which case it is
    read instead. If the return code of this call is nonzero, a message
    will be printed but the package will be treated as if it has no
    dependencies.
    '''
    if cachedir:
        cachepath = os.path.join(cachedir, 'deps-' + pkg)
        try:
            # cache hit: one dependency per line
            with open(cachepath) as f:
                return pkg, [line.strip() for line in f]
        except FileNotFoundError:
            pass
    cmd = [os.path.join(xbpsdir, 'xbps-src'), 'show-build-deps', pkg]
    try:
        deps = subprocess.check_output(cmd)
    except subprocess.CalledProcessError:
        # treat failure as "no dependencies", but tell the user
        print('xbps-src failed to find dependencies for package', pkg)
        deps = []
    else:
        deps = [d for d in deps.decode('utf-8').split('\n') if d]
        # populate the cache only on success
        if cachedir:
            with open(cachepath, 'w') as f:
                for d in deps:
                    print(d, file=f)
    return pkg, deps
def find_cycles(depmap, xbpsdir):
    '''
    For a map depmap: package -> [dependencies], construct a directed graph
    and identify any cycles therein.

    The argument xbpsdir should be a path to the root of a void-packages
    repository. All package names in depmap will be appended to the path
    <xbpsdir>/srcpkgs and reduced with os.path.realpath to coalesce
    subpackages.
    '''
    G = nx.DiGraph()
    for i, deps in depmap.items():
        # realpath resolves subpackage symlinks so edges connect the
        # main packages
        path = os.path.join(xbpsdir, 'srcpkgs', i)
        i = os.path.basename(os.path.realpath(path))
        for j in deps:
            path = os.path.join(xbpsdir, 'srcpkgs', j.strip())
            j = os.path.basename(os.path.realpath(path))
            G.add_edge(i, j)
    for c in nx.strongly_connected_components(G):
        # a singleton component is not a cycle
        if len(c) < 2: continue
        pkgs = nx.to_dict_of_lists(G, c)
        p = min(pkgs.keys())
        cycles = [ ]
        # walk the component, always following the smallest-named
        # dependency, until a package is revisited (already popped)
        while True:
            cycles.append(p)
            # Cycle is complete when package is not in map
            try: deps = pkgs.pop(p)
            except KeyError: break
            # Any of the dependencies here contributes to a cycle
            p = min(deps)
            if len(deps) > 1:
                # NOTE(review): p was reassigned just above, so this
                # prints the chosen successor, not the branching package
                print('Multipath: {} -> {}, choosing first'.format(p, deps))
        if cycles:
            print('Cycle: ' + ' -> '.join(cycles) + '\n')
if __name__ == '__main__':
    parser = ArgumentParser(description='Cycle detector for xbps-src')
    parser.add_argument('-j', '--jobs', default=None,
            type=int, help='Number of parallel jobs')
    parser.add_argument('-c', '--cachedir', default=None,
            help='Directory used to cache build dependencies (must exist)')
    parser.add_argument('-d', '--directory',
            default=None, help='Path to void-packages repo')
    parser.add_argument('-Q', dest='check_pkgs', action='store_const',
            const='yes', help='Use build dependencies for check -Q')
    parser.add_argument('-K', dest='check_pkgs', action='store_const',
            const='full', help='Use build dependencies for check -K')
    args = parser.parse_args()
    # default repository: $XBPS_DISTDIR, else the current directory
    if not args.directory:
        try: args.directory = os.environ['XBPS_DISTDIR']
        except KeyError: args.directory = '.'
    # propagate the requested check level to xbps-src via the environment
    if args.check_pkgs:
        os.environ['XBPS_CHECK_PKGS'] = args.check_pkgs
    # one worker per CPU unless -j was given
    pool = multiprocessing.Pool(processes = args.jobs)
    pattern = os.path.join(args.directory, 'srcpkgs', '*')
    # realpath dedupes subpackage symlinks before enumerating
    pkgs = {os.path.realpath(p) for p in glob.iglob(pattern)}
    depargs = ((os.path.basename(g), args.directory, args.cachedir) for g in pkgs)
    depmap = dict(pool.starmap(enum_depends, depargs))
    find_cycles(depmap, args.directory)