mirror of
https://gitlab.alpinelinux.org/alpine/abuild.git
commit c54d39d8aa

With a recent change in apk [1], virtual packages of the same name will
upgrade each other. Adjust abuild to this by not using the same virtual
package name for two types of dependencies.

This fixes the way cross compilers are built in postmarketOS [2], which is
essentially the same as running this on Alpine's gcc aport:

  $ cd aports/main/gcc
  $ C_TARGET_ARCH=armhf CTARGET=armv6-alpine-linux-musleabihf \
    BOOTSTRAP=nobuildbase CBUILDROOT=/ abuild -r
  ...
  >>> gcc-armhf: Installing for host:
  (1/24) Upgrading .makedepends-gcc-armhf (20190714.104731 -> 20190714.104741)
  (2/24) Purging binutils-armhf (2.31.1-r2)
  ...

[1] apk-tools.git 37fbafcd928c466c82c892a7868d686d710e5d07
    ("add: make virtual packages upgradeable (ref #9957)")
[2] https://gitlab.com/postmarketOS/pmaports/blob/master/cross/gcc-armhf/APKBUILD

Fixes: https://gitlab.alpinelinux.org/alpine/apk-tools/issues/10649
2687 lines
66 KiB
Bash
#!/bin/ash -e

# abuild - build apk packages (light version of makepkg)
# Copyright (c) 2008-2015 Natanael Copa <ncopa@alpinelinux.org>
# Copyright (c) 2016 Timo Teräs <timo.teras@iki.fi>
#
# Distributed under GPL-2
#

program_version=@VERSION@
sysconfdir=@sysconfdir@
datadir=@datadir@

abuild_path=$(readlink -f $0)
git=$(command -v git) || git=true

if ! [ -f "$datadir/functions.sh" ]; then
	echo "$datadir/functions.sh: not found" >&2
	exit 1
fi
. "$datadir/functions.sh"

# defaults
: ${FAKEROOT:="fakeroot"}
: ${SUDO_APK:="abuild-apk"}
: ${APK:="apk"}
: ${ADDUSER:="abuild-adduser"}
: ${ADDGROUP:="abuild-addgroup"}

apk_opt_wait="--wait 30"

if [ -z "$SOURCE_DATE_EPOCH" ]; then
	SOURCE_DATE_EPOCH=$(date -u "+%s")
fi
export SOURCE_DATE_EPOCH

umask 022

# run optional log command for remote logging
|
|
logcmd() {
|
|
${ABUILD_LOG_CMD:-true} "$@"
|
|
return 0
|
|
}
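# example (illustrative, not part of upstream): ABUILD_LOG_CMD can point at
# any logger-like command that takes the message as its arguments, e.g.
#   ABUILD_LOG_CMD="logger -t abuild" abuild -r
# would mirror these log lines to syslog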
|
|
|
|
# we override the default msg, warning and error functions because we want the pkgname in the output
|
|
msg() {
|
|
[ -n "$quiet" ] && return 0
|
|
local prompt="$GREEN>>>${NORMAL}"
|
|
local fake="${FAKEROOTKEY:+${BLUE}*${NORMAL}}"
|
|
local name="${STRONG}${subpkgname:-$pkgname}${NORMAL}"
|
|
printf "${prompt} ${name}${fake}: %s\n" "$1" >&2
|
|
}
|
|
|
|
warning() {
|
|
local prompt="${YELLOW}>>> WARNING:${NORMAL}"
|
|
local fake="${FAKEROOTKEY:+${BLUE}*${NORMAL}}"
|
|
local name="${STRONG}${subpkgname:-$pkgname}${NORMAL}"
|
|
printf "${prompt} ${name}${fake}: %s\n" "$1" >&2
|
|
}
|
|
|
|
error() {
|
|
local prompt="${RED}>>> ERROR:${NORMAL}"
|
|
local fake="${FAKEROOTKEY:+${BLUE}*${NORMAL}}"
|
|
local name="${STRONG}${subpkgname:-$pkgname}${NORMAL}"
|
|
printf "${prompt} ${name}${fake}: %s\n" "$1" >&2
|
|
logcmd "ERROR: $pkgname: $1"
|
|
}
|
|
|
|
cross_creating() {
|
|
test "$CHOST" != "$CTARGET" -a -n "$CBUILDROOT"
|
|
}
|
|
|
|
cross_compiling() {
|
|
test "$CBUILD" != "$CHOST" -a -n "$CBUILDROOT"
|
|
}
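# illustrative values, assuming an x86_64 build host: for the cross gcc
# example in the commit message (CTARGET=armv6-alpine-linux-musleabihf,
# CBUILDROOT=/) with CBUILD=CHOST=x86_64-alpine-linux-musl, cross_creating
# is true (building a cross toolchain) while cross_compiling is false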
|
|
|
|
want_check() {
|
|
[ -n "$ABUILD_BOOTSTRAP" ] && return 1
|
|
cross_compiling && return 1
|
|
options_has "!check" && return 1
|
|
return 0
|
|
}
|
|
|
|
default_cleanup_srcdir() {
|
|
if options_has "chmod-clean" && test -d "$srcdir"; then
|
|
chmod -R +w "$srcdir"
|
|
fi
|
|
rm -rf "$srcdir"
|
|
}
|
|
|
|
cleanup_srcdir() {
|
|
default_cleanup_srcdir
|
|
}
|
|
|
|
cleanup() {
|
|
local i=
|
|
[ -z "$subpkgdir" ] && set_xterm_title ""
|
|
if [ -n "$keep_build" ]; then
|
|
return 0
|
|
fi
|
|
for i; do
|
|
case $i in
|
|
bldroot)
|
|
if [ "$BUILD_ROOT" ]; then
|
|
msg "Cleaning up build chroot"
|
|
abuild-rmtemp "$BUILD_ROOT"
|
|
fi;;
|
|
pkgdir) msg "Cleaning up pkgdir"; rm -rf "$pkgbasedir";;
|
|
srcdir) msg "Cleaning up srcdir"; cleanup_srcdir;;
|
|
deps)
|
|
if [ -n "$uninstall_after" ]; then
|
|
msg "Uninstalling dependencies..."
|
|
undeps
|
|
fi
|
|
;;
|
|
esac
|
|
done
|
|
}
|
|
|
|
die() {
|
|
trap - EXIT
|
|
error "$@"
|
|
cleanup $ERROR_CLEANUP
|
|
exit 1
|
|
}
|
|
|
|
spell_error() {
|
|
die "APKBUILD contains '$1'. It should be '$2'"
|
|
}
|
|
|
|
print_version() {
|
|
msg "$program $program_version"
|
|
}
|
|
|
|
# check if the APKBUILD is basically sane
|
|
default_sanitycheck() {
|
|
local i= j= suggestion=
|
|
msg "Checking sanity of $APKBUILD..."
|
|
[ -z "$pkgname" ] && die "Missing pkgname in APKBUILD"
|
|
[ -z "${pkgname##* *}" ] && die "pkgname contains spaces"
|
|
[ -z "$pkgver" ] && die "Missing pkgver in APKBUILD"
|
|
if [ "$pkgver" != "volatile" ] && [ -z "$nodeps" ]; then
|
|
$APK version --check --quiet "$pkgver" ||\
|
|
die "$pkgver is not a valid version"
|
|
fi
|
|
[ -z "$pkgrel" ] && die "Missing pkgrel in APKBUILD"
|
|
[ -z "$pkgdesc" ] && die "Missing pkgdesc in APKBUILD"
|
|
[ -z "$url" ] && die "Missing url in APKBUILD"
|
|
[ -z "$license" ] && die "Missing license in APKBUILD"
|
|
if [ $(echo "$pkgdesc" | wc -c) -gt 128 ]; then
|
|
die "pkgdesc is too long"
|
|
fi
|
|
is_function package || die "Missing package() function in APKBUILD"
|
|
|
|
if [ -n "$replaces_priority" ] \
|
|
&& ! echo $replaces_priority | egrep -q '^[0-9]+$'; then
|
|
die "replaces_priority must be a number"
|
|
fi
|
|
|
|
if [ -n "$provider_priority" ] \
|
|
&& ! echo $provider_priority | egrep -q '^[0-9]+$'; then
|
|
die "provider_priority must be a number"
|
|
fi
|
|
|
|
# check that no package name starts with -
|
|
for i in $pkgname $subpackages; do
|
|
case $i in
|
|
-*) die "${i%%:*} is not a valid package name";;
|
|
esac
|
|
done
|
|
|
|
for i in $install; do
|
|
local n=${i%.*}
|
|
local suff=${i##*.}
|
|
case "$suff" in
|
|
pre-install|post-install|pre-upgrade|post-upgrade|pre-deinstall|post-deinstall);;
|
|
*) die "$i: unknown install script suffix"
|
|
esac
|
|
if ! subpackages_has "$n" && [ "$n" != "$pkgname" ]; then
|
|
die "$i: install script does not match pkgname or any subpackage"
|
|
fi
|
|
[ -e "$startdir/$i" ] || die "install script $i is missing"
|
|
for j in chown chmod chgrp; do
|
|
if grep -q $j "$startdir"/$i; then
|
|
warning "$i: found $j"
|
|
warning2 "Permissions should be fixed in APKBUILD package()"
|
|
fi
|
|
done
|
|
done
|
|
|
|
for i in $triggers; do
|
|
local f=${i%=*}
|
|
local p=${f%.trigger}
|
|
[ "$f" = "$i" ] && die "$f: triggers must contain '='"
|
|
[ "$p" = "$f" ] && die "$f: triggers scripts must have .trigger suffix"
|
|
if ! subpackages_has "$p" && [ "$p" != "$pkgname" ]; then
|
|
die "$p: trigger script does not match pkgname or any subpackage"
|
|
fi
|
|
|
|
if source_has "$f"; then
|
|
warning "You should not have \$triggers in source"
|
|
continue
|
|
fi
|
|
|
|
[ -e "$startdir"/$f ] || die "trigger script $f is missing"
|
|
done
|
|
if [ -n "$source" ]; then
|
|
for i in $source; do
|
|
if install_has "$i"; then
|
|
warning "You should not have \$install in source"
|
|
continue
|
|
fi
|
|
case "$i" in
|
|
*::*) i=${i%%::*};;
|
|
https://*) makedepends_has wget && warning "wget no longer needs to be in makedepends when source has https://" ;;
|
|
esac
|
|
list_has ${i##*/} $md5sums $sha256sums $sha512sums \
|
|
|| die "${i##*/} is missing in checksums"
|
|
|
|
# verify that our source does not use a bare git tag version
# as the tarball name (typically github)
|
|
if is_remote "$i" && [ "${i#*::}" = "$i" ]; then
|
|
case ${i##*/} in
|
|
v$pkgver.tar.*|$pkgver.tar.*)
|
|
die "source ${i##*/} needs to be renamed to avoid possible collisions"
|
|
;;
|
|
esac
|
|
fi
|
|
done
|
|
fi
|
|
|
|
# verify that everything listed in the checksums is also listed in source
|
|
local algo=
|
|
for algo in md5 sha256 sha512; do
|
|
eval set -- \$${algo}sums
|
|
while [ $# -gt 1 ]; do
|
|
local file="$2"
|
|
shift 2
|
|
source_has $file || die "$file exists in ${algo}sums but is missing in \$source"
|
|
done
|
|
done
|
|
|
|
# common spelling errors
|
|
[ -n "$depend" ] && spell_error depend depends
|
|
[ -n "$makedepend" ] && spell_error makedepend makedepends
|
|
[ -n "$pkguser" ] && spell_error pkguser pkgusers
|
|
[ -n "$pkggroup" ] && spell_error pkggroup pkggroups
|
|
[ -n "$subpackage" ] && spell_error subpackage subpackages
|
|
[ -n "$checkdepend" ] && spell_error checkdepend checkdepends
|
|
|
|
check_maintainer || die "Provide a valid RFC822 maintainer address"
|
|
check_license || warning "Please use valid SPDX license identifiers found at: https://spdx.org/licenses"
|
|
|
|
check_depends_dev || warning "depends_dev found but no development subpackage found"
|
|
check_secfixes_comment || return 1
|
|
|
|
makedepends_has 'g++' && ! options_has toolchain && warning "g++ should not be in makedepends"
|
|
|
|
if ! options_has "!check" && [ -n "$REQUIRE_CHECK" ]; then
|
|
(unset check; . "$APKBUILD"; type check >/dev/null 2>&1) || \
|
|
die "Testsuites (abuild check) are required or need to be explicitly disabled!"
|
|
fi
|
|
|
|
check_provides || die "provides must not contain $pkgname"
|
|
|
|
return 0
|
|
}
|
|
|
|
sanitycheck() {
|
|
default_sanitycheck
|
|
}
|
|
|
|
sumcheck() {
|
|
local algo="$1" sums="$2"
|
|
local dummy f endreturnval originalparams origin file
|
|
|
|
# get number of checksums
|
|
set -- $sums
|
|
local numsums=$(( $# / 2 ))
|
|
|
|
set -- $source
|
|
if [ $# -ne $numsums ]; then
|
|
die "Number of ${algo}sums($numsums) does not correspond to number of sources($#)"
|
|
fi
|
|
fetch || return 1
|
|
msg "Checking ${algo}sums..."
|
|
cd "$srcdir" || return 1
|
|
IFS=$'\n'
|
|
endreturnval=0
|
|
for src in $sums; do
|
|
origin=$1; shift
|
|
if ! echo "$src" | ${algo}sum -c; then
|
|
endreturnval=1
|
|
is_remote $origin || continue
|
|
|
|
local csum="${src:0:8}"
|
|
local file="$SRCDEST/$(filename_from_uri $origin)"
|
|
|
|
echo "Because the remote file above failed the ${algo}sum check it will be renamed."
|
|
echo "Rebuilding will cause it to re-download which in some cases may fix the problem."
|
|
echo "Renaming: ${file##*/} to ${file##*/}.$csum"
|
|
mv "$file" "$file.$csum"
|
|
fi
|
|
done
|
|
unset IFS
|
|
return $endreturnval
|
|
}
|
|
|
|
# for compatibility
|
|
md5check() {
|
|
warning "'md5check' is deprecated. Use 'verify' instead"
|
|
sumcheck md5 "$md5sums"
|
|
}
|
|
|
|
# verify checksums
|
|
verify() {
|
|
local verified=false algo=
|
|
for algo in sha512 sha256 sha1 md5; do
|
|
local sums=
|
|
eval sums=\"\$${algo}sums\"
|
|
if [ -z "$sums" ] || [ -z "$source" ]; then
|
|
continue
|
|
fi
|
|
sumcheck "$algo" "$sums" || return 1
|
|
verified=true
|
|
break
|
|
done
|
|
if [ -n "$source" ] && ! $verified; then
|
|
die "Use 'abuild checksum' to generate/update the checksum(s)"
|
|
fi
|
|
return 0
|
|
}
|
|
|
|
# verify upstream sources
|
|
sourcecheck() {
|
|
local uri
|
|
for uri in $source; do
|
|
is_remote $uri || continue
|
|
case "$uri" in
|
|
*::*)
|
|
uri=${uri##*::}
|
|
;;
|
|
esac
|
|
wget --spider -q "$uri" || return 1
|
|
done
|
|
return 0
|
|
}
|
|
|
|
uri_fetch() {
|
|
local uri="$1"
|
|
mkdir -p "$SRCDEST"
|
|
msg "Fetching $uri"
|
|
abuild-fetch -d "$SRCDEST" "$uri"
|
|
}
|
|
|
|
is_remote() {
|
|
case "${1#*::}" in
|
|
http://*|ftp://*|https://*)
|
|
return 0;;
|
|
esac
|
|
return 1
|
|
}
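# examples (illustrative): "https://example.org/foo-1.0.tar.gz" and
# "foo-1.0.tar.gz::https://example.org/v1.0.tar.gz" are remote,
# "foo.initd" is a local file next to the APKBUILD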
|
|
|
|
filename_from_uri() {
|
|
local uri="$1"
|
|
local filename="${uri##*/}" # $(basename $uri)
|
|
case "$uri" in
|
|
*::*) filename=${uri%%::*};;
|
|
esac
|
|
echo "$filename"
|
|
}
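# examples (illustrative): "https://example.org/foo-1.0.tar.gz" gives
# "foo-1.0.tar.gz"; the rename form
# "foo-1.0.tar.gz::https://example.org/v1.0.tar.gz" gives "foo-1.0.tar.gz"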
|
|
|
|
# try to download the file from the mirror first
|
|
uri_fetch_mirror() {
|
|
local uri="$1"
|
|
if [ -n "$DISTFILES_MIRROR" ]; then
|
|
if is_remote "$DISTFILES_MIRROR"; then
|
|
uri_fetch "$DISTFILES_MIRROR"/$(filename_from_uri $uri)\
|
|
&& return 0
|
|
else
|
|
cp "$DISTFILES_MIRROR"/$(filename_from_uri $uri) \
|
|
"$SRCDEST" && return 0
|
|
fi
|
|
fi
|
|
uri_fetch "$uri"
|
|
}
|
|
|
|
symlinksrc() {
|
|
local s
|
|
mkdir -p "$srcdir"
|
|
for s in $source; do
|
|
if is_remote "$s"; then
|
|
ln -sf "$SRCDEST/$(filename_from_uri $s)" "$srcdir"/
|
|
else
|
|
ln -sf "$startdir/$s" "$srcdir/"
|
|
fi
|
|
done
|
|
}
|
|
|
|
default_fetch() {
|
|
local s
|
|
mkdir -p "$srcdir"
|
|
for s in $source; do
|
|
if is_remote "$s"; then
|
|
uri_fetch_mirror "$s" || return 1
|
|
ln -sf "$SRCDEST/$(filename_from_uri $s)" "$srcdir"/
|
|
else
|
|
ln -sf "$startdir/$s" "$srcdir/"
|
|
fi
|
|
done
|
|
}
|
|
|
|
fetch() {
|
|
default_fetch
|
|
}
|
|
|
|
# verify that all init.d scripts are openrc runscripts
|
|
initdcheck() {
|
|
local i line
|
|
for i in $source; do
|
|
case $i in
|
|
*.initd) line=$(head -n 1 "$srcdir"/$i);;
|
|
*) continue ;;
|
|
esac
|
|
|
|
case "$line" in
|
|
*sbin/openrc-run)
|
|
;;
|
|
*sbin/runscript)
|
|
warning "$i is not an openrc #!/sbin/openrc-run"
|
|
;;
|
|
*) error "$i is not an openrc #!/sbin/openrc-run"
|
|
return 1
|
|
;;
|
|
esac
|
|
done
|
|
}
|
|
|
|
# unpack the sources
|
|
default_unpack() {
|
|
local u
|
|
verify || return 1
|
|
initdcheck || return 1
|
|
mkdir -p "$srcdir"
|
|
local gunzip=$(command -v pigz || echo gunzip)
|
|
[ $gunzip = "/usr/bin/pigz" ] && gunzip="$gunzip -d"
|
|
for u in $source; do
|
|
local s
|
|
if is_remote "$u"; then
|
|
s="$SRCDEST/$(filename_from_uri $u)"
|
|
else
|
|
s="$startdir/$u"
|
|
fi
|
|
case "$s" in
|
|
*.tar)
|
|
msg "Unpacking $s..."
|
|
tar -C "$srcdir" -xf "$s" || return 1;;
|
|
*.tar.gz|*.tgz)
|
|
msg "Unpacking $s..."
|
|
$gunzip -c "$s" | tar -C "$srcdir" -x || return 1;;
|
|
*.tar.bz2)
|
|
msg "Unpacking $s..."
|
|
tar -C "$srcdir" -jxf "$s" || return 1;;
|
|
*.tar.lz)
|
|
msg "Unpacking $s..."
|
|
tar -C "$srcdir" --lzip -xf "$s" || return 1;;
|
|
*.tar.lzma)
|
|
msg "Unpacking $s..."
|
|
unlzma -T 0 -c "$s" | tar -C "$srcdir" -x \
|
|
|| return 1;;
|
|
*.tar.xz)
|
|
msg "Unpacking $s..."
|
|
local threads_opt
|
|
if [ $(readlink -f $(command -v unxz)) != "/bin/busybox" ]; then
|
|
threads_opt="--threads=0"
|
|
fi
|
|
unxz $threads_opt -c "$s" | tar -C "$srcdir" -x || return 1;;
|
|
*.zip)
|
|
msg "Unpacking $s..."
|
|
unzip -n -q "$s" -d "$srcdir" || return 1;;
|
|
esac
|
|
done
|
|
}
|
|
|
|
unpack() {
|
|
default_unpack
|
|
}
|
|
|
|
# cleanup source and package dir
|
|
clean() {
|
|
cleanup srcdir
|
|
cleanup pkgdir
|
|
}
|
|
|
|
# cleanup fetched sources
|
|
cleancache() {
|
|
local s
|
|
for s in $source; do
|
|
if is_remote "$s"; then
|
|
s=$(filename_from_uri $s)
|
|
msg "Cleaning downloaded $s ..."
|
|
rm -f "$SRCDEST/$s"
|
|
fi
|
|
done
|
|
}
|
|
|
|
subpkg_unset() {
|
|
unset subpkgname subpkgsplit subpkgarch
|
|
}
|
|
|
|
subpkg_set() {
|
|
subpkgname=${1%%:*}
|
|
|
|
local _splitarch=${1#*:}
|
|
[ "$_splitarch" = "$1" ] && _splitarch=""
|
|
|
|
subpkgsplit=${_splitarch%%:*}
|
|
[ -z "$subpkgsplit" ] && subpkgsplit="${subpkgname##*-}"
|
|
|
|
subpkgarch=${_splitarch#*:}
|
|
if [ "$subpkgarch" = "$_splitarch" -o -z "$subpkgarch" ]; then
|
|
case "$subpkgname" in
|
|
*-doc | *-openrc | *-lang | *-lang-*) subpkgarch="noarch" ;;
|
|
*) subpkgarch="$pkgarch" ;;
|
|
esac
|
|
fi
|
|
}
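# illustrative example: subpackages="foo-doc:doc:noarch" gives
# subpkgname=foo-doc, subpkgsplit=doc, subpkgarch=noarch; a bare "foo-doc"
# falls back to subpkgsplit=doc (the name suffix) and subpkgarch=noarch
# (the *-doc/*-openrc/*-lang default), everything else inherits $pkgarch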
|
|
|
|
cleanpkg() {
|
|
local i
|
|
getpkgver || return 1
|
|
msg "Cleaning built packages..."
|
|
rm -f "$REPODEST/$repo/src/$pkgname-$pkgver-r$pkgrel.src.tar.gz"
|
|
for i in $allpackages; do
|
|
subpkg_set "$i"
|
|
rm -f "$REPODEST/$repo/${subpkgarch/noarch/$CARCH}/$subpkgname-$pkgver-r$pkgrel.apk"
|
|
done
|
|
subpkg_unset
|
|
|
|
# remove given packages from index
|
|
update_abuildrepo_index
|
|
}
|
|
|
|
# clean all packages except current
|
|
cleanoldpkg() {
|
|
local i j
|
|
getpkgver || return 1
|
|
msg "Cleaning all packages except $pkgver-r$pkgrel..."
|
|
for i in $allpackages; do
|
|
subpkg_set "$i"
|
|
for j in "$REPODEST"/$repo/*/$subpkgname-[0-9]*.apk ; do
|
|
[ "${j##*/}" = "$subpkgname-$pkgver-r$pkgrel.apk" ] \
|
|
&& continue
|
|
rm -f "$j"
|
|
done
|
|
done
|
|
subpkg_unset
|
|
update_abuildrepo_index
|
|
return 0
|
|
}
|
|
|
|
mkusers() {
|
|
local i
|
|
for i in $pkggroups; do
|
|
if ! getent group $i >/dev/null; then
|
|
msg "Creating group $i"
|
|
$ADDGROUP -S $i || return 1
|
|
fi
|
|
done
|
|
for i in $pkgusers; do
|
|
if ! getent passwd $i >/dev/null; then
|
|
local gopt=
|
|
msg "Creating user $i"
|
|
if getent group $i >/dev/null; then
|
|
gopt="-G $i"
|
|
fi
|
|
$ADDUSER -S -D -H $gopt $i || return 1
|
|
fi
|
|
done
|
|
}
|
|
|
|
# helper to update config.sub to a recent version
|
|
update_config_sub() {
|
|
find . -name config.sub | (local changed=false; while read f; do
|
|
if ! ./$f armv6-alpine-linux-muslgnueabihf 2>/dev/null; then
|
|
msg "Updating $f"
|
|
cp "$datadir"/${f##*/} "$f" || return 1
|
|
changed=true
|
|
else
|
|
msg "No update needed for $f"
|
|
fi
|
|
done; $changed)
|
|
}
|
|
|
|
# helper to update config.guess to a recent version
|
|
update_config_guess() {
|
|
find . -name config.guess | (local changed=false; while read f; do
|
|
if grep -q aarch64 "$f" && grep -q ppc64le "$f"; then
|
|
msg "No update needed for $f"
|
|
else
|
|
msg "Updating $f"
|
|
cp "$datadir"/${f##*/} "$f" || return 1
|
|
changed=true
|
|
fi
|
|
done; $changed)
|
|
}
|
|
|
|
runpart() {
|
|
local part=$1
|
|
[ -n "$DEBUG" ] && msg "$part"
|
|
trap "die '$part failed'" EXIT
|
|
if [ -d "$builddir" ]; then
|
|
case "$part" in
|
|
prepare|build|package|check)
|
|
cd "$builddir";;
|
|
esac
|
|
fi
|
|
$part
|
|
trap - EXIT
|
|
}
|
|
|
|
# override those in your build script
|
|
getpkgver() {
|
|
# this func is supposed to be overridden by volatile packages
|
|
if [ "$pkgver" = "volatile" ]; then
|
|
error "Please provide a getpkgver() function in your APKBUILD"
|
|
return 1
|
|
fi
|
|
}
|
|
|
|
have_patches() {
|
|
local i
|
|
for i in $source; do
|
|
case ${i%::*} in
|
|
*.patch) return 0;;
|
|
esac
|
|
done
|
|
return 1
|
|
}
|
|
|
|
default_prepare() {
|
|
local i
|
|
[ -n "$builddir" -a -d "$builddir" ] && cd "$builddir"
|
|
if ! have_patches; then
|
|
return 0
|
|
fi
|
|
[ -d "$builddir" ] || { error "Is \$builddir set correctly?"; return 1; }
|
|
for i in $source; do
|
|
case ${i%::*} in
|
|
*.patch)
|
|
msg "${i%::*}"
|
|
patch ${patch_args:--p1} -i "$srcdir/${i%::*}" || return 1
|
|
;;
|
|
esac
|
|
done
|
|
}
|
|
|
|
prepare() {
|
|
default_prepare
|
|
}
|
|
|
|
build() {
|
|
:
|
|
}
|
|
|
|
# generate a simple tar.gz package of pkgdir
|
|
targz() {
|
|
cd "$pkgdir" || return 1
|
|
mkdir -p "$REPODEST"/src
|
|
tar -czf "$REPODEST"/src/$pkgname-$pkgver-r$pkgrel.tar.gz *
|
|
}
|
|
|
|
postcheck() {
|
|
local dir="$1" name="$2" i=
|
|
msg "Running postcheck for $name"
|
|
# checking for FHS compat
|
|
if ! options_has "!fhs"; then
|
|
for i in "$dir"/srv/* "$dir"/usr/local/* "$dir"/opt/*; do
|
|
if [ -e "$i" ]; then
|
|
error "Packages must not put anything under /srv, /usr/local or /opt"
|
|
return 1
|
|
fi
|
|
done
|
|
if [ -d "$dir"/usr/var ]; then
|
|
error "Found /usr/var, localstatedir is most likely wrong"
|
|
return 1
|
|
fi
|
|
fi
|
|
|
|
# remove *.la files if libtool is not set
|
|
if ! options_has "libtool"; then
|
|
find "$dir" -name '*.la' -type f -delete
|
|
fi
|
|
|
|
# look for /usr/lib/charset.alias
|
|
if [ -e "$dir"/usr/lib/charset.alias ] \
|
|
&& ! options_has "charset.alias"; then
|
|
error "Found /usr/lib/charset.alias"
|
|
return 1
|
|
fi
|
|
# look for /etc/init.d and /etc/conf.d
|
|
if [ -e "$dir"/etc/init.d -o -e "$dir"/etc/conf.d ] \
|
|
&& ! is_openrc_pkg "$name"; then
|
|
warning "Found OpenRC directory (/etc/conf.d or /etc/init.d) but name doesn't end with -openrc"
|
|
fi
|
|
# look for static archives
|
|
if ! is_static_pkg "$name"; then
|
|
for i in $(find "$dir"/lib "$dir"/usr/lib -name '*.a' 2>/dev/null); do
|
|
warning "Found static archive on ${i##*$dir/} but name doesn't end with -static"
|
|
done
|
|
fi
|
|
# look for /usr/share/doc
|
|
if [ -e "$dir"/usr/share/doc ] \
|
|
&& ! is_doc_pkg "$name"; then
|
|
warning "Found /usr/share/doc but package name doesn't end with -doc"
|
|
fi
|
|
# look for /usr/share/man
|
|
if [ -e "$dir"/usr/share/man ]; then
|
|
if ! is_doc_pkg "$name"; then
|
|
warning "Found /usr/share/man but package name doesn't end with -doc"
|
|
fi
|
|
# check for uncompressed man pages
|
|
i=$(find "$dir"/usr/share/man -name '*.[0-8]' -type f | sed "s|^$dir|\t|")
|
|
if [ -n "$i" ]; then
|
|
error "Found uncompressed man pages:"
|
|
echo "$i"
|
|
return 1
|
|
fi
|
|
fi
|
|
# check directory permissions
|
|
i=$(find "$dir" -type d -perm -777 | sed "s|^$dir|\t|")
|
|
if [ -n "$i" ]; then
|
|
warning "World writeable directories found:"
|
|
echo "$i"
|
|
fi
|
|
# check that we don't have any suid root binaries that are not PIE
|
|
i=$(find "$dir" -type f -perm /6000 \
|
|
| xargs scanelf --nobanner --etype ET_EXEC \
|
|
| sed "s|ET_EXEC $dir|\t|")
|
|
if [ -n "$i" ]; then
|
|
error "Found non-PIE files that has SUID:"
|
|
echo "$i"
|
|
return 1
|
|
fi
|
|
# test suid bit on executable
|
|
if ! options_has "suid"; then
|
|
i=$(find "$dir" \( -perm -u+s -o -perm -g+s \) -a -type f \
|
|
-a -perm -o+x)
|
|
if [ -n "$i" ]; then
|
|
error "Found executable files with SUID bit set:"
|
|
echo "$i"
|
|
return 1
|
|
fi
|
|
fi
|
|
|
|
# test for textrels
|
|
if ! options_has "textrels"; then
|
|
local res="$(scanelf --recursive --textrel --quiet "$dir")"
|
|
if [ -n "$res" ]; then
|
|
error "Found textrels:"
|
|
echo "$res"
|
|
return 1
|
|
fi
|
|
fi
|
|
return 0
|
|
}
|
|
|
|
pre_split() {
|
|
if [ -z "$subpkgname" ]; then
|
|
return 0
|
|
fi
|
|
# the subpackages should not inherit these from main package
|
|
provides=""
|
|
install_if=""
|
|
}
|
|
|
|
prepare_subpackages() {
|
|
local i
|
|
cd "$startdir"
|
|
for i in $subpackages; do
|
|
# call abuild recursively, setting subpkg{dir,name}
|
|
( subpkg_set "$i"; msg "Running split function $subpkgsplit..."; \
|
|
subpkgdir="$pkgbasedir/$subpkgname" subpkgname="$subpkgname" subpkgarch="$subpkgarch" \
|
|
"$abuild_path" $forceroot pre_split $subpkgsplit prepare_package \
|
|
&& postcheck "$pkgbasedir/$subpkgname" "$subpkgname" ) || return 1
|
|
done
|
|
postcheck "$pkgdir" "$pkgname" || return 1
|
|
# post check for /usr/share/locale
|
|
if [ -d "$pkgdir"/usr/share/locale ]; then
|
|
warning "Found /usr/share/locale"
|
|
warning2 "Maybe add \$pkgname-lang to subpackages?"
|
|
fi
|
|
}
|
|
|
|
default_lang() {
|
|
pkgdesc="Languages for package $pkgname"
|
|
install_if="$pkgname=$pkgver-r$pkgrel lang"
|
|
|
|
local dir
|
|
for dir in ${langdir:-/usr/share/locale}; do
|
|
mkdir -p "$subpkgdir"/${dir%/*}
|
|
mv "$pkgdir"/"$dir" "$subpkgdir"/"$dir" || return 1
|
|
done
|
|
}
|
|
|
|
lang() {
|
|
default_lang
|
|
}
|
|
|
|
default_lang_subpkg() {
|
|
if [ -z "$lang" ]; then
|
|
error "lang is not set"
|
|
return 1
|
|
fi
|
|
pkgdesc="$pkgname language pack for $lang"
|
|
install_if="$pkgname=$pkgver-r$pkgrel lang-$lang"
|
|
|
|
local dir
|
|
for dir in ${langdir:-/usr/share/locale}; do
|
|
mkdir -p "$subpkgdir"/$dir
|
|
mv "$pkgdir"/$dir/$lang* \
|
|
"$subpkgdir"/$dir/ \
|
|
|| return 1
|
|
done
|
|
}
|
|
|
|
lang_subpkg() {
|
|
default_lang_subpkg
|
|
}
|
|
|
|
prepare_language_packs() {
|
|
local lang
|
|
for lang in $linguas; do
|
|
lang="$lang" \
|
|
subpkgname="$pkgname-lang-$lang" \
|
|
subpkgdir="$pkgbasedir"/$subpkgname \
|
|
"$abuild_path" $forceroot lang_subpkg prepare_package || return 1
|
|
done
|
|
}
|
|
|
|
# echo '-dirty' if git is not clean
|
|
git_dirty() {
|
|
[ $($git status -s "$startdir" | wc -l) -ne 0 ] && echo "-dirty"
|
|
}
|
|
|
|
# echo last commit hash id
|
|
git_last_commit() {
|
|
$git log --format=oneline -n 1 "$startdir" | awk '{print $1}'
|
|
}
|
|
|
|
get_maintainer() {
|
|
if [ -z "$maintainer" ]; then
|
|
maintainer=$(awk -F': ' '/# *Maintainer/ {print $2}' "$APKBUILD")
|
|
# remove surrounding whitespace
|
|
maintainer=$(echo "$maintainer" | xargs)
|
|
fi
|
|
}
|
|
|
|
check_maintainer() {
|
|
get_maintainer
|
|
if [ -z "$maintainer" ]; then
|
|
warning "No maintainer"
|
|
else
|
|
# try to check for a valid rfc822 address
|
|
case "$maintainer" in
|
|
*[A-Za-z0-9]*\ \<*@*.*\>) ;;
|
|
*) return 1 ;;
|
|
esac
|
|
fi
|
|
}
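# illustrative example of an accepted maintainer line in an APKBUILD
# (name and address are hypothetical):
#   # Maintainer: Jane Doe <jane@example.org>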
|
|
|
|
check_license() {
|
|
local ret=0
|
|
local license_list=/usr/share/spdx/license.lst
|
|
local exclude="AND OR WITH"
|
|
if options_has "!spdx" || ! [ -f "$license_list" ]; then
|
|
return 0
|
|
fi
|
|
local i; for i in $license; do
|
|
list_has "$i" $exclude && continue
|
|
if ! grep -q -x -F "$i" "$license_list"; then
|
|
ret=1
|
|
warning "\"$i\" is not a known license"
|
|
fi
|
|
done
|
|
return $ret
|
|
}
|
|
|
|
check_secfixes_comment() {
|
|
local c=$(sed -E -n -e '/^# secfixes:/,/(^[^#]|^$)/p' $APKBUILD | grep '^#')
|
|
local invalid=$(echo "$c" \
|
|
| grep -v -E '(^# secfixes:|^# +- [A-Z0-9-]+|^# [0-9]+.*:$|^#$)')
|
|
if [ -z "$invalid" ]; then
|
|
return 0
|
|
fi
|
|
|
|
# check if there are tabs
|
|
if echo "$invalid" | grep -q $'\t'; then
|
|
error "secfixes comment must not have tabs:"
|
|
echo "$c" | grep $'\t' >&2
|
|
return 1
|
|
fi
|
|
|
|
error "secfixes comment is not valid:"
|
|
echo "$invalid" >&2
|
|
return 1
|
|
}
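# illustrative example of a secfixes comment that passes the pattern above
# (version and CVE are hypothetical):
#   # secfixes:
#   # 1.2.3-r0:
#   #   - CVE-2019-12345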
|
|
|
|
check_depends_dev() {
|
|
if [ -z "$depends_dev" ]; then
|
|
return 0
|
|
fi
|
|
local i
|
|
for i in $pkgname $subpackages; do
|
|
case "${i%%:*}" in
|
|
*-dev) return 0 ;;
|
|
esac
|
|
done
|
|
return 1
|
|
}
|
|
|
|
check_provides() {
|
|
local i
|
|
for i in $provides; do
|
|
if [ "${i%%[<>=]*}" = "$pkgname" ]; then
|
|
return 1
|
|
fi
|
|
done
|
|
return 0
|
|
}
|
|
|
|
prepare_metafiles() {
|
|
getpkgver || return 1
|
|
local name=${subpkgname:-$pkgname}
|
|
[ -z "${name##* *}" ] && die "package name contains spaces"
|
|
local dir=${subpkgdir:-$pkgdir}
|
|
local pkg="$name-$pkgver-r$pkgrel.apk"
|
|
local pkginfo="$controldir"/.PKGINFO
|
|
local sub
|
|
|
|
[ ! -d "$dir" ] && die "Missing $dir"
|
|
cd "$dir"
|
|
mkdir -p "$controldir"
|
|
local builddate="$SOURCE_DATE_EPOCH"
|
|
|
|
# Fix package size on several filesystems
|
|
case "$(df -PT . | awk 'END {print $2}')" in
|
|
btrfs|ecryptfs|zfs)
|
|
sync;;
|
|
esac
|
|
|
|
local size=$(du -sk | awk '{print $1 * 1024}')
|
|
|
|
if [ "$arch" != "$apkbuild_arch" ]; then
|
|
local msg="Split function set arch=\"$arch\" for $name, use subpackages=pkg:split:arch format instead"
|
|
[ "$arch" != "noarch" ] && die "$msg"
|
|
warning "$msg"
|
|
subpkgarch="$arch"
|
|
fi
|
|
|
|
echo "# Generated by $(basename "$abuild_path") $program_version" >"$pkginfo"
|
|
if [ -n "$FAKEROOTKEY" ]; then
|
|
echo "# using $($FAKEROOT -v)" >> "$pkginfo"
|
|
fi
|
|
echo "# $(date -u -d @$SOURCE_DATE_EPOCH)" >> "$pkginfo"
|
|
cat >> "$pkginfo" <<-EOF
|
|
pkgname = $name
|
|
pkgver = $pkgver-r$pkgrel
|
|
pkgdesc = $pkgdesc
|
|
url = $url
|
|
builddate = $builddate
|
|
packager = ${PACKAGER:-"Unknown"}
|
|
size = $size
|
|
arch = ${subpkgarch:-$pkgarch}
|
|
origin = $pkgname
|
|
EOF
|
|
local i deps
|
|
deps="$depends"
|
|
if [ "$pkgname" != "busybox" ] && ! depends_has busybox && ! depends_has /bin/sh; then
|
|
for i in $install $triggers; do
|
|
local s=${i%=*}
|
|
[ "$name" != "${s%.*}" ] && continue
|
|
if head -n 1 "$startdir/$s" | grep '^#!/bin/sh' >/dev/null ; then
|
|
msg "Script found. /bin/sh added as a dependency for $pkg"
|
|
deps="$deps /bin/sh"
|
|
break
|
|
fi
|
|
done
|
|
fi
|
|
|
|
# store last_commit in global var so we only call git once
|
|
if [ -z "$last_commit" ]; then
|
|
last_commit="$(git_last_commit)$(git_dirty)"
|
|
fi
|
|
echo "commit = $last_commit" >> "$pkginfo"
|
|
|
|
get_maintainer
|
|
if [ -n "$maintainer" ]; then
|
|
echo "maintainer = $maintainer" >> "$pkginfo"
|
|
fi
|
|
|
|
if [ -n "$replaces_priority" ]; then
|
|
echo "replaces_priority = $replaces_priority" >> "$pkginfo"
|
|
fi
|
|
|
|
if [ -n "$provider_priority" ]; then
|
|
echo "provider_priority = $provider_priority" >> "$pkginfo"
|
|
fi
|
|
|
|
echo "license = $license" >> "$pkginfo"
|
|
for i in $replaces; do
|
|
echo "replaces = $i" >> "$pkginfo"
|
|
done
|
|
for i in $deps; do
|
|
if [ "$i" != "$name" ]; then
|
|
echo "depend = $i" >> "$pkginfo"
|
|
fi
|
|
done
|
|
for i in $provides; do
|
|
echo "provides = $i" >> "$pkginfo"
|
|
done
|
|
for i in $triggers; do
|
|
local f=${i%=*}
|
|
local dirs=${i#*=}
|
|
[ "${f%.trigger}" != "$name" ] && continue
|
|
echo "triggers = ${dirs//:/ }" >> "$pkginfo"
|
|
done
|
|
if [ -n "$install_if" ]; then
|
|
echo "install_if = $(echo $install_if)" >> "$pkginfo"
|
|
fi
|
|
|
|
local metafiles=".PKGINFO"
|
|
for i in $install $triggers; do
|
|
local f=${i%=*}
|
|
local n=${f%.*}
|
|
if [ "$n" != "$name" ]; then
|
|
continue
|
|
fi
|
|
script=${f#$name}
|
|
msg "Adding $script"
|
|
cp "$startdir/$f" "$controldir/$script" || return 1
|
|
chmod +x "$controldir/$script"
|
|
metafiles="$metafiles $script"
|
|
done
|
|
echo $metafiles | tr ' ' '\n' > "$controldir"/.metafiles
|
|
}
|
|
|
|
prepare_trace_rpaths() {
|
|
local dir=${subpkgdir:-$pkgdir}
|
|
local etype= soname= file= sover=
|
|
[ "${subpkgarch:-$pkgarch}" = "noarch" ] && return 0
|
|
options_has "!tracedeps" && return 0
|
|
# let's record all the places we should look for .so files - all rpaths
|
|
scanelf --quiet --recursive --rpath "$dir" \
|
|
| sed -e 's/[[:space:]].*//' -e 's/:/\n/' | sort -u \
|
|
>"$controldir"/.rpaths
|
|
if grep -q -x '/usr/lib' "$controldir"/.rpaths; then
|
|
warning "Redundant /usr/lib in rpath found"
|
|
fi
|
|
if grep '^/home/' "$controldir"/.rpaths; then
|
|
error "Has /home/... in rpath"
|
|
return 1
|
|
fi
|
|
}
|
|
|
|
# search for broken symlinks so we later can pull in proper depends
|
|
prepare_symlinks() {
|
|
local target
|
|
local dir="${subpkgdir:-$pkgdir}"
|
|
options_has "!tracedeps" && return 0
|
|
cd "$dir" || return 1
|
|
find -type l | while read symlink; do
|
|
target=$(readlink "$symlink")
|
|
if ! [ -e "$dir$(normalize_target_path "$target" "$symlink")" ]; then
|
|
echo "$symlink $target" >> "$controldir"/.symlinks
|
|
fi
|
|
done
|
|
}
|
|
|
|
prepare_pkgconfig_provides() {
|
|
local dir="${subpkgdir:-$pkgdir}"
|
|
options_has "!tracedeps" && return 0
|
|
cd "$dir" || return 1
|
|
for i in usr/lib/pkgconfig/*.pc; do
|
|
if ! [ -e "$i" ]; then
|
|
continue
|
|
fi
|
|
local f=${i##*/}
|
|
local v=$(PKG_CONFIG_PATH="$dir"/usr/lib/pkgconfig PKG_CONFIG_MAXIMUM_TRAVERSE_DEPTH=1 pkg-config \
|
|
--modversion ${f%.pc})
|
|
echo "$pcprefix${f%.pc}=${v:-0}" >> "$controldir"/.provides-pc
|
|
done
|
|
}
|
|
|
|
prepare_command_provides() {
|
|
local dir="${subpkgdir:-$pkgdir}"
|
|
options_has "!tracedeps" && return 0
|
|
cd "$dir" || return 1
|
|
for i in bin/* sbin/* usr/bin/* usr/sbin/*; do
|
|
if ! [ -x "$i" ]; then
|
|
continue
|
|
fi
|
|
local f=${i##*/}
|
|
echo $f >> "$controldir"/.provides-command
|
|
done
|
|
}
|
|
|
|
# check if dir has arch specific binaries
|
|
dir_has_arch_binaries() {
|
|
local dir="$1"
|
|
# if scanelf returns something, then we have binaries
|
|
[ -n "$(scanelf -R "$dir" | head -n 1)" ] && return 0
|
|
|
|
# look for static *.a
|
|
[ -n "$(find "$dir" -type f -name '*.a' | head -n 1)" ] && return 0
|
|
|
|
return 1
|
|
}
|
|
|
|
# returns true if this is the -dev package
|
|
is_dev_pkg() {
|
|
test "${1%-dev}" != "$1"
|
|
}
|
|
|
|
# returns true if this is the -static package
|
|
is_static_pkg() {
|
|
test "${1%-static}" != "$1"
|
|
}
|
|
|
|
# returns true if this is the -doc package
|
|
is_doc_pkg() {
|
|
test "${1%-doc}" != "$1"
|
|
}
|
|
|
|
# returns true if this is the -openrc package
|
|
is_openrc_pkg() {
|
|
test "${1%-openrc}" != "$1"
|
|
}
|
|
|
|
# check that noarch is set if needed
|
|
archcheck() {
|
|
options_has "!archcheck" && return 0
|
|
if dir_has_arch_binaries "${subpkgdir:-$pkgdir}"; then
|
|
[ "${subpkgarch:-$pkgarch}" != "noarch" ] && return 0
|
|
error "Arch specific binaries found so arch must not be set to \"noarch\""
|
|
return 1
|
|
elif [ "${subpkgarch:-$pkgarch}" != "noarch" ] && ! is_dev_pkg "$subpkgname" && ! is_static_pkg "$subpkgname"; then
|
|
# we don't want the -dev package to be noarch
|
|
warning "No arch specific binaries found so arch should probably be set to \"noarch\""
|
|
fi
|
|
return 0
|
|
}
|
|
|
|
prepare_package() {
|
|
msg "Preparing ${subpkgname:+sub}package ${subpkgname:-$pkgname}..."
|
|
stripbin
|
|
prepare_metafiles \
|
|
&& prepare_trace_rpaths \
|
|
&& prepare_symlinks \
|
|
&& prepare_pkgconfig_provides \
|
|
&& prepare_command_provides \
|
|
|| return 1
|
|
archcheck
|
|
}
|
|
|
|
pkginfo_val() {
|
|
local key="$1"
|
|
local file="$2"
|
|
awk -F ' = ' "\$1 == \"$key\" {print \$2}" "$file"
|
|
}
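# illustrative usage: pkginfo_val pkgver "$controldir"/.PKGINFO prints the
# value stored after "pkgver = " in the generated .PKGINFO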
|
|
|
|
# find real path to so files
|
|
real_so_path() {
|
|
local so="$1"
|
|
shift
|
|
while [ $# -gt 0 ]; do
|
|
[ -e "$1"/$so ] && realpath "$1/$so" && return 0
|
|
shift
|
|
done
|
|
error "$so: path not found"
|
|
return 1
|
|
}
|
|
|
|
# search rpaths and /usr/lib /lib for given so files
|
|
find_so_files() {
|
|
local rpaths=$(cat "$1")
|
|
shift
|
|
while [ $# -gt 0 ]; do
|
|
real_so_path "$1" /usr/lib /lib $rpaths || return 1
|
|
shift
|
|
done
|
|
return 0
|
|
}
|
|
|
|
subpkg_provides_prefixed_so() {
|
|
[ -n "$sonameprefix" ] && grep -q -w "^$sonameprefix$1" \
|
|
"$pkgbasedir"/.control.*/.provides-so 2>/dev/null
|
|
}
|
|
|
|
subpkg_provides_so() {
|
|
grep -q -w "^$1" "$pkgbasedir"/.control.*/.provides-so 2>/dev/null
|
|
}
|
|
|
|
subpkg_provides_prefixed_pc() {
|
|
[ -n "$pcprefix" ] && grep -q -w "^$pcprefix$1" \
|
|
"$pkgbasedir"/.control.*/.provides-pc 2>/dev/null
|
|
}
|
|
|
|
subpkg_provides_pc() {
|
|
grep -q -w "^${1%%[<>=]*}" "$pkgbasedir"/.control.*/.provides-pc \
|
|
2>/dev/null
|
|
}
|
|
|
|
trace_apk_deps() {
|
|
local name="$1"
|
|
local dir="$2"
|
|
local parch="$3"
|
|
local i= found= autodeps= deppkgs= missing=
|
|
local apkroot=
|
|
|
|
case "$parch" in
|
|
$CBUILD_ARCH) ;;
|
|
$CARCH | $CTARGET_ARCH) apkroot="--root $CBUILDROOT --arch $CTARGET_ARCH" ;;
|
|
esac
|
|
|
|
msg "Tracing dependencies..."
|
|
# add pkgconfig if usr/lib/pkgconfig is found
|
|
if [ -d "$pkgbasedir"/$name/usr/lib/pkgconfig ] \
|
|
&& ! grep -q '^depend = pkgconfig' "$dir"/.PKGINFO; then
|
|
autodeps="$autodeps pkgconfig"
|
|
fi
|
|
|
|
# special case for libpthread: we need to depend on libgcc
|
|
if [ "$CLIBC" = "uclibc" ] && [ -f "$dir"/.needs-so ] \
|
|
&& grep -q -w '^libpthread.so.*' "$dir"/.needs-so \
|
|
&& ! grep -q -w "^depend = libgcc" "$dir"/.PKGINFO; then
|
|
autodeps="$autodeps libgcc"
|
|
msg " added libgcc (due to libpthread)"
|
|
fi
|
|
|
|
[ -f "$dir"/.needs-so ] && for i in $(cat "$dir"/.needs-so); do
|
|
# first check if it's provided by the same APKBUILD
|
|
grep -q -w "^$sonameprefix$i" "$dir"/.provides-so 2>/dev/null && continue
|
|
|
|
if subpkg_provides_prefixed_so "$i"; then
|
|
autodeps="$autodeps so:$sonameprefix$i"
|
|
elif subpkg_provides_so "$i" \
|
|
|| $APK $apkroot info --quiet --installed "so:$i"; then
|
|
autodeps="$autodeps so:$i"
|
|
else
|
|
missing="$missing $i"
|
|
fi
|
|
done
|
|
|
|
# find all packages that holds the so files
|
|
if [ -f "$dir"/.rpaths ]; then
|
|
local so_files=$(find_so_files "$dir"/.rpaths $missing) \
|
|
|| return 1
|
|
deppkgs=$($APK $apkroot info --quiet --who-owns $so_files) || return 1
|
|
fi
|
|
|
|
for found in $deppkgs; do
|
|
if grep -w "^depend = ${found}$" "$dir"/.PKGINFO >/dev/null ; then
|
|
warning "You can remove '$found' from depends"
|
|
continue
|
|
fi
|
|
autodeps="$autodeps $found"
|
|
done
|
|
|
|
# symlink targets
|
|
for i in $(sort -u "$dir"/.symlinks-needs 2>/dev/null); do
|
|
autodeps="$autodeps $i"
|
|
done
|
|
|
|
# pkg-config depends
|
|
for i in $(sort -u "$dir"/.needs-pc 2>/dev/null); do
|
|
# first check if it's provided by the same APKBUILD
|
|
grep -q -w "^$pcprefix$i" "$dir"/.provides-pc 2>/dev/null && continue
|
|
|
|
if subpkg_provides_prefixed_pc "$i"; then
|
|
autodeps="$autodeps pc:$pcprefix$i"
|
|
elif subpkg_provides_pc "$i" \
|
|
|| $APK $apkroot info --quiet --installed "pc:$i"; then
|
|
local provider=$(apk $apkroot search --quiet "pc:$i")
|
|
if list_has "$provider" $depends_dev; then
|
|
warning "$provider should be removed from depends_dev"
|
|
fi
|
|
autodeps="$autodeps pc:$i"
|
|
else
|
|
warning "Could not find any provider for pc:$i"
|
|
local pcfile=/usr/lib/pkgconfig/"${i%%[<>=]*}".pc
|
|
if [ -e "$pcfile" ]; then
|
|
local owner=$($APK $apkroot info --quiet --who-owns $pcfile)
|
|
warning "${owner:-package providing $pcfile} needs to be rebuilt"
|
|
fi
|
|
fi
|
|
done
|
|
|
|
echo "# automatically detected:" >> "$dir"/.PKGINFO
|
|
if [ -f "$dir"/.provides-so ]; then
|
|
sed 's/^\(.*\) \([0-9].*\)/provides = so:\1=\2/' \
|
|
"$dir"/.provides-so | sort -u \
|
|
>> "$dir"/.PKGINFO
|
|
fi
|
|
if [ -f "$dir"/.provides-pc ]; then
|
|
sed 's/^/provides = pc:/' "$dir"/.provides-pc | sort -u \
|
|
>> "$dir"/.PKGINFO
|
|
fi
|
|
if [ -f "$dir"/.provides-command ]; then
|
|
sed 's/^/provides = cmd:/' "$dir"/.provides-command | sort -u \
|
|
>> "$dir"/.PKGINFO
|
|
fi
|
|
[ -z "$autodeps" ] && return 0
|
|
for i in $autodeps; do
|
|
echo "depend = $i"
|
|
done | sort -u >> "$dir"/.PKGINFO
|
|
# display all depends
|
|
sed -n '/^depend =/s/depend = /\t/p' "$dir"/.PKGINFO >&2
|
|
}
|
|
|
|
find_scanelf_paths() {
|
|
local datadir="$1"
|
|
local paths="$datadir/lib:$datadir/usr/lib" i= rpaths=
|
|
if [ -n "$ldpath" ]; then
|
|
paths="$paths:$(echo "${datadir}${ldpath}" | sed "s|:|:$datadir|g")"
|
|
fi
|
|
# search in all rpaths
|
|
for rpaths in "$pkgbasedir"/.control.*/.rpaths; do
|
|
[ -f "$rpaths" ] || continue
|
|
while read i; do
|
|
local dir="${datadir}${i}"
|
|
IFS=:
|
|
if [ -d "$dir" ] && ! list_has "$dir" $paths; then
|
|
paths="$paths:${dir}"
|
|
fi
|
|
unset IFS
|
|
done < "$rpaths"
|
|
done
|
|
echo "$paths"
|
|
}
|
|
|
|
scan_shared_objects() {
|
|
local name="$1" controldir="$2" datadir="$3"
|
|
local opt= i=
|
|
|
|
if [ "${subpkgarch:-$pkgarch}" = "noarch" ]; then
|
|
return 0
|
|
fi
|
|
|
|
# allow spaces in paths
|
|
IFS=:
|
|
set -- $(find_scanelf_paths "$datadir")
|
|
unset IFS
|
|
|
|
# sanity check, verify that each path is prefixed with datadir
|
|
for i; do
|
|
if [ "${i#$datadir}" = "$i" ]; then
|
|
error "Internal error in scanelf paths"
|
|
return 1
|
|
fi
|
|
done
|
|
|
|
if options_has "ldpath-recursive"; then
|
|
opt="--recursive"
|
|
fi
|
|
msg "Scanning shared objects"
|
|
# lets tell all the .so files this package provides in .provides-so
|
|
scanelf --nobanner --soname $opt "$@" | while read etype soname file; do
|
|
# if soname field is missing, soname will be the filepath
|
|
sover=0
|
|
if [ -z "$file" ]; then
|
|
file="$soname"
|
|
soname=${soname##*/}
|
|
fi
|
|
|
|
# we only want shared libs
|
|
case $soname in
|
|
*.so|*.so.[0-9]*|*.c32);;
|
|
*) continue;;
|
|
esac
|
|
|
|
case "$file" in
|
|
*.so.[0-9]*) sover=${file##*.so.};;
|
|
*.so)
|
|
# filter out sonames with version when file does not
|
|
# have version
|
|
case "$soname" in
|
|
*.so.[0-9]*)
|
|
if options_has "sover-namecheck"; then
|
|
continue
|
|
fi
|
|
esac
|
|
;;
|
|
esac
|
|
list_has "$soname" $somask && continue
|
|
echo "$sonameprefix$soname $sover"
|
|
# use awk to filter out dupes that have sover = 0
|
|
done | awk '{ if (so[$1] == 0) so[$1] = $2; }
|
|
END { for (i in so) print(i " " so[i]); }' \
|
|
| sort -u > "$controldir"/.provides-so
|
|
|
|
# verify that we don't have any duplicates
|
|
local dupes="$(cut -d' ' -f1 "$controldir"/.provides-so | uniq -d)"
|
|
if [ -n "$dupes" ]; then
|
|
die "provides multiple versions of same shared object: $dupes"
|
|
fi
|
|
|
|
# now find the so dependencies
|
|
scanelf --nobanner --recursive --needed "$datadir" | tr ' ' ':' \
|
|
| awk -F ":" '$2 != "" && ($1 == "ET_DYN" || $1 == "ET_EXEC") {print $2}' \
|
|
| sed 's:,:\n:g' | sort -u \
|
|
| while read soname; do
|
|
# only add files that are not self provided
|
|
grep -q -w "^$sonameprefix$soname" "$controldir"/.provides-so \
|
|
|| list_has "$soname" $somask \
|
|
|| echo $soname
|
|
done > "$controldir"/.needs-so
|
|
}
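# illustrative result: .provides-so ends up with lines like
# "libfoo.so.1 1.2.3" (soname plus version taken from the file name) and
# .needs-so with bare sonames such as "libz.so.1" that this package does
# not provide itself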
|
|
|
|
# normalize a symlink target path (1st arg)
|
|
# Converts a relative path to absolute with respect to the symlink
|
|
# path (2nd arg).
|
|
normalize_target_path() {
|
|
local path=$1
|
|
[ "${path:0:1}" = / ] || path=$(dirname "$2")/$path
|
|
|
|
local oifs="$IFS" pathstr= i=
|
|
IFS='/'
|
|
set -- $path
|
|
for i; do
|
|
case "$i" in
|
|
"."|"") continue;;
|
|
"..") pathstr="${pathstr%%/${pathstr##*/}}";;
|
|
*) pathstr="${pathstr}/$i";;
|
|
esac
|
|
done
|
|
echo "$pathstr"
|
|
}
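# worked example (illustrative): for a symlink usr/lib/libfoo.so whose
# target is ../../lib/libfoo.so.1, this prints /lib/libfoo.so.1, which the
# callers then test relative to the package data directory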
|
|
|
|
# find which package provides file that symlink points to
|
|
scan_symlink_targets() {
|
|
local name="$1" dir="$2" datadir="$3"
|
|
local symfile= targetpath=
|
|
cd "$datadir"
|
|
for symfile in "$pkgbasedir"/.control.*/.symlinks; do
|
|
local d="${symfile%/.symlinks}"
|
|
if ! [ -e "$symfile" ] || [ "$d" = "$dir" ]; then
|
|
continue
|
|
fi
|
|
|
|
while read symlink target; do
|
|
targetpath=$datadir$(normalize_target_path "$target" "$symlink")
|
|
if [ -e "$targetpath" ] || [ -L "$targetpath" ]; then
|
|
echo "$name=$pkgver-r$pkgrel" \
|
|
>> "$d"/.symlinks-needs
|
|
fi
|
|
done < "$symfile"
|
|
done
|
|
}
|
|
|
|
#find pkg-config dependencies
|
|
scan_pkgconfig_depends() {
|
|
local provides_pc="$1" controldir= name= datadir=
|
|
[ -e "$provides_pc" ] || return 0
|
|
controldir="${provides_pc%/*}"
|
|
name="$(pkginfo_val pkgname "$controldir"/.PKGINFO)"
|
|
datadir="$pkgbasedir"/$name
|
|
for i in $(sort -u "$provides_pc"); do
|
|
PKG_CONFIG_PATH="$datadir"/usr/lib/pkgconfig pkg-config \
|
|
--print-requires \
|
|
--print-requires-private ${i%=*} \
|
|
| sed -E 's/\s*([<>=]+)\s*/\1/' \
|
|
| while read pc; do
|
|
# only add files that are not self provided
|
|
if ! grep -q -w "^${pc%%[<>=]*}" "$provides_pc"; then
|
|
echo "$pc" >> "$controldir"/.needs-pc
|
|
fi
|
|
done
|
|
done
|
|
}
|
|
|
|
# read size in bytes from stdin and show as human readable
|
|
human_size() {
|
|
awk '{ split("B KB MB GB TB PB", type)
|
|
for(i=5; y < 1 && $1 > 0; i--)
|
|
y = $1 / (2^(10*i))
|
|
printf("%.1f %s\n", y, type[i+2]) }'
|
|
}
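# illustrative example: echo 1536000 | human_size  prints "1.5 MB"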
|
|
|
|
create_apks() {
|
|
local file= dir= name= ver= apk= datadir= size=
|
|
local gzip=$(command -v pigz || echo gzip)
|
|
getpkgver || return 1
|
|
rmdir "$pkgdir"/usr/lib \
|
|
"$pkgdir"/usr/bin \
|
|
"$pkgdir"/usr/share \
|
|
"$pkgdir"/usr \
|
|
"$pkgdir"/etc/ \
|
|
2>/dev/null || :
|
|
if ! options_has "!tracedeps"; then
|
|
for file in "$pkgbasedir"/.control.*/.PKGINFO; do
|
|
dir="${file%/.PKGINFO}"
|
|
name="$(pkginfo_val pkgname $file)"
|
|
datadir="$pkgbasedir"/$name
|
|
subpkgname=$name
|
|
scan_shared_objects "$name" "$dir" "$datadir"
|
|
scan_symlink_targets "$name" "$dir" "$datadir"
|
|
done
|
|
for file in "$pkgbasedir"/.control.*/.provides-pc; do
|
|
scan_pkgconfig_depends "$file"
|
|
done
|
|
fi
|
|
|
|
for file in "$pkgbasedir"/.control.*/.PKGINFO; do
|
|
local dir="${file%/.PKGINFO}"
|
|
local name=$(pkginfo_val pkgname $file)
|
|
local ver=$(pkginfo_val pkgver $file)
|
|
local size=$(pkginfo_val size $file | human_size)
|
|
local apk=$name-$ver.apk
|
|
local datadir="$pkgbasedir"/$name
|
|
local subpkgname=$name
|
|
local subpkgarch=$(pkginfo_val arch $file)
|
|
|
|
trace_apk_deps "$name" "$dir" "$subpkgarch" || return 1
|
|
msg "Package size: ${size}"
|
|
msg "Compressing data..."
|
|
(
|
|
cd "$datadir"
|
|
# data.tar.gz
|
|
set -- *
|
|
if [ "$1" = '*' ]; then
|
|
touch .dummy
|
|
set -- .dummy
|
|
fi
|
|
|
|
# normalize timestamps
|
|
find . -exec touch -h -d "@$SOURCE_DATE_EPOCH" {} +
|
|
|
|
tar --xattrs -f - -c "$@" | abuild-tar --hash | $gzip -9 >"$dir"/data.tar.gz
|
|
|
|
msg "Create checksum..."
|
|
# append the hash for data.tar.gz
|
|
local sha256=$(sha256sum "$dir"/data.tar.gz | cut -f1 -d' ')
|
|
echo "datahash = $sha256" >> "$dir"/.PKGINFO
|
|
touch -h -d "@$SOURCE_DATE_EPOCH" "$dir"/.PKGINFO
|
|
|
|
# control.tar.gz
|
|
cd "$dir"
|
|
tar -f - -c $(cat "$dir"/.metafiles) | abuild-tar --cut \
|
|
| $gzip -9 > control.tar.gz
|
|
abuild-sign -q control.tar.gz || exit 1
|
|
|
|
msg "Create $apk"
|
|
mkdir -p "$REPODEST"/$repo/${subpkgarch/noarch/$CARCH}
|
|
cat control.tar.gz data.tar.gz > "$REPODEST"/$repo/${subpkgarch/noarch/$CARCH}/$apk
|
|
)
|
|
done
|
|
}
|
|
|
|
build_abuildrepo() {
|
|
local part _check=check
|
|
if options_has "checkroot"; then
|
|
_check=check_fakeroot
|
|
fi
|
|
if ! want_check; then
|
|
_check=true
|
|
fi
|
|
if ! apk_up2date || [ -n "$force" ]; then
|
|
# check early if we have abuild key
|
|
abuild-sign --installed
|
|
logcmd "building $repo/$pkgname-$pkgver-r$pkgrel"
|
|
local _starttime=$(date --utc +%s)
|
|
msg "Building $repo/$pkgname $pkgver-r$pkgrel (using $program $program_version) started $(date -R)"
|
|
for part in sanitycheck builddeps clean fetch unpack prepare mkusers build \
|
|
$_check rootpkg; do
|
|
runpart $part
|
|
done
|
|
local _endtime=$(date --utc +%s)
|
|
local _difftime=$((_endtime - _starttime))
|
|
msg "Build complete at $(date -R) elapsed time $((_difftime/3600))h $((_difftime/60%60))m $((_difftime%60))s"
|
|
cleanup $CLEANUP
|
|
fi
|
|
update_abuildrepo_index
|
|
}
|
|
|
|
update_abuildrepo_index() {
|
|
local i allarch=""
|
|
for i in $allpackages; do
|
|
subpkg_set "$i"
|
|
##NOARCH: These packages are really in $CARCH and do not need their
|
|
# own repository. --rewrite-arch is used below to make sure the index
|
|
# thinks they are for $CARCH and apk-tools will fetch them from
|
|
# correct URL path. Remainder of the script uses ${subpkgarch/noarch/$CARCH}
|
|
# when expanding to the target repository path.
|
|
[ "$subpkgarch" = "noarch" ] && subpkgarch="$CARCH"
|
|
list_has "$subpkgarch" "$allarch" || allarch="$allarch $subpkgarch"
|
|
done
|
|
subpkg_unset
|
|
|
|
if [ -z "$DESCRIPTION" ]; then
|
|
DESCRIPTION="$repo $(cd $startdir && $git describe 2>/dev/null || true)"
|
|
fi
|
|
|
|
for i in $allarch; do
|
|
cd "$REPODEST/$repo/$i"
|
|
local index=$i/APKINDEX.tar.gz
|
|
|
|
msg "Updating the $repo/$i repository index..."
|
|
local sign=".SIGN.RSA.${SIGN_PUBLIC_KEY##*/}"
|
|
local oldindex=
|
|
if [ -f APKINDEX.tar.gz ]; then
|
|
oldindex="--index APKINDEX.tar.gz"
|
|
fi
|
|
( $APK index --quiet $oldindex --output APKINDEX.tar.gz.$$ \
|
|
--description "$DESCRIPTION" --rewrite-arch $i *.apk && \
|
|
msg "Signing the index..." && \
|
|
abuild-sign -q APKINDEX.tar.gz.$$ && \
|
|
chmod 644 APKINDEX.tar.gz.$$ && \
|
|
mv APKINDEX.tar.gz.$$ APKINDEX.tar.gz \
|
|
) || (rm -f APKINDEX.tar.gz.$$ ; die "Failed to create index")
|
|
done
|
|
}
|
|
|
|
# predefined function check
|
|
default_check() {
|
|
warning "APKBUILD does not run any tests!"
|
|
msg2 "Alpine policy will soon require that packages have any relevant testsuites run during the build process."
|
|
msg2 "To fix, either define a check() function, or declare !check in \$options to indicate the package does not have a testsuite."
|
|
}
|
|
|
|
check() {
|
|
default_check
|
|
}
|
|
|
|
# predefined splitfunc doc
|
|
default_doc() {
|
|
local gzip=$(command -v pigz || echo gzip)
|
|
depends="$depends_doc"
|
|
pkgdesc="$pkgdesc (documentation)"
|
|
install_if="docs $pkgname=$pkgver-r$pkgrel"
|
|
|
|
local i
|
|
for i in doc man info html sgml licenses gtk-doc ri help; do
|
|
if [ -d "$pkgdir/usr/share/$i" ]; then
|
|
mkdir -p "$subpkgdir/usr/share"
|
|
mv "$pkgdir/usr/share/$i" "$subpkgdir/usr/share/"
|
|
fi
|
|
done
|
|
|
|
# compress man pages
|
|
local mandir="$subpkgdir"/usr/share/man
|
|
[ -d "$mandir" ] && find "$mandir" -type l \
|
|
-a \( -name \*.[0-8n] -o -name \*.[0-8][a-z]* \) \
|
|
-a \! \( -name '*.gz' -o -name '*.bz2' -o -name '*.xz' \) \
|
|
| while read symlink; do
|
|
|
|
ln -s $(readlink $symlink).gz "$symlink".gz
|
|
rm -f "$symlink"
|
|
done
|
|
[ -d "$mandir" ] && find "$mandir" -type f \
|
|
-a \( -name \*.[0-8n] -o -name \*.[0-8][a-z]* \) \
|
|
-a \! \( -name '*.gz' -o -name '*.bz2' -o -name '*.xz' \) \
|
|
-exec stat -c "%i %n" \{\} \+ | while read inode name; do
|
|
|
|
# Skip hardlinks removed in last iteration.
|
|
[ -f "$name" ] || continue
|
|
|
|
local islink=0
|
|
find "$mandir" -type f -links +1 \
|
|
-a \( -name \*.[0-8n] -o -name \*.[0-8][a-z]* \) \
|
|
-a \! \( -name '*.gz' -o -name '*.bz2' -o -name '*.xz' \) \
|
|
-exec stat -c "%i %n" \{\} \+ | while read linode lname; do
|
|
if [ "$linode" = "$inode" -a "$lname" != "$name" ]; then
|
|
islink=1
|
|
rm -f "$lname"
|
|
ln -s "${name##*/}".gz "$lname".gz
|
|
fi
|
|
done
|
|
|
|
[ $islink -eq 0 ] && $gzip -9 "$name"
|
|
done
|
|
|
|
rm -f "$subpkgdir/usr/share/info/dir"
|
|
|
|
# remove if empty, ignore error (not empty)
|
|
rmdir "$pkgdir/usr/share" "$pkgdir/usr" 2>/dev/null || :
|
|
}
|
|
|
|
doc() {
|
|
default_doc
|
|
}
|
|
|
|
# predefined splitfunc dbg
|
|
default_dbg() {
|
|
local f
|
|
pkgdesc="$pkgdesc (debug symbols)"
|
|
|
|
scanelf -R "$pkgdir" | grep ET_DYN | sed "s:$pkgdir\/::g" | sed "s:ET_DYN ::g" | while read f; do
|
|
local ddbg_srcdir=$(dirname "$pkgdir/$f")
|
|
local ddbg_srcfile=$(basename "$pkgdir/$f")
|
|
local ddbg_dstdir=$(dirname "$subpkgdir/usr/lib/debug/$f.debug")
|
|
local ddbg_dstfile=$(basename "$subpkgdir/usr/lib/debug/$f.debug")
|
|
mkdir -p "$ddbg_dstdir"
|
|
|
|
cd "$ddbg_srcdir"
|
|
local xattr=$(getfattr --match="" --dump "${ddbg_srcfile}")
|
|
${CROSS_COMPILE}objcopy --only-keep-debug "$ddbg_srcfile" "$ddbg_dstfile"
|
|
${CROSS_COMPILE}objcopy --add-gnu-debuglink="$ddbg_dstfile" "$ddbg_srcdir/$ddbg_srcfile"
|
|
mv "$ddbg_dstfile" "$ddbg_dstdir"
|
|
${CROSS_COMPILE}strip "$ddbg_srcfile"
|
|
if [ -n "$xattr" ]; then
|
|
echo "$xattr" | setfattr --restore=-
|
|
fi
|
|
done
|
|
return 0
|
|
}
|
|
|
|
dbg() {
|
|
default_dbg
|
|
}
|
|
|
|
# predefined splitfunc dev
|
|
default_dev() {
|
|
local i= j=
|
|
depends="$depends_dev"
|
|
pkgdesc="$pkgdesc (development files)"
|
|
|
|
cd "$pkgdir" || return 0
|
|
local libdirs=usr/
|
|
[ -d lib/ ] && libdirs="lib/ $libdirs"
|
|
for i in usr/include usr/lib/pkgconfig usr/share/aclocal\
|
|
usr/share/gettext usr/bin/*-config \
|
|
usr/share/vala/vapi usr/share/gir-[0-9]*\
|
|
usr/share/qt*/mkspecs \
|
|
usr/lib/qt*/mkspecs \
|
|
usr/lib/cmake \
|
|
$(find . -name include -type d) \
|
|
$(subpackage_types_has static || find $libdirs \
|
|
-name '*.a' 2>/dev/null) \
|
|
$(find $libdirs -name '*.[cho]' \
|
|
-o -name '*.prl' 2>/dev/null); do
|
|
if [ -e "$pkgdir/$i" ] || [ -L "$pkgdir/$i" ]; then
|
|
d="$subpkgdir/${i%/*}" # dirname $i
|
|
mkdir -p "$d"
|
|
mv "$pkgdir/$i" "$d"
|
|
rmdir "$pkgdir/${i%/*}" 2>/dev/null || :
|
|
fi
|
|
done
|
|
# move *.so links needed when linking the apps to -dev packages
|
|
for i in lib/*.so usr/lib/*.so; do
|
|
if [ -L "$i" ]; then
|
|
mkdir -p "$subpkgdir"/"${i%/*}"
|
|
mv "$i" "$subpkgdir/$i" || return 1
|
|
fi
|
|
done
|
|
return 0
|
|
}
|
|
|
|
dev() {
|
|
default_dev
|
|
}
|
|
|
|
# predefined splitfunc static
|
|
default_static() {
|
|
local i=
|
|
depends="$depends_static"
|
|
pkgdesc="$pkgdesc (static library)"
|
|
|
|
cd "$pkgdir" || return 0
|
|
local libdirs=usr/lib
|
|
[ -d lib/ ] && libdirs="lib/ $libdirs"
|
|
|
|
# move *.a static library
|
|
for i in $(find $libdirs -name '*.a'); do
|
|
mkdir -p "$subpkgdir"/"${i%/*}"
|
|
mv "$i" "$subpkgdir/$i" || return 1
|
|
done
|
|
return 0
|
|
}
|
|
|
|
static() {
|
|
default_static
|
|
}
|
|
|
|
# predefined splitfunc libs
|
|
default_libs() {
|
|
depends="$depends_libs"
|
|
pkgdesc="$pkgdesc (libraries)"
|
|
local dir= file=
|
|
for dir in lib usr/lib; do
|
|
for file in "$pkgdir"/$dir/lib*.so.[0-9]*; do
|
|
[ -f "$file" ] || continue
|
|
mkdir -p "$subpkgdir"/$dir
|
|
mv "$file" "$subpkgdir"/$dir/
|
|
done
|
|
done
|
|
}
|
|
|
|
libs() {
|
|
default_libs
|
|
}
|
|
|
|
# predefined splitfunc openrc
|
|
default_openrc() {
|
|
depends="$depends_openrc"
|
|
pkgdesc="$pkgdesc (OpenRC init scripts)"
|
|
install_if="openrc ${subpkgname%-openrc}=$pkgver-r$pkgrel"
|
|
local dir file
|
|
for dir in conf.d init.d; do
|
|
if [ -d "$pkgdir/etc/$dir" ]; then
|
|
mkdir -p "$subpkgdir"/etc
|
|
mv "$pkgdir/etc/$dir" "$subpkgdir"/etc/
|
|
fi
|
|
done
|
|
return 0
|
|
}
|
|
|
|
openrc() {
|
|
default_openrc
|
|
}
|
|
|
|
|
|
is_function() {
|
|
type "$1" 2>&1 | head -n 1 | egrep -q "is a (shell )?function"
|
|
}
|
|
|
|
do_fakeroot() {
|
|
if [ -n "$FAKEROOT" ]; then
|
|
$FAKEROOT -- "$@"
|
|
else
|
|
"$@"
|
|
fi
|
|
}
|
|
|
|
# wrap check() with fakeroot
|
|
check_fakeroot() {
|
|
cd "$startdir"
|
|
[ -n "$FAKEROOT" ] && msg "Entering fakeroot..."
|
|
do_fakeroot "$abuild_path" $forceroot $color_opt $keep_build check
|
|
}
|
|
|
|
# build and package in fakeroot
|
|
rootpkg() {
|
|
cd "$startdir"
|
|
rm -rf "$pkgdir"
|
|
[ -n "$FAKEROOT" ] && msg "Entering fakeroot..."
|
|
do_fakeroot "$abuild_path" $forceroot $color_opt $keep_build \
|
|
package \
|
|
prepare_subpackages \
|
|
prepare_language_packs \
|
|
prepare_package \
|
|
create_apks
|
|
}
|
|
|
|
srcpkg() {
|
|
echo "Ensuring source is fetched"
|
|
fetch
|
|
getpkgver || return 1
|
|
local p="$pkgname-$pkgver-$pkgrel"
|
|
local prefix="${startdir##*/}"
|
|
local i files="$prefix/APKBUILD"
|
|
for i in $source; do
|
|
echo "Packaging source file: $i"
|
|
if [ ! -e $(filename_from_uri $i) ]; then
|
|
cp $srcdir/$(filename_from_uri $i) $(filename_from_uri $i)
|
|
fi
|
|
files="$files $prefix/$(filename_from_uri $i)"
|
|
done
|
|
|
|
for i in $install; do
|
|
echo "Packaging install file: $i"
|
|
files="$files $prefix/$i"
|
|
done
|
|
|
|
for i in $triggers; do
|
|
local f=${i%=*}
|
|
echo "Packaging trigger file: $f"
|
|
files="$files $prefix/$f"
|
|
done
|
|
|
|
mkdir -p "$REPODEST/src"
|
|
msg "Creating source package $p.src.tar.gz..."
|
|
(cd .. && tar -zcf "$REPODEST/src/$p.src.tar.gz" $files)
|
|
}
|
|
|
|
# return true if arch is supported or noarch
|
|
check_arch() {
|
|
local ret=1
|
|
local i
|
|
for i in $arch; do
|
|
case $i in
|
|
all | noarch) ret=0 ;;
|
|
"$CARCH") ret=0 ;;
|
|
"!$CARCH") return 1 ;;
|
|
esac
|
|
done
|
|
return $ret
|
|
}
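# illustrative examples: arch="all" or arch="noarch" builds everywhere,
# arch="x86_64 armhf" only on those arches, and arch="all !s390x" builds
# everywhere except s390x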
|
|
|
|
# return true if libc is not masked in options
|
|
check_libc() {
|
|
! list_has "!libc_$CLIBC" $options
|
|
}
|
|
|
|
# check if package is up to date
|
|
apk_up2date() {
|
|
getpkgver || return 1
|
|
|
|
local i s
|
|
for i in $allpackages; do
|
|
subpkg_set "$i"
|
|
if [ ! -f "$REPODEST/$repo/${subpkgarch/noarch/$CARCH}/$subpkgname-$pkgver-r$pkgrel.apk" ]; then
|
|
subpkg_unset
|
|
return 1
|
|
fi
|
|
done
|
|
subpkg_unset
|
|
[ -n "$keep" ] && return 0
|
|
|
|
cd "$startdir"
|
|
for i in $source APKBUILD; do
|
|
if is_remote "$i"; then
|
|
s="$SRCDEST/$(filename_from_uri $i)"
|
|
else
|
|
s="$startdir/${i##*/}"
|
|
fi
|
|
if [ "$s" -nt "$REPODEST/$repo/${pkgarch/noarch/$CARCH}/$pkgname-$pkgver-r$pkgrel.apk" ]; then
|
|
return 1
|
|
fi
|
|
done
|
|
return 0
|
|
}
|
|
|
|
abuildindex_up2date() {
|
|
local i
|
|
getpkgver || return 1
|
|
|
|
for i in $allpackages; do
|
|
subpkg_set "$i"
|
|
local dir="$REPODEST"/$repo/${subpkgarch/noarch/$CARCH}
|
|
local idx="$dir"/APKINDEX.tar.gz
|
|
local file="$dir"/$subpkgname-$pkgver-r$pkgrel.apk
|
|
|
|
# if any file is missing, or an .apk is newer than the index,
# the index needs to be updated
|
|
if [ ! -f "$idx" -o ! -f "$file" -o "$file" -nt "$idx" ]; then
|
|
subpkg_unset
|
|
return 1
|
|
fi
|
|
done
|
|
subpkg_unset
|
|
|
|
return 0
|
|
}
|
|
|
|
up2date() {
|
|
check_arch || return 0
|
|
check_libc || return 0
|
|
apk_up2date && abuildindex_up2date
|
|
}
|
|
|
|
# rebuild package and abuildrepo index if needed
|
|
abuildindex() {
|
|
up2date && return 0
|
|
build_abuildrepo
|
|
}
|
|
|
|
# source all APKBUILDs and output:
# 1) origin of package
# 2) all dependencies
# the output is in a format easily parsed by awk
|
|
parse_aports_makedepends() {
|
|
# lets run this in a subshell since we source all APKBUILD here
|
|
(
|
|
aportsdir=$(realpath ${APKBUILD%/APKBUILD}/..)
|
|
for i in $aportsdir/*/APKBUILD; do
|
|
# no forks in this loop or it will be painfully slow!
|
|
pkgname=
|
|
subpackages=
|
|
depends=
|
|
makedepends=
|
|
checkdepends=
|
|
. $i
|
|
dir=${i%/APKBUILD}
|
|
deps=
|
|
# filter out conflicts from deps and version info
|
|
wantdepends="$depends $makedepends"
|
|
want_check && wantdepends="$wantdepends $checkdepends"
|
|
for j in $wantdepends; do
|
|
case "$j" in
|
|
!*) continue;;
|
|
esac
|
|
deps="$deps ${j%%[<>=]*}"
|
|
done
|
|
for j in $pkgname $subpackages; do
|
|
echo "o ${j%%:*} $dir"
|
|
set -- $deps
|
|
if [ $# -eq 0 ]; then
|
|
echo "d ${j%%:*}"
|
|
continue
|
|
fi
|
|
echo -n "d ${j%%:*} $1"
|
|
shift
|
|
while [ $# -gt 0 ]; do
|
|
echo -n ",$1"
|
|
shift
|
|
done
|
|
echo
|
|
done
|
|
done
|
|
)
|
|
}
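# illustrative output for an aport "foo" with subpackages="foo-doc" whose
# depends/makedepends resolve to zlib-dev and ncurses-dev (names are
# hypothetical):
#   o foo .../aports/main/foo
#   d foo zlib-dev,ncurses-dev
#   o foo-doc .../aports/main/foo
#   d foo-doc zlib-dev,ncurses-dev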
|
|
|
|
trace_makedepends() {
|
|
local deps= i=
|
|
# strip versions from deps
|
|
for i in "$@"; do
|
|
deps="$deps ${i%%[<>=]*}"
|
|
done
|
|
[ -z "$deps" ] && return 0
|
|
( parse_aports_makedepends
|
|
if [ -z "$upgrade" ]; then
|
|
# list installed pkgs and prefix with 'i '
|
|
$APK info --quiet | sort | sed 's/^/i /'
|
|
fi
|
|
) | awk -v pkgs="$deps" '
|
|
|
|
function depgraph(pkg, a, i) {
|
|
if (visited[pkg])
|
|
return 0;
|
|
visited[pkg] = 1;
|
|
split(deps[pkg], a, ",");
|
|
for (i in a)
|
|
depgraph(a[i]);
|
|
print pkg ":" origin[pkg];
|
|
|
|
}
|
|
|
|
$1 == "i" { visited[$2] = 1 }
|
|
$1 == "o" { origin[$2] = $3 }
|
|
$1 == "d" { deps[$2] = $3 }
|
|
END {
|
|
split(pkgs, pkgarray);
|
|
for (i in pkgarray)
|
|
depgraph(pkgarray[i]);
|
|
}
|
|
'
|
|
}
|
|
|
|
calcdeps() {
|
|
builddeps=
|
|
hostdeps=
|
|
|
|
if cross_compiling && [ -n "$makedepends_build" -o -n "$makedepends_host" ]; then
|
|
for i in $EXTRADEPENDS_BUILD $1 $makedepends_build; do
|
|
list_has $i $builddeps && continue
|
|
builddeps="$builddeps $i"
|
|
done
|
|
for i in $EXTRADEPENDS_HOST $EXTRADEPENDS_TARGET $makedepends_host; do
|
|
[ "$pkgname" = "${i%%[<>=]*}" ] && continue
|
|
list_has $i $hostdeps && continue
|
|
subpackages_has ${i%%[<>=]*} || hostdeps="$hostdeps $i"
|
|
done
|
|
else
|
|
[ -z "$makedepends" ] && makedepends="$makedepends_build $makedepends_host"
|
|
want_check && makedepends="$makedepends $checkdepends"
|
|
for i in $EXTRADEPENDS_BUILD $EXTRADEPENDS_HOST $1 $depends $makedepends; do
|
|
[ "$pkgname" = "${i%%[<>=]*}" ] && continue
|
|
list_has $i $builddeps && continue
|
|
subpackages_has ${i%%[<>=]*} || builddeps="$builddeps $i"
|
|
done
|
|
hostdeps="$EXTRADEPENDS_TARGET"
|
|
fi
|
|
}
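# Illustration (hypothetical native build): with depends="lua5.3" and
# makedepends="zlib-dev ncurses-dev", calcdeps build-base leaves
#   builddeps=" build-base lua5.3 zlib-dev ncurses-dev"
#   hostdeps="$EXTRADEPENDS_TARGET"
# ($pkgname itself and its own subpackages are filtered out). When cross
# compiling, makedepends_build feeds builddeps and makedepends_host feeds
# hostdeps instead.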
|
|
|
|
get_missing_deps() {
|
|
local cmd="$APK info --quiet --installed $1"
|
|
shift
|
|
|
|
while [ "$1" ]; do
|
|
local cp=${1#\!}
|
|
if [ "$cp" != "$1" ]; then
|
|
if $cmd $cp; then
|
|
error "Conflicting package installed: $cp"
|
|
return 1
|
|
fi
|
|
elif [ "$upgrade" ] || ! $cmd $1; then
|
|
echo $1
|
|
fi
|
|
shift
|
|
done
|
|
}
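# Usage sketch (hypothetical deps): get_missing_deps "" zlib-dev '!libressl-dev'
# prints "zlib-dev" if it is not installed yet (or unconditionally with -u),
# and fails with an error if the conflicting package libressl-dev is installed.
# The first argument holds extra apk options, e.g. --root/--arch for the
# cross-compile host root.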
|
|
|
|
# build and install dependencies
|
|
builddeps() {
|
|
local pkg= i= BUILD_BASE=
|
|
[ -n "$nodeps" ] && return 0
|
|
|
|
msg "Analyzing dependencies..."
|
|
case "$BOOTSTRAP" in
|
|
no*) BUILD_BASE="";;
|
|
*) if cross_creating || cross_compiling; then
|
|
BUILD_BASE="build-base-$CTARGET_ARCH"
|
|
else
|
|
BUILD_BASE="build-base"
|
|
fi
|
|
esac
|
|
calcdeps "$BUILD_BASE"
|
|
|
|
# find which deps are missing
|
|
local mbd mhd missing
|
|
mbd=$(get_missing_deps "" $builddeps) || return 1
|
|
mhd=$(get_missing_deps "--root $CBUILDROOT --arch $CTARGET_ARCH" $hostdeps) || return 1
|
|
missing=$(echo $mbd $mhd)
|
|
|
|
if [ -z "$install_deps" ] && [ -z "$recursive" ]; then
|
|
# if we don't have any missing deps we are done now
|
|
[ -z "$missing" ] && return 0
|
|
error "Missing dependencies (use -r to autoinstall or -R to build them): $missing"
|
|
return 1
|
|
fi
|
|
|
|
uninstall_after=".makedepends-$pkgname $uninstall_after"
|
|
if [ -n "$install_deps" ] && [ -z "$recursive" ]; then
|
|
# make a --simulate run first to detect missing deps
|
|
# apk-tools --virtual is not good at reporting those.
|
|
deps "--quiet --simulate" || return 1
|
|
deps || return 1
|
|
return 0
|
|
fi
|
|
|
|
[ -z "$recursive" ] && return 1
|
|
|
|
if [ -n "$CBUILDROOT" ]; then
|
|
error "Recursive rebuilding (-R) is not supported when cross compiling."
|
|
return 1
|
|
fi
|
|
|
|
# find dependencies that are installed but missing in repo.
|
|
for i in $builddeps; do
|
|
local m=$($APK search --repository "$REPODEST/$repo" ${i%%[<>=]*})
|
|
if [ -z "$m" ]; then
|
|
missing="$missing $i"
|
|
fi
|
|
done
|
|
|
|
for i in $(trace_makedepends $missing); do
|
|
# i = pkg:dir
|
|
local dir=${i#*:}
|
|
local pkg=${i%:*}
|
|
|
|
# ignore if dependency is in other repo
|
|
[ -d "$dir" ] || continue
|
|
|
|
# check if dep is blacklisted
|
|
if list_has $pkg $ABUILD_BLACKLIST; then
|
|
error "$pkg is blacklisted"
|
|
return 1
|
|
fi
|
|
|
|
# break circular deps
|
|
list_has $pkg $ABUILD_VISITED && continue
|
|
export ABUILD_VISITED="$ABUILD_VISITED $pkg"
|
|
|
|
msg "Entering $dir"
|
|
cd "$dir" && "$abuild_path" $forceroot $keep $keep_build $quiet \
|
|
$install_deps $recursive $upgrade $color_opt \
|
|
abuildindex || return 1
|
|
done
|
|
$SUDO_APK add --upgrade --repository "$REPODEST/$repo" \
|
|
$apk_opt_wait \
|
|
--virtual .makedepends-$pkgname $builddeps \
|
|
|| return 1
|
|
}
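# For illustration (hypothetical aport "foo" in repo main), the final step above
# amounts to roughly
#   abuild-apk add --upgrade --repository "$REPODEST/main" --wait 30 \
#       --virtual .makedepends-foo build-base zlib-dev ncurses-dev
# so all build deps hang off one virtual package and can later be removed with
# a single "apk del .makedepends-foo" (see undeps below).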
|
|
|
|
# regenerate the checksums (sha512sums) in the APKBUILD
|
|
checksum() {
|
|
local s files
|
|
[ -z "$source" ] && [ -n "${md5sums}${sha256sums}${sha512sums}" ] \
|
|
&& msg "Removing checksums from APKBUILD"
|
|
sed -E -i -e '/^(md5|sha[0-9]+)sums=".*[^"]$/,/"$/d' \
|
|
-e '/^(md5|sha[0-9]+)sums=".*"$/d' \
|
|
\
|
|
-e "/^(md5|sha[0-9]+)sums='.*[^']\$/,/'\$/d" \
|
|
-e "/^(md5|sha[0-9]+)sums='.*'\$/d" \
|
|
APKBUILD
|
|
|
|
[ -z "$source" ] && return 0
|
|
fetch
|
|
for s in $source; do
|
|
files="$files $(filename_from_uri $s)"
|
|
done
|
|
|
|
msg "Updating the sha512sums in APKBUILD..."
|
|
md5sums=
|
|
sha256sums=
|
|
sha512sums="$(cd "$srcdir" && sha512sum $files)" \
|
|
|| die "sha512sum failed"
|
|
echo "sha512sums=\"$sha512sums\"" >>"$APKBUILD"
|
|
}
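# The block appended to the APKBUILD looks like this (hypothetical hashes):
#   sha512sums="cf83e1357eef...  foo-1.0.tar.gz
#   1f40fc92da24...  0001-fix-build.patch"
# i.e. one "<sha512>  <file>" line per source entry, wrapped in one
# double-quoted assignment.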
|
|
|
|
rootbld_actions() {
|
|
local part _check=check
|
|
if options_has "checkroot"; then
|
|
_check=check_fakeroot
|
|
fi
|
|
if ! want_check; then
|
|
_check=true
|
|
fi
|
|
for part in symlinksrc unpack prepare build $_check rootpkg; do
|
|
runpart $part
|
|
done
|
|
}
|
|
|
|
rootbld() {
|
|
if apk_up2date && [ -z "$force" ]; then
|
|
msg "Package is up to date"
|
|
return
|
|
fi
|
|
|
|
[ "$CBUILD" = "$CHOST" ] || die "rootbld: cross-building not supported currently"
|
|
apk info -eq abuild-rootbld || die "rootbld: abuild-rootbld package not installed"
|
|
|
|
logcmd "chroot building building $repo/$pkgname-$pkgver-r$pkgrel"
|
|
|
|
# check early if we have abuild key
|
|
abuild-sign --installed
|
|
|
|
# steps that need network access happen before we enter the chroot
|
|
sanitycheck
|
|
clean
|
|
fetch
|
|
verify
|
|
|
|
msg "Preparing build chroot..."
|
|
|
|
mkusers
|
|
|
|
BUILD_ROOT=$(mktemp -d /var/tmp/abuild.XXXXXXXXXX)
|
|
local aportsgit=${APORTSDIR:-${startdir}}
|
|
|
|
mkdir -p "$BUILD_ROOT/proc" "$BUILD_ROOT/etc/apk/keys" \
|
|
"$BUILD_ROOT/$HOME/.abuild" "$BUILD_ROOT/$aportsgit" \
|
|
"$BUILD_ROOT/$SRCDEST" "$BUILD_ROOT/$REPODEST" \
|
|
"$BUILD_ROOT/tmp/pkg" "$BUILD_ROOT/tmp/src" \
|
|
"$BUILD_ROOT/usr/bin" "$pkgbasedir" "$REPODEST" \
|
|
"$srcdir"
|
|
|
|
cp /etc/abuild.conf /etc/group /etc/passwd "$BUILD_ROOT/etc"
|
|
cp /etc/apk/keys/* "$BUILD_ROOT/etc/apk/keys"
|
|
|
|
if options_has "net"; then
|
|
cp /etc/resolv.conf "$BUILD_ROOT/etc"
|
|
fi
|
|
|
|
local version="edge" buildhost="edge" gitref
|
|
if gitref="$(expr "$(git symbolic-ref --short HEAD)" : '\([0-9]\+\(\.[0-9]\+\)*\)-')"; then
|
|
version=v${gitref}
|
|
buildhost=${gitref/./-}
|
|
fi
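# For example, on an aports branch named "3.10-stable" the expr above yields
# gitref=3.10, giving version=v3.10 and buildhost=3-10; on master the match
# fails and the "edge" defaults are kept.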
|
|
|
|
local repo_template=$aportsgit/$repo/.rootbld-repositories
|
|
[ -s "$repo_template" ] || die "rootbld: $repo_template does not exist"
|
|
(
|
|
for key in $(git config --list --name-only); do
|
|
k=${key#abuild.}
|
|
[ $k != $key ] && \
|
|
eval "export $k=\"$(git config --get $key)\""
|
|
done
|
|
|
|
export mirror version
|
|
[ "$mirror" ] || mirror=http://dl-cdn.alpinelinux.org/alpine
|
|
|
|
envsubst
|
|
echo "$REPODEST/$repo"
|
|
) < "$repo_template" > "$BUILD_ROOT/etc/apk/repositories"
|
|
|
|
calcdeps
|
|
$SUDO_APK add --initdb --root "$BUILD_ROOT" --update \
|
|
abuild alpine-base build-base git $hostdeps $builddeps
|
|
|
|
local bwrap_opts=""
|
|
options_has "net" || bwrap_opts="$bwrap_opts --unshare-net"
|
|
bwrap --unshare-ipc --unshare-uts $bwrap_opts \
|
|
--ro-bind "$BUILD_ROOT" / \
|
|
--proc /proc \
|
|
--dev-bind /dev /dev \
|
|
--bind "$BUILD_ROOT/$HOME" "$HOME" \
|
|
--ro-bind "$HOME/.abuild" "$HOME/.abuild" \
|
|
--ro-bind "$aportsgit" "$aportsgit" \
|
|
--bind "$SRCDEST" "$SRCDEST" \
|
|
--bind "$BUILD_ROOT/tmp" /tmp \
|
|
--bind "$BUILD_ROOT/tmp/src" "$srcdir" \
|
|
--bind "$BUILD_ROOT/tmp/pkg" "$pkgbasedir" \
|
|
--bind "$REPODEST" "$REPODEST" \
|
|
--hostname "build-$buildhost-$CARCH" \
|
|
--chdir "$startdir" \
|
|
--setenv PATH /bin:/usr/bin:/sbin:/usr/sbin \
|
|
/usr/bin/abuild $force rootbld_actions
|
|
update_abuildrepo_index
|
|
cleanup $CLEANUP
|
|
}
|
|
|
|
stripbin() {
|
|
local bin
|
|
if options_has "!strip" || [ "${subpkgarch:-$pkgarch}" = "noarch" ]; then
|
|
return 0
|
|
fi
|
|
cd "${subpkgdir:-$pkgdir}" || return 1
|
|
|
|
local stripcmd=strip
|
|
case "${subpkgarch:-$pkgarch}" in
|
|
$CBUILD_ARCH) stripcmd="strip" ;;
|
|
$CARCH) stripcmd="${CHOST}-strip" ;;
|
|
$CTARGET_ARCH) stripcmd="${CTARGET}-strip" ;;
|
|
esac
|
|
|
|
msg "Stripping binaries"
|
|
scanelf --recursive --nobanner --osabi --etype "ET_DYN,ET_EXEC" . \
|
|
| while read type osabi filename; do
|
|
|
|
# scanelf may have picked up a temp file so verify that file still exists
|
|
[ -e "$filename" ] || continue
|
|
|
|
[ "$osabi" != "STANDALONE" ] || continue
|
|
local XATTR=$(getfattr --match="" --dump "${filename}")
|
|
"${stripcmd}" "${filename}"
|
|
if [ -n "$XATTR" ]; then
|
|
echo "$XATTR" | setfattr --restore=-
|
|
fi
|
|
done
|
|
}
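# Note: scanelf is expected to print one "<etype> <osabi> <path>" line per
# match, e.g. (illustrative) "ET_DYN NONE ./usr/lib/libfoo.so.1", which is what
# the "read type osabi filename" loop above consumes; objects whose OS ABI is
# STANDALONE are left unstripped.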
|
|
|
|
# simply list target apks
|
|
listpkg() {
|
|
local name
|
|
getpkgver || return 1
|
|
for name in $allpackages ; do
|
|
subpkg_set $name
|
|
echo "$subpkgname-$pkgver-r$pkgrel.apk"
|
|
done
|
|
subpkg_unset
|
|
}
|
|
|
|
source_has() {
|
|
local i
|
|
for i in $source; do
|
|
[ "$1" = "${i##*/}" ] && return 0
|
|
[ "$1" = "${i%%::*}" ] && return 0
|
|
done
|
|
return 1
|
|
}
|
|
|
|
subpackages_has() {
|
|
local i
|
|
for i in $subpackages; do
|
|
[ "$1" = "${i%%:*}" ] && return 0
|
|
done
|
|
return 1
|
|
}
|
|
|
|
subpackage_types_has() {
|
|
local i
|
|
for i in $subpackages; do
|
|
local _name="${i%%:*}"
|
|
[ "$1" = "${_name##*-}" ] && return 0
|
|
done
|
|
return 1
|
|
}
|
|
|
|
list_has() {
|
|
local needle="$1"
|
|
local i
|
|
shift
|
|
for i in $@; do
|
|
[ "$needle" = "$i" ] && return 0
|
|
[ "$needle" = "!$i" ] && return 1
|
|
done
|
|
return 1
|
|
}
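# Usage sketch: list_has "!check" $options succeeds if $options contains
# "!check"; looking up "!foo" fails as soon as a plain "foo" entry is seen.
# deplist_has below additionally strips version constraints from the haystack,
# so e.g. deplist_has ncurses-dev $makedepends also matches "ncurses-dev>=6.1".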
|
|
|
|
# same as list_has but we filter version info
|
|
deplist_has() {
|
|
local needle="$1"
|
|
local i
|
|
shift
|
|
for i in $@; do
|
|
i=${i%%[<>=]*}
|
|
[ "$needle" = "$i" ] && return 0
|
|
[ "$needle" = "!$i" ] && return 1
|
|
done
|
|
return 1
|
|
}
|
|
|
|
options_has() {
|
|
list_has "$1" $options
|
|
}
|
|
|
|
depends_has() {
|
|
deplist_has "$1" $depends
|
|
}
|
|
|
|
makedepends_has() {
|
|
deplist_has "$1" $makedepends
|
|
}
|
|
|
|
md5sums_has() {
|
|
list_has "$1" $md5sums
|
|
}
|
|
|
|
install_has() {
|
|
list_has "$1" $install
|
|
}
|
|
|
|
deps() {
|
|
[ -z "$hostdeps" -a -z "$builddeps" ] && calcdeps
|
|
|
|
local _quiet="$1"
|
|
[ -z "$_quiet" ] && msg "Installing for build:$builddeps"
|
|
$SUDO_APK add $_quiet $apk_opt_wait --repository "$REPODEST/$repo" \
|
|
--virtual .makedepends-$pkgname \
|
|
$builddeps \
|
|
|| return 1
|
|
if [ -n "$CBUILDROOT" ]; then
|
|
[ -z "$_quiet" ] && msg "Installing for host:$hostdeps"
|
|
$SUDO_APK add $_quiet --root "$CBUILDROOT" --arch "$CTARGET_ARCH" --repository "$REPODEST/$repo" $apk_opt_wait \
|
|
--no-scripts --virtual .hostdepends-$pkgname $hostdeps || return 1
|
|
fi
|
|
}
|
|
|
|
undeps() {
|
|
local _quiet="$@"
|
|
$SUDO_APK del $_quiet $apk_opt_wait .makedepends-$pkgname || :
|
|
if [ -n "$CBUILDROOT" ]; then
|
|
$SUDO_APK del $_quiet --root "$CBUILDROOT" --arch "$CTARGET_ARCH" $apk_opt_wait \
|
|
--no-scripts .hostdepends-$pkgname || :
|
|
fi
|
|
}
|
|
|
|
# compat
|
|
installdeps() { deps; }
|
|
uninstalldeps() { undeps; }
|
|
index() { update_abuildrepo_index; }
|
|
|
|
all() {
|
|
if ! [ -n "$force" ]; then
|
|
if ! check_arch; then
|
|
echo "Package not available for the target architecture ($CARCH). Aborting."
|
|
return 0
|
|
fi
|
|
check_libc || return 0
|
|
fi
|
|
if up2date && [ -z "$force" ]; then
|
|
msg "Package is up to date"
|
|
else
|
|
build_abuildrepo
|
|
fi
|
|
}
|
|
|
|
# This abuild hook will check out an svn or git repository specified by
|
|
# $svnurl or $giturl in the APKBUILD. You can check out a specific branch in
|
|
# git by adding -b $branch in $giturl. $reporev will select the correct
|
|
# commit, revision or tag for you. If you specify $disturl your distfile
|
|
# will automatically be uploaded with rsync to the url provided.
|
|
# The base version defaults to 0 unless overridden with $verbase.
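# A hypothetical APKBUILD using this hook could set, for example:
#   giturl="https://example.org/foo.git -b stable"
#   reporev="v1.2.3"
#   verbase="1.2.3"
#   disturl="user@dev.example.org:/archive/foo/"
# which would produce foo-1.2.3_git<YYYYMMDD>.tar.gz, rsync it to $disturl and
# update pkgver in the APKBUILD accordingly.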
|
|
|
|
snapshot() {
|
|
# check if we setup vars correctly
|
|
[ -z "$disturl" ] && warning "Missing disturl in APKBUILD, auto uploading disabled."
|
|
[ -z "$svnurl" ] && [ -z "$giturl" ] && die "Missing repository url in APKBUILD!"
|
|
[ -n "$svnurl" ] && [ -n "$giturl" ] && die "You can only use a single repository!"
|
|
local _date=$(date +%Y%m%d)
|
|
local _format="tar.gz"
|
|
# remove any repositories left in srcdir
|
|
abuild clean
|
|
mkdir -p "$srcdir" && cd "$srcdir"
|
|
# clone git repo and archive
|
|
if [ -n "$giturl" ]; then
|
|
local _version=${verbase:-0}_git${_date}
|
|
[ "$git" = "true" ] && die "Missing git! Install git to support git clone."
|
|
local _rev="${reporev:-HEAD}"
|
|
[ "$_rev" = "HEAD" ] && local _depth="--depth=1"
|
|
msg "Creating git snapshot: $pkgname-$_version"
|
|
git clone $_depth --bare $giturl $pkgname-$_version || return 1
|
|
git --git-dir $pkgname-$_version archive \
|
|
--format=$_format \
|
|
-o $pkgname-$_version.$_format \
|
|
--prefix=$pkgname-$_version/ $_rev \
|
|
|| return 1
|
|
fi
|
|
# check out svn repo and archive
|
|
if [ -n "$svnurl" ]; then
|
|
local _version=${verbase:-0}_svn${_date}
|
|
command -v svn >/dev/null || \
|
|
die "Missing svn! Install subverion to support svn export."
|
|
[ -n "$reporev" ] && local _rev="-r $reporev"
|
|
msg "Creating svn snapshot: $pkgname-$_version"
|
|
svn co $_rev $svnurl $pkgname-$_version || return 1
|
|
tar zcf $pkgname-$_version.$_format $pkgname-$_version || return 1
|
|
fi
|
|
# upload to defined distfiles url
|
|
if [ -n "$disturl" ]; then
|
|
command -v rsync >/dev/null || \
|
|
die "Missing rsync! Install rsync to enable automatic uploads."
|
|
msg "Uploading to $disturl"
|
|
rsync --progress -La $pkgname-$_version.$_format \
|
|
$disturl || return 1
|
|
cd "$startdir"
|
|
# set the pkgver to current date and update checksum
|
|
sed -i -e "s/^pkgver=.*/pkgver=${_version}/" \
|
|
APKBUILD || return 1
|
|
abuild checksum
|
|
fi
|
|
}
|
|
|
|
usage() {
|
|
cat <<-EOF
|
|
usage: $program [options] [-P REPODEST] [-s SRCDEST] [-D DESCRIPTION] [cmd] ...
|
|
$program [-c] -n PKGNAME[-PKGVER]
|
|
Options:
|
|
-A Print CARCH and exit
|
|
-c Enable colored output
|
|
-d Disable dependency checking
|
|
-D Set APKINDEX description (default: \$repo \$(git describe))
|
|
-f Force specified cmd (skip checks: apk up to date, arch, libc)
|
|
-F Force run as root
|
|
-h Show this help
|
|
-k Keep built packages, even if APKBUILD or sources are newer
|
|
-K Keep buildtime temp dirs and files (srcdir/pkgdir/deps)
|
|
-m Disable colors (monochrome)
|
|
-P Set REPODEST as the repository location for created packages
|
|
-q Quiet
|
|
-r Install missing dependencies from system repository (using sudo)
|
|
-R Recursively build and install missing dependencies (using sudo)
|
|
-s Set source package destination directory
|
|
-u Recursively build and upgrade all dependencies (using sudo)
|
|
-v Verbose: show every command as it is run (very noisy)
|
|
|
|
Commands:
|
|
build Compile and install package into \$pkgdir
|
|
check Run any defined tests concerning the package
|
|
checksum Generate checksum to be included in APKBUILD
|
|
clean Remove temp build and install dirs
|
|
cleancache Remove downloaded files from \$SRCDEST
|
|
cleanoldpkg Remove binary packages except current version
|
|
cleanpkg Remove already built binary and source package
|
|
deps Install packages listed in makedepends and depends
|
|
fetch Fetch sources to \$SRCDEST (consider: 'abuild fetch verify')
|
|
index Regenerate indexes in \$REPODEST
|
|
listpkg List target packages
|
|
package Install project into \$pkgdir
|
|
prepare Apply patches
|
|
rootbld Build package in clean chroot
|
|
rootpkg Run 'package' and the split functions and create apks under fakeroot
|
|
sanitycheck Basic sanity check of APKBUILD
|
|
snapshot Create a \$giturl or \$svnurl snapshot and upload to \$disturl
|
|
sourcecheck Check if remote source package exists upstream
|
|
srcpkg Make a source package
|
|
undeps Uninstall packages listed in makedepends and depends
|
|
unpack Unpack sources to \$srcdir
|
|
up2date Compare target and source dates
|
|
verify Verify checksums
|
|
|
|
To activate cross compilation specify in environment:
|
|
CHOST Arch or hostspec of machine to generate packages for
|
|
CTARGET Arch or hostspec of machine to generate compiler for
|
|
|
|
EOF
|
|
exit 0
|
|
}
|
|
|
|
APKBUILD="${APKBUILD:-./APKBUILD}"
|
|
unset force
|
|
unset recursive
|
|
while getopts ":AcdD:fFhkKmnP:qrRs:uv" opt; do
|
|
case $opt in
|
|
'A') echo "$CARCH"; exit 0;;
|
|
'c') enable_colors
|
|
color_opt="-c";;
|
|
'd') nodeps="-d";;
|
|
'D') DESCRIPTION=$OPTARG;;
|
|
'f') force="-f";;
|
|
'F') forceroot="-F";;
|
|
'h') usage;;
|
|
'k') keep="-k";;
|
|
'K') keep_build="-K";;
|
|
'm') disable_colors
|
|
color_opt="-m";;
|
|
'n') die "Use newapkbuild to create new aports";;
|
|
'P') REPODEST=$OPTARG;;
|
|
'q') quiet="-q";;
|
|
'r') install_deps="-r";;
|
|
'R') recursive="-R";;
|
|
's') SRCDEST=$OPTARG;;
|
|
'u') upgrade="-u"
|
|
recursive="-R";;
|
|
'v') set -x;;
|
|
'?') die "Unrecognized option: $OPTARG";;
|
|
esac
|
|
done
|
|
shift $(( $OPTIND - 1 ))
|
|
|
|
# check that we are not running as root
|
|
if [ $(id -u) -eq 0 ] && [ -z "$FAKEROOTKEY" ]; then
|
|
[ -z "$forceroot" ] && die "Do not run abuild as root"
|
|
FAKEROOT=
|
|
fi
|
|
|
|
# find startdir
|
|
[ -f "$APKBUILD" ] || die "Could not find $APKBUILD (PWD=$PWD)"
|
|
APKBUILD=$(readlink -f "$APKBUILD")
|
|
|
|
startdir="${APKBUILD%/*}"
|
|
srcdir=${srcdir:-"$startdir/src"}
|
|
pkgbasedir=${pkgbasedir:-"$startdir/pkg"}
|
|
|
|
repo=${startdir%/*}
|
|
repo=${repo##*/}
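# e.g. (hypothetical checkout path) startdir=/home/user/aports/main/gcc
# gives repo=main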
|
|
|
|
SRCDEST=${SRCDEST:-$startdir}
|
|
|
|
BUILD_ROOT=
|
|
|
|
# set a default CC
|
|
: ${CC:=gcc}
|
|
export CC
|
|
|
|
cd "$startdir" || die
|
|
. "$APKBUILD"
|
|
|
|
builddir=${builddir:-"$srcdir/$pkgname-$pkgver"}
|
|
|
|
# REPODEST is required; the old PKGDEST variable is no longer supported
|
|
if [ -z "$REPODEST" ]; then
|
|
warning "REPODEST is not set and is now required. Defaulting to $HOME/packages"
|
|
[ -n "$PKGDEST" ] && die "PKGDEST is no longer supported."
|
|
REPODEST="$HOME/packages"
|
|
fi
|
|
|
|
# for recursive action
|
|
export REPODEST SRCDEST
|
|
|
|
# add a dbg subpackage if it's enabled globally
|
|
if [ -n "$DEFAULT_DBG" ] && ! subpackage_types_has "dbg" && ! options_has "!dbg" && [ "$arch" != "noarch" ]; then
|
|
subpackages="$pkgname-dbg $subpackages"
|
|
fi
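# For example (assumption about local configuration): setting DEFAULT_DBG=1 in
# /etc/abuild.conf or in the environment makes every arch-dependent aport also
# produce a $pkgname-dbg subpackage, unless it opts out with options="!dbg".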
|
|
|
|
# if we want to build a debug package
|
|
if [ -n "$DEBUG" ] || subpackage_types_has "dbg"; then
|
|
CFLAGS="$CFLAGS -g"
|
|
CXXFLAGS="$CXXFLAGS -g"
|
|
options="$options !strip"
|
|
fi
|
|
|
|
if [ -n "$subpkgname" ]; then
|
|
# If we are handling a subpackage then reset subpackages
|
|
origsubpackages="$subpackages"
|
|
subpackages=
|
|
else
|
|
allpackages="$pkgname $subpackages"
|
|
for i in $linguas; do
|
|
allpackages="$allpackages $pkgname-lang-$i::noarch"
|
|
done
|
|
fi
|
|
apkbuild_arch="$arch"
|
|
pkgdir="$pkgbasedir/$pkgname"
|
|
if [ -z "$pkgarch" ]; then
|
|
pkgarch=$CARCH
|
|
list_has noarch $arch && pkgarch=noarch
|
|
fi
|
|
controldir="$pkgbasedir"/.control.${subpkgname:-$pkgname}
|
|
|
|
trap 'die "Aborted by user"' INT
|
|
|
|
[ -z "$subpkgdir" ] && set_xterm_title "abuild${CROSS_COMPILE+-$CARCH}: $pkgname"
|
|
|
|
if [ -z "$1" ]; then
|
|
set all
|
|
fi
|
|
|
|
while [ $# -gt 0 ]; do
|
|
runpart $1
|
|
shift
|
|
done
|
|
|
|
cleanup
|