#!/bin/sh
|
|
|
|
# script to build apk packages (light version of makepkg)
|
|
# Copyright (c) 2008 Natanael Copa <natanael.copa@gmail.com>
|
|
#
|
|
# Distributed under GPL-2
|
|
#
|
|
# Depends on: busybox utilities, fakeroot
|
|
#
|
|
|
|
abuild_ver=@VERSION@
|
|
sysconfdir=@sysconfdir@
|
|
abuildrepo_base=@abuildrepo@
|
|
datadir=@datadir@
|
|
|
|
program=${0##*/}
|
|
abuild_path=$(readlink -f $0)
|
|
|
|
# defaults
|
|
BUILD_BASE="build-base"
|
|
FAKEROOT=${FAKEROOT:-"fakeroot"}
|
|
|
|
: ${SUDO_APK:=abuild-apk}
|
|
: ${APK:=apk}
|
|
: ${ADDUSER:=abuild-adduser}
|
|
: ${ADDGROUP:=abuild-addgroup}
|
|
|
|
apk_opt_wait="--wait 30"
|
|
|
|
# read config
|
|
ABUILD_CONF=${ABUILD_CONF:-"$sysconfdir/abuild.conf"}
|
|
[ -f "$ABUILD_CONF" ] && . "$ABUILD_CONF"
|
|
|
|
default_colors() {
|
|
NORMAL="\033[1;0m"
|
|
STRONG="\033[1;1m"
|
|
RED="\033[1;31m"
|
|
GREEN="\033[1;32m"
|
|
YELLOW="\033[1;33m"
|
|
BLUE="\033[1;34m"
|
|
}
|
|
|
|
monochrome() {
|
|
NORMAL=""
|
|
STRONG=""
|
|
RED=""
|
|
GREEN=""
|
|
YELLOW=""
|
|
BLUE=""
|
|
}
|
|
|
|
#colors
|
|
if [ -n "$USE_COLORS" ]; then
|
|
default_colors
|
|
fi
|
|
|
|
# run optional log command for remote logging
|
|
logcmd() {
|
|
${ABUILD_LOG_CMD:-true} "$@"
|
|
return 0
|
|
}
|
|
|
|
# functions
|
|
msg() {
|
|
[ -n "$quiet" ] && return 0
|
|
local prompt="$GREEN>>>${NORMAL}"
|
|
local fake="${FAKEROOTKEY:+${BLUE}*${NORMAL}}"
|
|
local name="${STRONG}${subpkgname:-$pkgname}${NORMAL}"
|
|
printf "${prompt} ${name}${fake}: %s\n" "$1" >&2
|
|
}
|
|
|
|
msg2() {
|
|
[ -n "$quiet" ] && return 0
|
|
# ">>> %s"
|
|
printf " %s\n" "$1" >&2
|
|
}
|
|
|
|
warning() {
|
|
local prompt="${YELLOW}>>> WARNING:${NORMAL}"
|
|
local fake="${FAKEROOTKEY:+${BLUE}*${NORMAL}}"
|
|
local name="${STRONG}${subpkgname:-$pkgname}${NORMAL}"
|
|
printf "${prompt} ${name}${fake}: %s\n" "$1" >&2
|
|
}
|
|
|
|
warning2() {
|
|
# ">>> WARNING:
|
|
printf " %s\n" "$1" >&2
|
|
}
|
|
|
|
error() {
|
|
local prompt="${RED}>>> ERROR:${NORMAL}"
|
|
local fake="${FAKEROOTKEY:+${BLUE}*${NORMAL}}"
|
|
local name="${STRONG}${subpkgname:-$pkgname}${NORMAL}"
|
|
printf "${prompt} ${name}${fake}: %s\n" "$1" >&2
|
|
logcmd "ERROR: $pkgname: $1"
|
|
}
|
|
|
|
error2() {
|
|
# ">>> ERROR:
|
|
printf " %s\n" "$1" >&2
|
|
}
|
|
|
|
set_xterm_title() {
|
|
if [ "$TERM" = xterm ] && [ -n "$USE_COLORS" ]; then
|
|
printf "\033]0;$1\007" >&2
|
|
fi
|
|
}
|
|
|
|
cleanup() {
|
|
set_xterm_title ""
|
|
if [ -z "$install_after" ] && [ -n "$uninstall_after" ]; then
|
|
msg "Uninstalling dependencies..."
|
|
$SUDO_APK del --quiet $apk_opt_wait $uninstall_after
|
|
fi
|
|
if [ -n "$CLEANUP_FILES" ]; then
|
|
rm -f $CLEANUP_FILES
|
|
fi
|
|
}
|
|
|
|
die() {
|
|
error "$@"
|
|
cleanup
|
|
exit 1
|
|
}
|
|
|
|
# check if the APKBUILD is basically sane
|
|
default_sanitycheck() {
|
|
local i= j= suggestion=
|
|
msg "Checking sanity of $APKBUILD..."
|
|
[ -z "$pkgname" ] && die "Missing pkgname in APKBUILD"
|
|
[ -z "${pkgname##* *}" ] && die "pkgname contains spaces"
|
|
[ -z "$pkgver" ] && die "Missing pkgver in APKBUILD"
|
|
if [ "$pkgver" != "volatile" ] && [ -z "$nodeps" ]; then
|
|
$APK version --check -q "$pkgver" ||\
|
|
die "$pkgver is not a valid version"
|
|
fi
|
|
[ -z "$pkgrel" ] && die "Missing pkgrel in APKBUILD"
|
|
[ -z "$pkgdesc" ] && die "Missing pkgdesc in APKBUILD"
|
|
[ -z "$url" ] && die "Missing url in APKBUILD"
|
|
[ -z "$license" ] && die "Missing license in APKBULID"
|
|
if [ $(echo "$pkgdesc" | wc -c) -gt 128 ]; then
|
|
die "pkgdesc is too long"
|
|
fi
|
|
|
|
if [ -n "$replaces_priority" ] \
|
|
&& ! echo $replaces_priority | egrep -q '^[0-9]+$'; then
|
|
die "replaces_priority must be a number"
|
|
fi
|
|
# check that no package name starts with -
|
|
for i in $pkgname $subpackages; do
|
|
case $i in
|
|
-*) die "${i%:*} is not a valid package name";;
|
|
esac
|
|
done
|
|
|
|
# check if CARCH, CBUILD, CHOST and CTARGET are set
|
|
if [ -z "$CARCH" ]; then
|
|
case "$(uname -m)" in
|
|
i[0-9]86) suggestion=" (Suggestion: CARCH=x86)";;
|
|
x86_64) suggestion=" (Suggestion: CARCH=x86_64)";;
|
|
esac
|
|
die "Please set CARCH in /etc/abuild.conf$suggestion"
|
|
fi
|
|
[ -z "$CHOST" ] && die "Please set CHOST in /etc/abuild.conf"
|
|
|
|
for i in $install; do
|
|
local n=${i%.*}
|
|
local suff=${i##*.}
|
|
case "$suff" in
|
|
pre-install|post-install|pre-upgrade|post-upgrade|pre-deinstall|post-deinstall);;
|
|
*) die "$i: unknown install script suffix"
|
|
esac
|
|
if ! subpackages_has "$n" && [ "$n" != "$pkgname" ]; then
|
|
die "$i: install script does not match pkgname or any subpackage"
|
|
fi
|
|
[ -e "$startdir/$i" ] || die "install script $i is missing"
|
|
for j in chown chmod chgrp; do
|
|
if grep -q $j "$startdir"/$i; then
|
|
warning "$i: found $j"
|
|
warning2 "Permissions should be fixed in APKBUILD package()"
|
|
fi
|
|
done
|
|
done
|
|
|
|
for i in $triggers; do
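# Illustrative entry (hypothetical paths): triggers="$pkgname.trigger=/usr/share/foo:/usr/lib/bar"
# i.e. "<scriptname>.trigger=<colon-separated list of watched directories>"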
|
|
local f=${i%=*}
|
|
local p=${f%.trigger}
|
|
[ "$f" = "$i" ] && die "$f: triggers must contain '='"
|
|
[ "$p" = "$f" ] && die "$f: triggers scripts must have .trigger suffix"
|
|
if ! subpackages_has "$p" && [ "$p" != "$pkgname" ]; then
|
|
die "$p: trigger script does not match pkgname or any subpackage"
|
|
fi
|
|
|
|
[ -e "$startdir"/$f ] || die "trigger script $f is missing"
|
|
done
|
|
if [ -n "$source" ]; then
|
|
for i in $source; do
|
|
if install_has "$i"; then
|
|
warning "You should not have \$install in source"
|
|
continue
|
|
fi
|
|
case "$i" in
|
|
*::*) i=${i%%::*};;
|
|
https://*) makedepends_has wget && warning "wget no longer needs to be in makedepends when source has https://" ;;
|
|
esac
|
|
list_has ${i##*/} $md5sums $sha256sums $sha512sums \
|
|
|| die "${i##*/} is missing in checksums"
|
|
done
|
|
fi
|
|
|
|
# verify that everything listed in the checksums is also listed in source
|
|
local algo=
|
|
for algo in md5 sha256 sha512; do
|
|
eval set -- \$${algo}sums
|
|
while [ $# -gt 1 ]; do
|
|
local file="$2"
|
|
shift 2
|
|
source_has $file || die "$file exists in ${algo}sums but is missing in source"
|
|
done
|
|
done
|
|
|
|
# common spelling errors
|
|
[ -n "$depend" ] && die "APKBUILD contains 'depend'. It should be depends"
|
|
[ -n "$makedepend" ] && die "APKBUILD contains 'makedepend'. It should be makedepends"
|
|
|
|
grep '^# Maintainer:' $APKBUILD >/dev/null || warning "No maintainer"
|
|
|
|
makedepends_has 'g++' && warning "g++ should not be in makedepends"
|
|
return 0
|
|
}
|
|
|
|
sanitycheck() {
|
|
default_sanitycheck
|
|
}
|
|
|
|
sumcheck() {
|
|
local algo="$1" sums="$2"
|
|
local dummy f endreturnval originalparams origin file
|
|
|
|
# get number of checksums
|
|
set -- $sums
|
|
local numsums=$(( $# / 2 ))
|
|
|
|
set -- $source
|
|
if [ $# -ne $numsums ]; then
|
|
die "Number of ${algo}sums($numsums) does not correspond to number of sources($#)"
|
|
fi
|
|
fetch || return 1
|
|
msg "Checking ${algo}sums..."
|
|
cd "$srcdir" || return 1
|
|
IFS=$'\n'
|
|
endreturnval=0
|
|
for src in $sums; do
|
|
origin=$1; shift
|
|
echo "$src" | ${algo}sum -c
|
|
if [ $? -ne 0 ]; then
|
|
endreturnval=1
|
|
is_remote $origin || continue
|
|
echo "Because the remote file above failed the ${algo}sum check it will be deleted."
|
|
echo "Rebuilding will cause it to re-download which in some cases may fix the problem."
|
|
file=`echo "$src" | sed 's/.*[ \t\n]\(.*\)/\1/'`
|
|
echo "Deleting: $file"
|
|
rm $file
|
|
fi
|
|
done
|
|
unset IFS
|
|
return $endreturnval
|
|
}
|
|
|
|
# for compatibility
|
|
md5check() {
|
|
warning "'md5check' is deprecated. Use 'verify' instead"
|
|
sumcheck md5 "$md5sums"
|
|
}
|
|
|
|
# verify checksums
|
|
verify() {
|
|
local verified=false algo=
|
|
for algo in md5 sha1 sha256 sha512; do
|
|
local sums=
|
|
eval sums=\"\$${algo}sums\"
|
|
if [ -z "$sums" ] || [ -z "$source" ]; then
|
|
continue
|
|
fi
|
|
sumcheck "$algo" "$sums" || return 1
|
|
verified=true
|
|
done
|
|
if [ -n "$source" ] && ! $verified; then
|
|
die "Use 'abuild checksum' to generate/update the checksum(s)"
|
|
fi
|
|
return 0
|
|
}
|
|
|
|
# verify upstream sources
|
|
sourcecheck() {
|
|
local uri
|
|
for uri in $source; do
|
|
is_remote $uri || continue
|
|
case "$uri" in
|
|
saveas-*://*)
|
|
uri=${uri#saveas-}
|
|
uri=${uri%/*}
|
|
;;
|
|
*::*)
|
|
uri=${uri##*::}
|
|
;;
|
|
esac
|
|
wget -q -s "$uri" || return 1
|
|
done
|
|
return 0
|
|
}
|
|
|
|
# convert curl options to wget options and call wget instead of curl
|
|
wget_fallback() {
|
|
local wget_opts= outfile= opt=
|
|
while getopts "C:Lko:s" opt; do
|
|
case $opt in
|
|
'L') ;; # --location. wget does this by default
|
|
'f') ;; # --fail. wget does this by default
|
|
'C') wget_opts="$wget_opts -c";; # --continue-at
|
|
's') wget_opts="$wget_opts -q";; # --silent
|
|
'o') wget_opts="$wget_opts -O $OPTARG";; # --output
|
|
'k') wget_opts="$wget_opts --no-check-certificate";; #gnu wget
|
|
esac
|
|
done
|
|
shift $(( $OPTIND - 1 ))
|
|
wget $wget_opts "$1"
|
|
}
|
|
|
|
uri_fetch() {
|
|
local uri="$1"
|
|
local d="${uri##*/}" # $(basename $uri)
|
|
local opts
|
|
[ -n "$quiet" ] && opts="-s"
|
|
|
|
local lockfile="$SRCDEST/$d".lock
|
|
|
|
# fix saveas-*://* URIs
|
|
case "$uri" in
|
|
# remove 'saveas-' from beginning and
|
|
# '/filename' from end of URI
|
|
saveas-*://*) uri="${uri:7:$(expr ${#uri} - 7 - ${#d} - 1)}";;
|
|
|
|
*::*)
|
|
d=${uri%%::*}
|
|
uri=${uri#$d::}
|
|
;;
|
|
esac
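# Illustrative examples of the two special URI forms handled above
# (hypothetical URLs, not taken from any real APKBUILD):
#   saveas-http://example.com/dl.cgi?id=42/mypkg-1.0.tar.gz
#     -> fetches http://example.com/dl.cgi?id=42, saved as mypkg-1.0.tar.gz
#   mypkg-1.0.tar.gz::http://example.com/archive/v1.0.tar.gz
#     -> fetches the URL on the right, saved as mypkg-1.0.tar.gz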
|
|
|
|
case "$uri" in
|
|
https://*) opts="-k";;
|
|
esac
|
|
|
|
mkdir -p "$SRCDEST"
|
|
|
|
CLEANUP_FILES="$CLEANUP_FILES $lockfile"
|
|
(
|
|
flock -n -x 9 || msg "Waiting for ${lockfile##*/}..."
|
|
flock -x 9
|
|
|
|
[ -f "$SRCDEST/$d" ] && exit 0 # use exit since its a subshell
|
|
|
|
if [ -f "$SRCDEST/$d.part" ]; then
|
|
msg "Partial download found. Trying to resume"
|
|
opts="$opts -C -"
|
|
fi
|
|
msg "Fetching $uri"
|
|
|
|
# fallback to wget if curl is missing. useful for bootstrapping
|
|
local fetcher=
|
|
if ! [ -x "$(which curl)" ]; then
|
|
fetcher=wget_fallback
|
|
else
|
|
fetcher=curl
|
|
opts="$opts -L -f -k"
|
|
fi
|
|
|
|
$fetcher $opts -o "$SRCDEST/$d.part" "$uri" \
|
|
&& mv "$SRCDEST/$d.part" "$SRCDEST/$d"
|
|
|
|
) 9>$lockfile
|
|
|
|
local rc=$?
|
|
rm -f "$lockfile"
|
|
return $rc
|
|
}
|
|
|
|
is_remote() {
|
|
case "${1#*::}" in
|
|
http://*|ftp://*|https://*|saveas-*://*)
|
|
return 0;;
|
|
esac
|
|
return 1
|
|
}
|
|
|
|
filename_from_uri() {
|
|
local uri="$1"
|
|
local filename="${uri##*/}" # $(basename $uri)
|
|
case "$uri" in
|
|
*::*) filename=${uri%%::*};;
|
|
esac
|
|
echo "$filename"
|
|
}
|
|
|
|
# try to download the file from the mirror first
|
|
uri_fetch_mirror() {
|
|
local uri="$1"
|
|
if [ -n "$DISTFILES_MIRROR" ]; then
|
|
if is_remote "$DISTFILES_MIRROR"; then
|
|
uri_fetch "$DISTFILES_MIRROR"/$(filename_from_uri $uri)\
|
|
&& return 0
|
|
else
|
|
cp "$DISTFILES_MIRROR"/$(filename_from_uri $uri) \
|
|
"$SRCDEST" && return 0
|
|
fi
|
|
fi
|
|
uri_fetch "$uri"
|
|
}
|
|
|
|
default_fetch() {
|
|
local s
|
|
mkdir -p "$srcdir"
|
|
for s in $source; do
|
|
if is_remote "$s"; then
|
|
uri_fetch_mirror "$s" || return 1
|
|
ln -sf "$SRCDEST/$(filename_from_uri $s)" "$srcdir"/
|
|
else
|
|
ln -sf "$startdir/$s" "$srcdir/"
|
|
fi
|
|
done
|
|
}
|
|
|
|
fetch() {
|
|
default_fetch
|
|
}
|
|
|
|
# verify that all init.d scripts are openrc runscripts
|
|
initdcheck() {
|
|
local i
|
|
for i in $source; do
|
|
case $i in
|
|
*.initd)
|
|
head -n 1 "$srcdir"/$i | grep -q '/sbin/runscript' \
|
|
&& continue
|
|
error "$i is not an openrc #!/sbin/runscript"
|
|
return 1
|
|
;;
|
|
esac
|
|
done
|
|
}
|
|
|
|
# unpack the sources
|
|
default_unpack() {
|
|
local u
|
|
if [ -z "$force" ]; then
|
|
verify || return 1
|
|
initdcheck || return 1
|
|
fi
|
|
mkdir -p "$srcdir"
|
|
for u in $source; do
|
|
local s="$SRCDEST/$(filename_from_uri $u)"
|
|
case "$s" in
|
|
*.tar)
|
|
msg "Unpacking $s..."
|
|
tar -C "$srcdir" -xf "$s" || return 1;;
|
|
*.tar.gz|*.tgz)
|
|
msg "Unpacking $s..."
|
|
tar -C "$srcdir" -zxf "$s" || return 1;;
|
|
*.tar.bz2)
|
|
msg "Unpacking $s..."
|
|
tar -C "$srcdir" -jxf "$s" || return 1;;
|
|
*.tar.lzma)
|
|
msg "Unpacking $s..."
|
|
unlzma -c "$s" | tar -C "$srcdir" -x \
|
|
|| return 1;;
|
|
*.tar.xz)
|
|
msg "Unpacking $s..."
|
|
unxz -c "$s" | tar -C "$srcdir" -x || return 1;;
|
|
*.zip)
|
|
msg "Unpacking $s..."
|
|
unzip "$s" -d "$srcdir" || return 1;;
|
|
esac
|
|
done
|
|
}
|
|
|
|
unpack() {
|
|
default_unpack
|
|
}
|
|
|
|
# cleanup source and package dir
|
|
clean() {
|
|
msg "Cleaning temporary build dirs..."
|
|
rm -rf "$srcdir"
|
|
rm -rf "$pkgbasedir"
|
|
}
|
|
|
|
# cleanup fetched sources
|
|
cleancache() {
|
|
local s
|
|
for s in $source; do
|
|
if is_remote "$s"; then
|
|
s=$(filename_from_uri $s)
|
|
msg "Cleaning downloaded $s ..."
|
|
rm -f "$SRCDEST/$s"
|
|
fi
|
|
done
|
|
}
|
|
|
|
listpkgnames() {
|
|
local i
|
|
for i in $pkgname $subpackages; do
|
|
echo ${i%:*}
|
|
done
|
|
for i in $linguas; do
|
|
echo $pkgname-lang-$i
|
|
done
|
|
}
|
|
|
|
cleanpkg() {
|
|
local i
|
|
getpkgver || return 1
|
|
msg "Cleaning built packages..."
|
|
for i in $(listpkgnames); do
|
|
local p="${i%:*}-$pkgver-r$pkgrel"
|
|
rm -f "$PKGDEST/$p.apk" "$PKGDEST/$p.src.tar.gz" \
|
|
"$abuildrepo"/$p.apk "$abuildrepo"/*/$p.apk
|
|
done
|
|
# remove given packages from index
|
|
update_abuildrepo_index
|
|
}
|
|
|
|
# clean all packages except current
|
|
cleanoldpkg() {
|
|
local i j
|
|
getpkgver || return 1
|
|
msg "Cleaning all packages except $pkgver-r$pkgrel..."
|
|
for i in $(listpkgnames); do
|
|
local pn=${i%:*}
|
|
for j in "$PKGDEST"/$pn-[0-9]*.apk ; do
|
|
[ "$j" = "$PKGDEST/$pn-$pkgver-r$pkgrel.apk" ] \
|
|
&& continue
|
|
rm -f "$j" "$abuildrepo"/*/${j##*/}
|
|
done
|
|
done
|
|
return 0
|
|
}
|
|
|
|
mkusers() {
|
|
local i
|
|
for i in $pkggroups; do
|
|
if ! getent group $i >/dev/null; then
|
|
msg "Creating group $i"
|
|
$ADDGROUP -S $i || return 1
|
|
fi
|
|
done
|
|
for i in $pkgusers; do
|
|
if ! getent passwd $i >/dev/null; then
|
|
local gopt=
|
|
msg "Creating user $i"
|
|
if getent group $i >/dev/null; then
|
|
gopt="-G $i"
|
|
fi
|
|
$ADDUSER -S -D -H $gopt $i || return 1
|
|
fi
|
|
done
|
|
}
|
|
|
|
|
|
runpart() {
|
|
local part=$1
|
|
[ -n "$DEBUG" ] && msg "$part"
|
|
$part || die "$part failed"
|
|
}
|
|
|
|
# override those in your build script
|
|
getpkgver() {
|
|
# this func is supposed to be overridden by volatile packages
|
|
if [ "$pkgver" = "volatile" ]; then
|
|
error "Please provide a getpkgver() function in your APKBUILD"
|
|
return 1
|
|
fi
|
|
}
|
|
|
|
prepare() {
|
|
:
|
|
}
|
|
|
|
build() {
|
|
:
|
|
}
|
|
|
|
# generate a simple tar.gz package of pkgdir
|
|
targz() {
|
|
cd "$pkgdir" || return 1
|
|
mkdir -p "$PKGDEST"
|
|
tar -czf "$PKGDEST"/$pkgname-$pkgver-r$pkgrel.tar.gz *
|
|
}
|
|
|
|
get_split_func() {
|
|
# get the 'func' from "sub-pkg:func"
|
|
local func=${1##*:}
|
|
|
|
# get 'func' from "sub-pkg-func" if there was no :func
|
|
[ "$func" = "$1" ] && func=${func##*-}
|
|
echo $func
|
|
}
|
|
|
|
postcheck() {
|
|
local dir="$1" name="$2" i=
|
|
msg "Running postcheck for $name"
|
|
# checking for FHS compat
|
|
if ! options_has "!fhs"; then
|
|
for i in "$dir"/srv/* "$dir"/usr/local/* "$dir"/opt/*; do
|
|
if [ -e "$i" ]; then
|
|
error "Packages must not put anything under /srv, /usr/local or /opt"
|
|
return 1
|
|
fi
|
|
done
|
|
fi
|
|
# look for *.la files
|
|
i=$(find "$dir" -name '*.la' | sed "s|^$dir|\t|")
|
|
if [ -n "$i" ] && ! options_has "libtool"; then
|
|
error "Libtool archives (*.la) files found and \$options has no 'libtool' flag:"
|
|
echo "$i"
|
|
return 1
|
|
fi
|
|
# check directory permissions
|
|
i=$(find "$dir" -type d -perm -777 | sed "s|^$dir|\t|")
|
|
if [ -n "$i" ]; then
|
|
warning "World writeable directories found:"
|
|
echo "$i"
|
|
fi
|
|
# check that we don't have any suid binaries that are not PIE
|
|
i=$(find "$dir" -type f -perm +6000 \
|
|
| xargs scanelf --nobanner --etype ET_EXEC \
|
|
| sed "s|ET_EXEC $dir|\t|")
|
|
if [ -n "$i" ]; then
|
|
error "Found non-PIE files that has SUID:"
|
|
echo "$i"
|
|
return 1
|
|
fi
|
|
# test for textrels
|
|
if ! options_has "textrels"; then
|
|
local res="$(scanelf --recursive --textrel --quiet "$dir")"
|
|
if [ -n "$res" ]; then
|
|
error "Found textrels:"
|
|
echo "$res"
|
|
return 1
|
|
fi
|
|
fi
|
|
return 0
|
|
}
|
|
|
|
prepare_subpackages() {
|
|
local i
|
|
cd "$startdir"
|
|
for i in $subpackages; do
|
|
local func=$(get_split_func $i)
|
|
# call abuild recursively, setting subpkg{dir,name}
|
|
msg "Running split function $func..."
|
|
local dir="$pkgbasedir/${i%:*}" name="${i%:*}"
|
|
( subpkgdir="$dir" subpkgname="$name" \
|
|
$0 $func prepare_package \
|
|
&& postcheck "$dir" "$name" ) || return 1
|
|
done
|
|
postcheck "$pkgdir" "$pkgname" || return 1
|
|
# post check for /usr/share/locale
|
|
if [ -d "$pkgdir"/usr/share/locale ]; then
|
|
warning "Found /usr/share/locale"
|
|
warning2 "Maybe add \$pkgname-lang to subpackages?"
|
|
fi
|
|
}
|
|
|
|
default_lang() {
|
|
pkgdesc="Languages for package $pkgname"
|
|
install_if="$pkgname=$pkgver-r$pkgrel lang"
|
|
arch="noarch"
|
|
local dir
|
|
for dir in ${langdir:-/usr/share/locale}; do
|
|
mkdir -p "$subpkgdir"/${dir%/*}
|
|
mv "$pkgdir"/"$dir" "$subpkgdir"/"$dir" || return 1
|
|
done
|
|
}
|
|
|
|
lang() {
|
|
default_lang
|
|
}
|
|
|
|
default_lang_subpkg() {
|
|
if [ -z "$lang" ]; then
|
|
error "lang is not set"
|
|
return 1
|
|
fi
|
|
pkgdesc="$pkgname language pack for $lang"
|
|
arch="noarch"
|
|
install_if="$pkgname=$pkgver-r$pkgrel lang-$lang"
|
|
local dir
|
|
for dir in ${langdir:-/usr/share/locale}; do
|
|
mkdir -p "$subpkgdir"/$dir
|
|
mv "$pkgdir"/$dir/$lang* \
|
|
"$subpkgdir"/$dir/ \
|
|
|| return 1
|
|
done
|
|
}
|
|
|
|
lang_subpkg() {
|
|
default_lang_subpkg
|
|
}
|
|
|
|
prepare_language_packs() {
|
|
for lang in $linguas; do
|
|
lang="$lang" \
|
|
subpkgname="$pkgname-lang-$lang" \
|
|
subpkgdir="$pkgbasedir"/$subpkgname \
|
|
$0 lang_subpkg prepare_package || return 1
|
|
done
|
|
}
|
|
|
|
# echo '-dirty' if git is not clean
|
|
git_dirty() {
|
|
if [ $(git status -s "$startdir" | wc -l) -ne 0 ]; then
|
|
echo "-dirty"
|
|
fi
|
|
}
|
|
|
|
# echo last commit hash id
|
|
git_last_commit() {
|
|
git log --format=oneline -n 1 "$startdir" | awk '{print $1}'
|
|
}
|
|
|
|
get_maintainer() {
|
|
if [ -z "$maintainer" ]; then
|
|
maintainer=$(awk -F': ' '/\# *Maintainer/ {print $2}' "$APKBUILD")
|
|
fi
|
|
}
|
|
|
|
prepare_metafiles() {
|
|
getpkgver || return 1
|
|
local name=${subpkgname:-$pkgname}
|
|
[ -z "${name##* *}" ] && die "package name contains spaces"
|
|
local dir=${subpkgdir:-$pkgdir}
|
|
local pkg="$name-$pkgver-r$pkgrel.apk"
|
|
local pkginfo="$controldir"/.PKGINFO
|
|
local sub
|
|
|
|
[ ! -d "$dir" ] && die "Missing $dir"
|
|
cd "$dir"
|
|
mkdir -p "$controldir"
|
|
local builddate=$(date -u "+%s")
|
|
local size=$(du -sk | awk '{print $1 * 1024}')
|
|
local parch="$CARCH"
|
|
|
|
# we need to hold off on setting noarch until our build infra can handle it
|
|
# if [ "$arch" = "noarch" ]; then
|
|
# parch="noarch"
|
|
# fi
|
|
|
|
echo "# Generated by $(basename $0) $abuild_ver" >"$pkginfo"
|
|
if [ -n "$FAKEROOTKEY" ]; then
|
|
echo "# using $($FAKEROOT -v)" >> "$pkginfo"
|
|
fi
|
|
echo "# $(date -u)" >> "$pkginfo"
|
|
cat >> "$pkginfo" <<EOF
|
|
pkgname = $name
|
|
pkgver = $pkgver-r$pkgrel
|
|
pkgdesc = $pkgdesc
|
|
url = $url
|
|
builddate = $builddate
|
|
packager = ${PACKAGER:-"Unknown"}
|
|
size = $size
|
|
arch = $parch
|
|
origin = $pkgname
|
|
EOF
|
|
local i deps
|
|
deps="$depends"
|
|
if [ "$pkgname" != "busybox" ] && ! depends_has busbox; then
|
|
for i in $install $triggers; do
|
|
local s=${i%=*}
|
|
[ "$name" != "${s%.*}" ] && continue
|
|
if head -n 1 "$startdir/$s" | grep '^#!/bin/sh' >/dev/null ; then
|
|
msg "Script found. busybox added as a dependency for $pkg"
|
|
deps="$deps busybox"
|
|
break
|
|
fi
|
|
done
|
|
fi
|
|
|
|
local last_commit="$(git_last_commit)$(git_dirty)"
|
|
if [ -n "$last_commit" ]; then
|
|
echo "commit = $last_commit" >> "$pkginfo"
|
|
fi
|
|
|
|
get_maintainer
|
|
if [ -n "$maintainer" ]; then
|
|
echo "maintainer = $maintainer" >> "$pkginfo"
|
|
fi
|
|
|
|
if [ -n "$replaces_priority" ]; then
|
|
echo "replaces_priority = $replaces_priority" >> "$pkginfo"
|
|
fi
|
|
|
|
for i in $license; do
|
|
echo "license = $i" >> "$pkginfo"
|
|
done
|
|
for i in $replaces; do
|
|
echo "replaces = $i" >> "$pkginfo"
|
|
done
|
|
for i in $deps; do
|
|
echo "depend = $i" >> "$pkginfo"
|
|
done
|
|
for i in $conflicts; do
|
|
echo "conflict = $i" >> "$pkginfo"
|
|
done
|
|
for i in $provides; do
|
|
echo "provides = $i" >> "$pkginfo"
|
|
done
|
|
for i in $triggers; do
|
|
local f=${i%=*}
|
|
local dirs=${i#*=}
|
|
[ "${f%.trigger}" != "$name" ] && continue
|
|
echo "triggers = ${dirs//:/ }" >> "$pkginfo"
|
|
done
|
|
if [ -n "$install_if" ]; then
|
|
echo "install_if = $(echo $install_if)" >> "$pkginfo"
|
|
fi
|
|
|
|
local metafiles=".PKGINFO"
|
|
for i in $install $triggers; do
|
|
local f=${i%=*}
|
|
local n=${f%.*}
|
|
if [ "$n" != "$name" ]; then
|
|
continue
|
|
fi
|
|
script=${f#$name}
|
|
msg "Adding $script"
|
|
cp "$startdir/$f" "$controldir/$script" || return 1
|
|
chmod +x "$controldir/$script"
|
|
metafiles="$metafiles $script"
|
|
done
|
|
echo $metafiles | tr ' ' '\n' > "$controldir"/.metafiles
|
|
}
|
|
|
|
prepare_tracedeps() {
|
|
local dir=${subpkgdir:-$pkgdir}
|
|
local etype= soname= file= sover=
|
|
[ "$arch" = "noarch" ] && return 0
|
|
options_has "!tracedeps" && return 0
|
|
# lets tell all the places we should look for .so files - all rpaths
|
|
scanelf --quiet --recursive --rpath "$dir" \
|
|
| sed -e 's/[[:space:]].*//' -e 's/:/\n/' | sort -u \
|
|
>"$controldir"/.rpaths
|
|
if grep -q -x '/usr/lib' "$controldir"/.rpaths; then
|
|
warning "Redundat /usr/lib in rpath found"
|
|
fi
|
|
if grep '^/home/' "$controldir"/.rpaths; then
|
|
error "Has /home/... in rpath"
|
|
return 1
|
|
fi
|
|
}
|
|
|
|
# check if dir has arch specific binaries
|
|
dir_has_arch_binaries() {
|
|
local dir="$1"
|
|
# if scanelf returns something, then we have binaries
|
|
[ -n "$(scanelf -R "$dir" | head -n 1)" ] && return 0
|
|
|
|
# look for static *.a
|
|
[ -n "$(find "$dir" -type f -name '*.a' | head -n 1)" ] && return 0
|
|
|
|
return 1
|
|
}
|
|
|
|
# returns true if this is the -dev package
|
|
is_dev_pkg() {
|
|
test "${subpkgname%-dev}" != "$subpkgname"
|
|
}
|
|
|
|
# check that noarch is set if needed
|
|
archcheck() {
|
|
options_has "!archcheck" && return 0
|
|
if dir_has_arch_binaries "${subpkgdir:-$pkgdir}"; then
|
|
[ "$arch" != "noarch" ] && return 0
|
|
error "Arch specific binaries found so arch must not be set to \"noarch\""
|
|
return 1
|
|
elif [ "$arch" != "noarch" ] && ! is_dev_pkg; then
|
|
# we don't want the -dev package to go to noarch
|
|
warning "No arch specific binaries found so arch should probably be set to \"noarch\""
|
|
fi
|
|
return 0
|
|
}
|
|
|
|
prepare_package() {
|
|
msg "Preparing ${subpkgname:+sub}package ${subpkgname:-$pkgname}..."
|
|
stripbin
|
|
prepare_metafiles && prepare_tracedeps || return 1
|
|
archcheck
|
|
}
|
|
|
|
pkginfo_val() {
|
|
local key="$1"
|
|
local file="$2"
|
|
awk -F ' = ' "\$1 == \"$key\" {print \$2}" "$file"
|
|
}
|
|
|
|
# find real path to so files
|
|
real_so_path() {
|
|
local so="$1"
|
|
shift
|
|
while [ $# -gt 0 ]; do
|
|
[ -e "$1"/$so ] && realpath "$1/$so" && return 0
|
|
shift
|
|
done
|
|
error "$so: path not found"
|
|
return 1
|
|
}
|
|
|
|
# search rpaths and /usr/lib /lib for given so files
|
|
find_so_files() {
|
|
local rpaths=$(cat "$1")
|
|
shift
|
|
while [ $# -gt 0 ]; do
|
|
real_so_path "$1" /usr/lib /lib $rpaths || return 1
|
|
shift
|
|
done
|
|
return 0
|
|
}
|
|
|
|
subpkg_provides() {
|
|
grep -q -w "^$1" "$pkgbasedir"/.control.*/.provides-so 2>/dev/null
|
|
}
|
|
|
|
trace_apk_deps() {
|
|
local name="$1"
|
|
local dir="$2"
|
|
local i= found= autodeps= deppkgs= missing=
|
|
msg "Tracing dependencies..."
|
|
# add pkgconfig if usr/lib/pkgconfig is found
|
|
if [ -d "$pkgbasedir"/$name/usr/lib/pkgconfig ] \
|
|
&& ! grep -q '^depend = pkgconfig' "$dir"/.PKGINFO; then
|
|
msg " added pkgconfig (found /usr/lib/pkgconfig)"
|
|
autodeps="$autodeps pkgconfig"
|
|
fi
|
|
|
|
# special case for libpthread: we need depend on libgcc
|
|
if [ -f "$dir"/.needs-so ] && grep -q -w '^libpthread.so.*' "$dir"/.needs-so \
|
|
&& ! grep -q -w "^depend = libgcc" "$dir"/.PKGINFO; then
|
|
autodeps="$autodeps libgcc"
|
|
msg " added libgcc (due to libpthread)"
|
|
fi
|
|
[ -f "$dir"/.needs-so ] && for i in $(cat "$dir"/.needs-so); do
|
|
# first check if its provided by same apkbuild
|
|
grep -q -w "^$i" "$dir"/.provides-so 2>/dev/null && continue
|
|
|
|
if subpkg_provides "$i" || $APK info -q -e "so:$i"; then
|
|
autodeps="$autodeps so:$i"
|
|
else
|
|
missing="$missing $i"
|
|
fi
|
|
done
|
|
|
|
# find all packages that holds the so files
|
|
if [ -f "$dir"/.rpaths ]; then
|
|
local so_files=$(find_so_files "$dir"/.rpaths $missing) \
|
|
|| return 1
|
|
deppkgs=$($APK info -q -W $so_files) || return 1
|
|
fi
|
|
|
|
for found in $deppkgs; do
|
|
if grep -w "^depend = ${found}$" "$dir"/.PKGINFO >/dev/null ; then
|
|
warning "You can remove '$found' from depends"
|
|
continue
|
|
fi
|
|
autodeps="$autodeps $found"
|
|
done
|
|
|
|
echo "# automatically detected:" >> "$dir"/.PKGINFO
|
|
if [ -f "$dir"/.provides-so ]; then
|
|
sed 's/^\(.*\) \([0-9].*\)/provides = so:\1=\2/' "$dir"/.provides-so \
|
|
>> "$dir"/.PKGINFO
|
|
fi
|
|
[ -z "$autodeps" ] && return 0
|
|
for i in $autodeps; do
|
|
echo "depend = $i"
|
|
done | sort -u >> "$dir"/.PKGINFO
|
|
# display all depends
|
|
sed -n '/^depend =/s/depend = /\t/p' "$dir"/.PKGINFO >&2
|
|
}
|
|
|
|
find_scanelf_paths() {
|
|
local datadir="$1"
|
|
local paths="$datadir/lib:$datadir/usr/lib" i= rpaths=
|
|
if [ -n "$ldpath" ]; then
|
|
paths="$paths:$(echo "$ldpath" | sed "s|\(^\|:\)|\1$datadir|g")"
|
|
fi
|
|
# search in all rpaths
|
|
for rpaths in "$pkgbasedir"/.control.*/.rpaths; do
|
|
[ -f "$rpaths" ] || continue
|
|
while read i; do
|
|
local dir="${datadir}${i}"
|
|
IFS=:
|
|
if [ -d "$dir" ] && ! list_has "$dir" $paths; then
|
|
paths="$paths:${dir}"
|
|
fi
|
|
unset IFS
|
|
done < "$rpaths"
|
|
done
|
|
echo "$paths"
|
|
}
|
|
|
|
scan_shared_objects() {
|
|
local name="$1" controldir="$2" datadir="$3"
|
|
local opt=
|
|
|
|
# allow spaces in paths
|
|
IFS=:
|
|
set -- $(find_scanelf_paths "$datadir")
|
|
unset IFS
|
|
|
|
if options_has "ldpath-recursive"; then
|
|
opt="--recursive"
|
|
fi
|
|
msg "Scanning shared objects"
|
|
# lets tell all the .so files this package provides in .provides-so
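# each line of .provides-so looks like "<soname> <version>", e.g. the
# illustrative entry "libfoo.so.1 1.2.3" (the version falls back to 0 below)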
|
|
scanelf --nobanner --soname $opt "$@" | while read etype soname file; do
|
|
# if soname field is missing, soname will be the filepath
|
|
sover=0
|
|
if [ -z "$file" ]; then
|
|
file="$soname"
|
|
soname=${soname##*/}
|
|
fi
|
|
|
|
# we only want shared libs
|
|
case $soname in
|
|
*.so|*.so.[0-9]*|*.c32);;
|
|
*) continue;;
|
|
esac
|
|
|
|
case "$file" in
|
|
*.so.[0-9]*) sover=${file##*.so.};;
|
|
*.so)
|
|
# filter out sonames with version when file does not
|
|
# have version
|
|
case "$soname" in
|
|
*.so.[0-9]*)
|
|
if options_has "sover-namecheck"; then
|
|
continue
|
|
fi
|
|
esac
|
|
;;
|
|
esac
|
|
list_has "$soname" $somask && continue
|
|
echo "$soname $sover"
|
|
# use awk to filter out dupes that has sover = 0
|
|
done | awk '{ if (so[$1] == 0) so[$1] = $2; }
|
|
END { for (i in so) print(i " " so[i]); }' \
|
|
| sort -u > "$controldir"/.provides-so
|
|
|
|
# verify that we dont have any duplicates
|
|
local dupes="$(cut -d' ' -f1 "$controldir"/.provides-so | uniq -d)"
|
|
if [ -n "$dupes" ]; then
|
|
die "provides multiple versions of same shared object: $dupes"
|
|
fi
|
|
|
|
# now find the so dependencies
|
|
scanelf --nobanner --recursive --needed "$datadir" | tr ' ' ':' \
|
|
| awk -F ":" '$1 == "ET_DYN" || $1 == "ET_EXEC" {print $2}' \
|
|
| sed 's:,:\n:g' | sort -u \
|
|
| while read soname; do
|
|
# only add files that are not self provided
|
|
grep -q -w "^$soname" "$controldir"/.provides-so \
|
|
|| list_has "$soname" $somask \
|
|
|| echo $soname
|
|
done > "$controldir"/.needs-so
|
|
}
|
|
|
|
# read size in bytes from stdin and show as human readable
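# e.g. (illustrative): echo 1048576 | human_size  ->  "1.0 MB"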
|
|
human_size() {
|
|
awk '{ split("B KB MB GB TB PB", type)
|
|
for(i=5; y < 1; i--)
|
|
y = $1 / (2**(10*i))
|
|
printf("%.1f %s\n", y, type[i+2]) }'
|
|
}
|
|
|
|
create_apks() {
|
|
local file= dir= name= ver= apk= datadir= size=
|
|
getpkgver || return 1
|
|
mkdir -p "$PKGDEST"
|
|
if [ "$arch" != "noarch" ] && ! options_has "!tracedeps"; then
|
|
for file in "$pkgbasedir"/.control.*/.PKGINFO; do
|
|
dir="${file%/.PKGINFO}"
|
|
name="$(pkginfo_val pkgname $file)"
|
|
datadir="$pkgbasedir"/$name
|
|
subpkgname=$name
|
|
scan_shared_objects "$name" "$dir" "$datadir"
|
|
done
|
|
fi
|
|
for file in "$pkgbasedir"/.control.*/.PKGINFO; do
|
|
dir="${file%/.PKGINFO}"
|
|
name=$(pkginfo_val pkgname $file)
|
|
ver=$(pkginfo_val pkgver $file)
|
|
size=$(pkginfo_val size $file | human_size)
|
|
apk=$name-$ver.apk
|
|
datadir="$pkgbasedir"/$name
|
|
subpkgname=$name
|
|
|
|
trace_apk_deps "$name" "$dir" || return 1
|
|
msg "Package size: ${size}"
|
|
msg "Compressing data..."
|
|
(
|
|
cd "$datadir"
|
|
# data.tar.gz
|
|
set -- *
|
|
if [ "$1" = '*' ]; then
|
|
touch .dummy
|
|
set -- .dummy
|
|
fi
|
|
tar -c "$@" | abuild-tar --hash | gzip -9 >"$dir"/data.tar.gz
|
|
|
|
msg "Create checksum..."
|
|
# append the hash for data.tar.gz
|
|
local sha256=$(sha256sum "$dir"/data.tar.gz | cut -f1 -d' ')
|
|
echo "datahash = $sha256" >> "$dir"/.PKGINFO
|
|
|
|
# control.tar.gz
|
|
cd "$dir"
|
|
tar -c $(cat "$dir"/.metafiles) | abuild-tar --cut \
|
|
| gzip -9 > control.tar.gz
|
|
abuild-sign -q control.tar.gz || exit 1
|
|
|
|
msg "Create $apk"
|
|
# create the final apk
|
|
cat control.tar.gz data.tar.gz > "$PKGDEST"/$apk
|
|
)
|
|
done
|
|
subpkgname=
|
|
}
|
|
|
|
clean_abuildrepo() {
|
|
local apk
|
|
cd "$abuildrepo" || return 1
|
|
|
|
# remove compat symlink
|
|
for d in "$abuildrepo/$CARCH"; do
|
|
[ -L "$d" ] && rm "$d"
|
|
done
|
|
|
|
# remove broken links from abuildrepo
|
|
for apk in *.apk */*.apk; do
|
|
if [ -L "$apk" ] && [ ! -f "$apk" ]; then
|
|
rm -f "$apk"
|
|
fi
|
|
done
|
|
}
|
|
|
|
mklinks_abuildrepo() {
|
|
[ -n "$REPODEST" ] && return 0
|
|
local apk
|
|
mkdir -p "$abuildrepo"/$CARCH
|
|
cd "$abuildrepo" || return 1
|
|
# create links for this package
|
|
for apk in $(listpkg); do
|
|
[ -f "$PKGDEST"/$apk ] || continue
|
|
ln -sf "$PKGDEST"/$apk "$abuildrepo"/$CARCH/$apk
|
|
done
|
|
}
|
|
|
|
build_abuildrepo() {
|
|
local d apk
|
|
if ! apk_up2date || [ -n "$force" ]; then
|
|
logcmd "building $pkgname"
|
|
sanitycheck && builddeps && clean && fetch && unpack \
|
|
&& prepare && mkusers && rootpkg || return 1
|
|
fi
|
|
update_abuildrepo_index
|
|
}
|
|
|
|
update_abuildrepo_index() {
|
|
clean_abuildrepo
|
|
mklinks_abuildrepo
|
|
|
|
cd "$abuildrepo"
|
|
local index=$CARCH/APKINDEX.tar.gz
|
|
|
|
msg "Updating the cached abuild repository index..."
|
|
local sign=".SIGN.RSA.${SIGN_PUBLIC_KEY##*/}"
|
|
local oldindex=
|
|
if [ -f "$index" ]; then
|
|
oldindex="--index $index"
|
|
fi
|
|
$APK index --quiet $oldindex --output "$index".unsigned \
|
|
--description "$repo $(cd $startdir && git describe)" \
|
|
--rewrite-arch $CARCH \
|
|
$CARCH/*.apk || exit 1
|
|
msg "Signing the index..."
|
|
abuild-sign -q "$index".unsigned || exit 1
|
|
mv "$index".unsigned "$index"
|
|
chmod 644 "$index"
|
|
}
|
|
|
|
# predefined splitfunc doc
|
|
default_doc() {
|
|
depends="$depends_doc"
|
|
pkgdesc="$pkgdesc (documentation)"
|
|
arch=${arch_doc:-"noarch"}
|
|
|
|
local i
|
|
for i in doc man info html sgml licenses gtk-doc ri; do
|
|
if [ -d "$pkgdir/usr/share/$i" ]; then
|
|
mkdir -p "$subpkgdir/usr/share"
|
|
mv "$pkgdir/usr/share/$i" "$subpkgdir/usr/share/"
|
|
fi
|
|
done
|
|
|
|
rm -f "$subpkgdir/usr/share/info/dir"
|
|
|
|
# remove if empty, ignore error (not empty)
|
|
rmdir "$pkgdir/usr/share" "$pkgdir/usr" 2>/dev/null
|
|
|
|
return 0
|
|
}
|
|
|
|
doc() {
|
|
default_doc
|
|
}
|
|
|
|
# predefined splitfunc dev
|
|
default_dev() {
|
|
local i= j=
|
|
depends="$pkgname $depends_dev"
|
|
pkgdesc="$pkgdesc (development files)"
|
|
|
|
for i in $origsubpackages; do
|
|
[ "${i%:*}" = "$subpkgname" ] || depends="$depends ${i%:*}"
|
|
done
|
|
|
|
cd "$pkgdir" || return 0
|
|
for i in usr/include usr/lib/pkgconfig usr/share/aclocal\
|
|
usr/share/gettext usr/bin/*-config \
|
|
usr/share/vala/vapi usr/share/gir-[0-9]*\
|
|
usr/share/qt/mkspecs \
|
|
$(find -name include -type d) \
|
|
$(find usr/ -name '*.[acho]' 2>/dev/null); do
|
|
if [ -e "$pkgdir/$i" ] || [ -L "$pkgdir/$i" ]; then
|
|
d="$subpkgdir/${i%/*}" # dirname $i
|
|
mkdir -p "$d"
|
|
mv "$pkgdir/$i" "$d"
|
|
rmdir "$pkgdir/${i%/*}" 2>/dev/null
|
|
fi
|
|
done
|
|
# move *.so links needed when linking the apps to -dev packages
|
|
for i in lib/*.so usr/lib/*.so; do
|
|
if [ -L "$i" ]; then
|
|
mkdir -p "$subpkgdir"/"${i%/*}"
|
|
mv "$i" "$subpkgdir/$i" || return 1
|
|
fi
|
|
done
|
|
return 0
|
|
}
|
|
|
|
dev() {
|
|
default_dev
|
|
}
|
|
|
|
# predefined splitfunc libs
|
|
default_libs() {
|
|
pkgdesc="$pkgdesc (libraries)"
|
|
local dir= file=
|
|
for dir in lib usr/lib; do
|
|
for file in "$pkgdir"/$dir/lib*.so.[0-9]*; do
|
|
[ -f "$file" ] || continue
|
|
mkdir -p "$subpkgdir"/$dir
|
|
mv "$file" "$subpkgdir"/$dir/
|
|
done
|
|
done
|
|
}
|
|
|
|
libs() {
|
|
default_libs
|
|
}
|
|
|
|
is_function() {
|
|
type "$1" 2>&1 | head -n 1 | egrep -q "is a (shell )?function"
|
|
}
|
|
|
|
do_fakeroot() {
|
|
if [ -n "$FAKEROOT" ]; then
|
|
$FAKEROOT -- "$@"
|
|
else
|
|
"$@"
|
|
fi
|
|
}
|
|
|
|
# build and package in fakeroot
|
|
rootpkg() {
|
|
local do_build=build
|
|
cd "$startdir"
|
|
if is_function package; then
|
|
build || return 1
|
|
do_build=package
|
|
fi
|
|
cd "$startdir"
|
|
[ -n "$FAKEROOT" ] && msg "Entering fakeroot..."
|
|
do_fakeroot "$abuild_path" $color_opt $do_build \
|
|
prepare_subpackages \
|
|
prepare_language_packs \
|
|
prepare_package \
|
|
create_apks
|
|
}
|
|
|
|
srcpkg() {
|
|
getpkgver || return 1
|
|
local p="$pkgname-$pkgver-$pkgrel"
|
|
local prefix="${startdir##*/}"
|
|
local i files="$prefix/APKBUILD"
|
|
for i in $source; do
|
|
files="$files $prefix/$(filename_from uri $i)"
|
|
done
|
|
mkdir -p "$PKGDEST"
|
|
msg "Creating source package $p.src.tar.gz..."
|
|
(cd .. && tar -zcf "$PKGDEST/$p.src.tar.gz" $files)
|
|
}
|
|
|
|
# return true if arch is supported or noarch
|
|
check_arch() {
|
|
list_has $CARCH $arch || [ "$arch" = "noarch" ] || [ "$arch" = "all" ]
|
|
}
|
|
|
|
# check if package is up to date
|
|
apk_up2date() {
|
|
getpkgver || return 1
|
|
local pkg="$PKGDEST/$pkgname-$pkgver-r$pkgrel.apk"
|
|
local i s
|
|
cd "$startdir"
|
|
for i in $pkgname $subpackages; do
|
|
[ -f "$PKGDEST/$pkgname-$pkgver-r$pkgrel.apk" ] || return 1
|
|
done
|
|
[ -n "$keep" ] && return 0
|
|
|
|
for i in $source APKBUILD; do
|
|
local s
|
|
if is_remote "$i"; then
|
|
s="$SRCDEST/$(filename_from_uri $i)"
|
|
else
|
|
s="$startdir/${i##*/}"
|
|
fi
|
|
if [ "$s" -nt "$pkg" ]; then
|
|
return 1
|
|
fi
|
|
done
|
|
return 0
|
|
}
|
|
|
|
abuildindex_up2date() {
|
|
local i
|
|
getpkgver || return 1
|
|
local dir="$abuildrepo"/$CARCH
|
|
local apk="${pkgname%:*}-$pkgver-r$pkgrel.apk"
|
|
local idx="$dir"/APKINDEX.tar.gz
|
|
local file="$dir"/$apk
|
|
|
|
# check if index is missing
|
|
[ -f "$idx" ] || return 1
|
|
|
|
# if the link or file is missing, then we need to update the abuildrepo index
|
|
[ -f "$file" ] || return 1
|
|
|
|
# if the file exists and is newer than the index, then we need to update the index
|
|
[ "$file" -nt "$idx" ] && return 1
|
|
|
|
return 0
|
|
}
|
|
|
|
up2date() {
|
|
check_arch || return 0
|
|
apk_up2date && abuildindex_up2date
|
|
}
|
|
|
|
# rebuild package and abuildrepo index if needed
|
|
abuildindex() {
|
|
up2date && return 0
|
|
build_abuildrepo
|
|
}
|
|
|
|
# source all APKBUILDs and output:
|
|
# 1) origin of package
|
|
# 2) all dependencies
|
|
# the output is in a format easily parseable by awk
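# Illustrative output for a hypothetical aport "foo" (with subpackage
# "foo-doc") living in main/foo and depending on bar and baz:
#   o foo /path/to/aports/main/foo
#   d foo bar,baz
#   o foo-doc /path/to/aports/main/foo
#   d foo-doc bar,baz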
|
|
depparse_aports() {
|
|
# lets run this in a subshell since we source all APKBUILD here
|
|
(
|
|
aportsdir=$(realpath ${APKBUILD%/APKBUILD}/..)
|
|
for i in $aportsdir/*/APKBUILD; do
|
|
pkgname=
|
|
subpackages=
|
|
depends=
|
|
makedepends=
|
|
. $i
|
|
dir=${i%/APKBUILD}
|
|
deps=
|
|
# filter out conflicts from deps and version info
|
|
for j in $depends $makedepends; do
|
|
case "$j" in
|
|
!*) continue;;
|
|
esac
|
|
deps="$deps ${j%%[<>=]*}"
|
|
done
|
|
for j in $pkgname $subpackages; do
|
|
echo "o ${j%%:*} $dir"
|
|
set -- $deps
|
|
echo -n "d ${j%%:*} $1"
|
|
shift
|
|
while [ $# -gt 0 ]; do
|
|
echo -n ",$1"
|
|
shift
|
|
done
|
|
echo
|
|
done
|
|
done
|
|
)
|
|
}
|
|
|
|
deptrace() {
|
|
local deps= i=
|
|
# strip versions from deps
|
|
for i in "$@"; do
|
|
deps="$deps ${i%%[<>=]*}"
|
|
done
|
|
[ -z "$deps" ] && return 0
|
|
( depparse_aports
|
|
if [ -z "$upgrade" ]; then
|
|
# list installed pkgs and prefix with 'i '
|
|
$APK info -q | sort | sed 's/^/i /'
|
|
fi
|
|
) | awk -v pkgs="$deps" '
|
|
|
|
function depgraph(pkg, a, i) {
|
|
if (visited[pkg])
|
|
return 0;
|
|
visited[pkg] = 1;
|
|
split(deps[pkg], a, ",");
|
|
for (i in a)
|
|
depgraph(a[i]);
|
|
print pkg ":" origin[pkg];
|
|
|
|
}
|
|
|
|
$1 == "i" { visited[$2] = 1 }
|
|
$1 == "o" { origin[$2] = $3 }
|
|
$1 == "d" { deps[$2] = $3 }
|
|
END {
|
|
split(pkgs, pkgarray);
|
|
for (i in pkgarray)
|
|
depgraph(pkgarray[i]);
|
|
}
|
|
'
|
|
}
|
|
|
|
# build and install dependencies
|
|
builddeps() {
|
|
local deps= alldeps= pkg= i= dir= ver= missing= installed_deps=
|
|
local filtered_deps= conflicts=
|
|
[ -n "$nodeps" ] && return 0
|
|
msg "Analyzing dependencies..."
|
|
|
|
# add depends unless it is a subpackage or package itself
|
|
for i in $BUILD_BASE $depends $makedepends; do
|
|
[ "$pkgname" = "${i%%[<>=]*}" ] && continue
|
|
subpackages_has ${i%%[<>=]*} || deps="$deps $i"
|
|
done
|
|
|
|
installed_deps=$($APK info -e $deps)
|
|
# find which deps are missing
|
|
for i in $deps; do
|
|
if [ "${i#\!}" != "$i" ]; then
|
|
$APK info -q -e "${i#\!}" \
|
|
&& conflicts="$conflicts ${i#\!}"
|
|
elif ! deplist_has $i $installed_deps || [ -n "$upgrade" ]; then
|
|
missing="$missing $i"
|
|
fi
|
|
done
|
|
|
|
if [ -n "$conflicts" ]; then
|
|
error "Conflicting package(s) installed:$conflicts"
|
|
return 1
|
|
fi
|
|
|
|
if [ -z "$install_deps" ] && [ -z "$recursive" ]; then
|
|
# if we dont have any missing deps we are done now
|
|
[ -z "$missing" ] && return 0
|
|
error "Missing dependencies: $missing Use -r to autoinstall or -R to build"
|
|
return 1
|
|
fi
|
|
|
|
uninstall_after=".makedepends-$pkgname $uninstall_after"
|
|
if [ -n "$install_deps" ] && [ -z "$recursive" ] && [ -n "$deps" ]; then
|
|
# make a --simulate run first to detect missing deps
|
|
# apk-tools --virtual is not good at reporting those.
|
|
$SUDO_APK add --repository "$abuildrepo" \
|
|
$apk_opt_wait \
|
|
--simulate --quiet $deps || return 1
|
|
$SUDO_APK add --repository "$abuildrepo" \
|
|
$apk_opt_wait \
|
|
--virtual .makedepends-$pkgname $deps \
|
|
&& return 0
|
|
fi
|
|
|
|
[ -z "$recursive" ] && return 1
|
|
|
|
# find dependencies that are installed but missing in repo.
|
|
for i in $deps; do
|
|
local m=$($APK search --repository "$abuildrepo" ${i%%[<>=]*})
|
|
if [ -z "$m" ]; then
|
|
missing="$missing $i"
|
|
fi
|
|
done
|
|
|
|
for i in $(deptrace $missing); do
|
|
# i = pkg:dir
|
|
local dir=${i#*:}
|
|
local pkg=${i%:*}
|
|
|
|
# ignore if dependency is in other repo
|
|
[ -d "$dir" ] || continue
|
|
|
|
# break circular deps
|
|
list_has $pkg $ABUILD_VISITED && continue
|
|
export ABUILD_VISITED="$ABUILD_VISITED $pkg"
|
|
|
|
msg "Entering $dir"
|
|
cd "$dir" && $0 $forceroot $keep $quiet $install_deps \
|
|
$recursive $upgrade $color_opt abuildindex || return 1
|
|
done
|
|
$SUDO_APK add -u --repository "$abuildrepo" \
|
|
$apk_opt_wait \
|
|
--virtual .makedepends-$pkgname $deps
|
|
}
|
|
|
|
# regenerate the checksums (md5/sha256/sha512) in the APKBUILD
|
|
checksum() {
|
|
local s files
|
|
[ -z "$source" ] && [ -n "${md5sums}${sha256sums}${sha512sums}" ] \
|
|
&& msg "Removing checksums from APKBUILD"
|
|
sed -i -e '/^md5sums="/,/"\$/d; /^md5sums=''/,/''\$/d' "$APKBUILD"
|
|
sed -i -e '/^sha512sums="/,/"\$/d; /^sha512sums=''/,/''\$/d' "$APKBUILD"
|
|
sed -i -e '/^sha256sums="/,/"\$/d; /^sha256sums=''/,/''\$/d' "$APKBUILD"
|
|
[ -z "$source" ] && return 0
|
|
fetch
|
|
for s in $source; do
|
|
files="$files $(filename_from_uri $s)"
|
|
done
|
|
|
|
# for compatibility/backporting reasons we still add md5sum
|
|
msg "Updating the md5sums in APKBUILD..."
|
|
md5sums="$(cd "$srcdir" && md5sum $files)" || die "md5sum failed"
|
|
echo "md5sums=\"$md5sums\"" >>"$APKBUILD"
|
|
|
|
msg "Updating the sha256sums in APKBUILD..."
|
|
sha256sums="$(cd "$srcdir" && sha256sum $files)" \
|
|
|| die "sha256sum failed"
|
|
echo "sha256sums=\"$sha256sums\"" >>"$APKBUILD"
|
|
|
|
msg "Updating the sha512sums in APKBUILD..."
|
|
sha512sums="$(cd "$srcdir" && sha512sum $files)" \
|
|
|| die "sha512sum failed"
|
|
echo "sha512sums=\"$sha512sums\"" >>"$APKBUILD"
|
|
}
|
|
|
|
stripbin() {
|
|
local bin
|
|
if options_has "!strip" || [ "$arch" = "noarch" ]; then
|
|
return 0
|
|
fi
|
|
cd "${subpkgdir:-$pkgdir}" || return 1
|
|
msg "Stripping binaries"
|
|
scanelf --recursive --nobanner --etype "ET_DYN,ET_EXEC" . \
|
|
| sed -e 's:^ET_DYN ::' -e 's:^ET_EXEC ::' \
|
|
| xargs -r strip
|
|
}
|
|
|
|
# simply list target apks
|
|
listpkg() {
|
|
local name
|
|
getpkgver || return 1
|
|
for name in $(listpkgnames) ; do
|
|
echo "$name-$pkgver-r$pkgrel.apk"
|
|
done
|
|
}
|
|
|
|
source_has() {
|
|
local i
|
|
for i in $source; do
|
|
[ "$1" = "${i##*/}" ] && return 0
|
|
[ "$1" = "${i%%::*}" ] && return 0
|
|
done
|
|
return 1
|
|
}
|
|
|
|
subpackages_has() {
|
|
local i
|
|
for i in $subpackages; do
|
|
[ "$1" = "${i%:*}" ] && return 0
|
|
done
|
|
return 1
|
|
}
|
|
|
|
list_has() {
|
|
local needle="$1"
|
|
local i
|
|
shift
|
|
for i in $@; do
|
|
[ "$needle" = "$i" ] && return 0
|
|
[ "$needle" = "!$i" ] && return 1
|
|
done
|
|
return 1
|
|
}
|
|
|
|
# same as list_has but we filter version info
|
|
deplist_has() {
|
|
local needle="$1"
|
|
local i
|
|
shift
|
|
for i in $@; do
|
|
i=${i%%[<>=]*}
|
|
[ "$needle" = "$i" ] && return 0
|
|
[ "$needle" = "!$i" ] && return 1
|
|
done
|
|
return 1
|
|
}
|
|
|
|
options_has() {
|
|
list_has "$1" $options
|
|
}
|
|
|
|
depends_has() {
|
|
deplist_has "$1" $depends
|
|
}
|
|
|
|
makedepends_has() {
|
|
deplist_has "$1" $makedepends
|
|
}
|
|
|
|
md5sums_has() {
|
|
list_has "$1" $md5sums
|
|
}
|
|
|
|
install_has() {
|
|
list_has "$1" $install
|
|
}
|
|
|
|
# install package after build
|
|
post_add() {
|
|
getpkgver || return 1
|
|
local pkgf="$PKGDEST/$1-$pkgver-r$pkgrel.apk"
|
|
local deps i
|
|
if ! subpackages_has $1 && [ "$1" != "$pkgname" ]; then
|
|
die "$1 is not built by this APKBUILD"
|
|
fi
|
|
# recursively install dependencies that are provided by this APKBUILD
|
|
deps=$($APK index "$pkgf" 2>/dev/null | awk -F: '$1=="D" { print $2 }')
|
|
for i in $deps; do
|
|
if subpackages_has $i || [ "$i" = "$pkgname" ]; then
|
|
post_add $i || return 1
|
|
fi
|
|
done
|
|
$SUDO_APK add $apk_opt_wait -u "$pkgf" || die "Failed to install $1"
|
|
}
|
|
|
|
installdeps() {
|
|
local deps i
|
|
$SUDO_APK add $apk_opt_wait --repository "$abuildrepo" \
|
|
--virtual .makedepends-$pkgname \
|
|
$makedepends
|
|
}
|
|
|
|
uninstalldeps() {
|
|
$SUDO_APK del $apk_opt_wait .makedepends-$pkgname
|
|
}
|
|
|
|
all() {
|
|
if ! [ -n "$force" ]; then
|
|
check_arch || return 0
|
|
fi
|
|
if up2date && [ -z "$force" ]; then
|
|
msg "Package is up to date"
|
|
else
|
|
build_abuildrepo
|
|
fi
|
|
}
|
|
|
|
# This abuild hook will checkout an svn or git repository by specifying
|
|
# $svnurl or $giturl in APKBUILD. You can checkout a specific branch in
|
|
# git by adding -b $branch in $giturl. $reporev will select the correct
|
|
# commit, revision or tag for you. If you specify $disturl your distfile
|
|
# will automatically be uploaded with rsync to the url provided.
|
|
# Base version defaults to 0 unless specified by $verbase.
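# Illustrative APKBUILD snippet (hypothetical URLs):
#   giturl="git://example.com/foo.git -b stable"
#   reporev="v1.2.3"
#   disturl="dev.example.com:/var/www/distfiles/"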
|
|
|
|
snapshot() {
|
|
# check if we setup vars correctly
|
|
[ -z "$disturl" ] && warning "Missing disturl in APKBUILD, auto uploading disabled."
|
|
[ -z "$svnurl" ] && [ -z "$giturl" ] && die "Missding repository url in APKBUILD!"
|
|
[ -n "$svnurl" ] && [ -n "$giturl" ] && die "You can only use a single repository!"
|
|
local _date=$(date +%Y%m%d)
|
|
local _format="tar.gz"
|
|
# remove any repositories left in srcdir
|
|
abuild clean
|
|
mkdir -p "$srcdir" && cd "$srcdir"
|
|
# clone git repo and archive
|
|
if [ -n "$giturl" ]; then
|
|
local _version=${verbase:-0}_git${_date}
|
|
command -v git >/dev/null || \
|
|
die "Missing git! Install git to support git clone."
|
|
[ -z "$reporev" ] && local _rev="HEAD" && local _depth="--depth=1"
|
|
msg "Creating git snapshot: $pkgname-$_version"
|
|
git clone $_depth --bare $giturl $pkgname-$_version || return 1
|
|
git --git-dir $pkgname-$_version archive \
|
|
--format=$_format \
|
|
-o $pkgname-$_version.$_format \
|
|
--prefix=$pkgname-$_version/ $_rev \
|
|
|| return 1
|
|
fi
|
|
# export svn repo and archive
|
|
if [ -n "$svnurl" ]; then
|
|
local _version=${verbase:-0}_svn${_date}
|
|
command -v svn >/dev/null || \
|
|
die "Missing svn! Install subverion to support svn export."
|
|
[ -n "$reporev" ] && local _rev="-r $reporev"
|
|
msg "Creating svn snapshot: $pkgname-$_version"
|
|
svn export $_rev $svnurl $pkgname-$_version || return 1
|
|
tar zcf $pkgname-$_version.$_format $pkgname-$_version || return 1
|
|
fi
|
|
# upload to defined distfiles url
|
|
if [ -n "$disturl" ]; then
|
|
command -v rsync >/dev/null || \
|
|
die "Missing rsync! Install rsync to enable automatic uploads."
|
|
msg "Uploading to $disturl"
|
|
rsync --progress -La $pkgname-$_version.$_format \
|
|
$disturl || return 1
|
|
cd "$startdir"
|
|
# set the pkgver to current date and update checksum
|
|
sed -i -e "s/^pkgver=.*/pkgver=${_version}/" \
|
|
APKBUILD || return 1
|
|
abuild checksum
|
|
fi
|
|
}
|
|
|
|
usage() {
|
|
echo "$program $abuild_ver"
|
|
echo "usage: $program [options] [-i PKG] [-P REPODEST] [-p PKGDEST]"
|
|
echo " [-s SRCDEST] [cmd] ..."
|
|
echo " $program [-c] -n PKGNAME[-PKGVER]"
|
|
echo "Options:"
|
|
echo " -c Enable colored output"
|
|
echo " -d Disable dependency checking"
|
|
echo " -f Force specified cmd, even if they are already done"
|
|
echo " -F Force run as root"
|
|
echo " -h Show this help"
|
|
echo " -i Install PKG after successful build"
|
|
echo " -k Keep built packages, even if APKBUILD or sources are newer"
|
|
echo " -m Disable colors (monochrome)"
|
|
echo " -p Set package destination directory"
|
|
echo " -P Set PKGDEST to REPODEST/<repo>/\$CARCH, where repo is the parents dir name"
|
|
echo " -q Quiet"
|
|
echo " -r Install missing dependencies from system repository (using sudo)"
|
|
echo " -R Recursively build and install missing dependencies (using sudo)"
|
|
echo " -s Set source package destination directory"
|
|
echo " -u Recursively build and upgrade all dependencies (using sudo)"
|
|
echo ""
|
|
echo "Commands:"
|
|
echo " checksum Generate checksum to be included in APKBUILD"
|
|
echo " fetch Fetch sources to \$SRCDEST and verify checksums"
|
|
echo " sanitycheck Basic sanity check of APKBUILD"
|
|
echo " verify Verify checksums"
|
|
echo " unpack Unpack sources to \$srcdir"
|
|
echo " build Compile and install package into \$pkgdir"
|
|
echo " listpkg List target packages"
|
|
echo " package Create package in \$PKGDEST"
|
|
echo " rootpkg Run '$0 build package' as fakeroot"
|
|
echo " clean Remove temp build and install dirs"
|
|
echo " cleanoldpkg Remove binary packages except current version"
|
|
echo " cleanpkg Remove already built binary and source package"
|
|
echo " cleancache Remove downloaded files from \$SRCDEST"
|
|
echo " srcpkg Make a source package"
|
|
echo " sourcecheck Check if remote source package exists upstream"
|
|
echo " up2date Compare target and sources dates"
|
|
echo " installdeps Install packages listed in makedepends and depends"
|
|
echo " uninstalldeps Uninstall packages listed in makedepends and depends"
|
|
echo " snapshot Create a \$giturl or \$svnurl snapshot and upload to \$disturl"
|
|
echo ""
|
|
exit 0
|
|
}
|
|
|
|
APKBUILD="${APKBUILD:-./APKBUILD}"
|
|
unset force
|
|
unset recursive
|
|
while getopts "cdfFhi:kimnp:P:qrRs:u" opt; do
|
|
case $opt in
|
|
'c') default_colors
|
|
color_opt="-c";;
|
|
'd') nodeps=1;;
|
|
'f') force="-f";;
|
|
'F') forceroot="-F";;
|
|
'h') usage;;
|
|
'i') install_after="$install_after $OPTARG";;
|
|
'k') keep="-k";;
|
|
'm') monochrome
|
|
color_opt="-m";;
|
|
'n') die "Use newapkbuild to create new aports";;
|
|
'p') PKGDEST=$OPTARG;;
|
|
'P') REPODEST=$OPTARG;;
|
|
'q') quiet="-q";;
|
|
'r') install_deps="-r";;
|
|
'R') recursive="-R";;
|
|
's') SRCDEST=$OPTARG;;
|
|
'u') upgrade="-u"
|
|
recursive="-R";;
|
|
esac
|
|
done
|
|
shift $(( $OPTIND - 1 ))
|
|
|
|
# check so we are not root
|
|
if [ "$(whoami)" = "root" ] && [ -z "$FAKEROOTKEY" ]; then
|
|
[ -z "$forceroot" ] && die "Do not run abuild as root"
|
|
FAKEROOT=
|
|
fi
|
|
|
|
# find startdir
|
|
[ -f "$APKBUILD" ] || die "Could not find $APKBUILD (PWD=$PWD)"
|
|
APKBUILD=$(readlink -f "$APKBUILD")
|
|
|
|
startdir="${APKBUILD%/*}"
|
|
srcdir=${srcdir:-"$startdir/src"}
|
|
pkgbasedir=${pkgbasedir:-"$startdir/pkg"}
|
|
pkgrel=0
|
|
repo=${startdir%/*}
|
|
repo=${repo##*/}
|
|
|
|
SRCDEST=${SRCDEST:-$startdir}
|
|
PKGDEST=${PKGDEST:-$startdir}
|
|
|
|
cd "$startdir" || die
|
|
. "$APKBUILD"
|
|
|
|
# If REPODEST is set then it will override the PKGDEST
|
|
if [ -n "$REPODEST" ]; then
|
|
PKGDEST="$REPODEST/$repo/$CARCH"
|
|
# for recursive action
|
|
export REPODEST
|
|
abuildrepo="$REPODEST"/$repo
|
|
else
|
|
abuildrepo="$abuildrepo_base"/$repo
|
|
fi
|
|
|
|
# if we want to build a debug package
|
|
if [ -n "$DEBUG" ]; then
|
|
CFLAGS="$CFLAGS -g"
|
|
options="$options !strip"
|
|
fi
|
|
|
|
# If we are handling a sub package then reset subpackages and install
|
|
if [ -n "$subpkgname" ]; then
|
|
origsubpackages="$subpackages"
|
|
subpackages=
|
|
fi
|
|
pkgdir="$pkgbasedir/$pkgname"
|
|
controldir="$pkgbasedir"/.control.${subpkgname:-$pkgname}
|
|
|
|
trap 'die "Aborted by user"' INT
|
|
set_xterm_title "abuild: $pkgname"
|
|
|
|
if [ -z "$1" ]; then
|
|
set all
|
|
fi
|
|
|
|
while [ $# -gt 0 ]; do
|
|
runpart $1
|
|
shift
|
|
done
|
|
|
|
for i in $install_after; do
|
|
post_add $i
|
|
done
|
|
|
|
cleanup
|
|
|