xbps-src: clean up fetch hook and allow mirroring

This commit is contained in:
Duncaen 2022-08-06 04:54:05 +02:00 committed by Đoàn Trần Công Danh
parent 6bf9e1b1e2
commit 500a3c8f49
3 changed files with 110 additions and 97 deletions

View File

@ -4,5 +4,6 @@ wrksrc="${XBPS_BUILDDIR}/${pkgname#R-cran-}"
# default to cran # default to cran
if [ -z "$distfiles" ]; then if [ -z "$distfiles" ]; then
distfiles="https://cran.r-project.org/src/contrib/${pkgname#R-cran-}_${version//r/-}.tar.gz" distfiles="https://cran.r-project.org/src/contrib/${pkgname#R-cran-}_${version//r/-}.tar.gz
https://cran.r-project.org/src/contrib/Archive/${pkgname#R-cran-}/${pkgname#R-cran-}_${version//r/-}.tar.gz"
fi fi

View File

@ -2,24 +2,6 @@
# the $distfiles variable and then verifies its sha256 checksum comparing # the $distfiles variable and then verifies its sha256 checksum comparing
# its value with the one stored in the $checksum variable. # its value with the one stored in the $checksum variable.
# Get the checksum for $curfile at index $dfcount
get_cksum() {
	# Echo the $dfcount'th word of the whitespace-separated $checksum
	# variable (the sha256 for $curfile); abort via msg_error when no
	# checksum exists at that index.
	# Reads caller scope: $checksum, $pkgver.
	local curfile="$1" dfcount="$2" ckcount cksum i
	ckcount=0
	# Start empty, not "0": the missing-checksum test below relies on
	# emptiness, and "0" made it unreachable.
	cksum=
	for i in ${checksum}; do
		if [ $dfcount -eq $ckcount -a -n "$i" ]; then
			cksum=$i
		fi
		ckcount=$((ckcount + 1))
	done
	if [ -z "$cksum" ]; then
		msg_error "$pkgver: cannot find checksum for $curfile.\n"
	fi
	echo "$cksum"
}
# Return the checksum of the contents of a tarball # Return the checksum of the contents of a tarball
contents_cksum() { contents_cksum() {
local curfile="$1" cursufx cksum local curfile="$1" cursufx cksum
@ -110,9 +92,7 @@ contents_cksum() {
# Verify the checksum for $curfile stored at $distfile and index $dfcount # Verify the checksum $cksum for $curfile stored at $distfile
verify_cksum() { verify_cksum() {
local curfile="$1" distfile="$2" dfcount="$3" filesum cksum local curfile="$1" distfile="$2" cksum="$3" filesum
cksum=$(get_cksum $curfile $dfcount)
# If the checksum starts with a commercial at (@) it is the contents checksum # If the checksum starts with a commercial at (@) it is the contents checksum
if [ "${cksum:0:1}" = "@" ]; then if [ "${cksum:0:1}" = "@" ]; then
@ -121,7 +101,7 @@ verify_cksum() {
filesum=$(contents_cksum "$curfile") filesum=$(contents_cksum "$curfile")
if [ "${cksum}" != "$filesum" ]; then if [ "${cksum}" != "$filesum" ]; then
echo echo
msg_red "SHA256 mismatch for '$curfile:'\n@$filesum\n" msg_red "SHA256 mismatch for '${curfile}:'\n@${filesum}\n"
errors=$((errors + 1)) errors=$((errors + 1))
else else
msg_normal_append "OK.\n" msg_normal_append "OK.\n"
@ -131,7 +111,7 @@ verify_cksum() {
filesum=$(${XBPS_DIGEST_CMD} "$distfile") filesum=$(${XBPS_DIGEST_CMD} "$distfile")
if [ "$cksum" != "$filesum" ]; then if [ "$cksum" != "$filesum" ]; then
echo echo
msg_red "SHA256 mismatch for '$curfile:'\n$filesum\n" msg_red "SHA256 mismatch for '${curfile}:'\n${filesum}\n"
errors=$((errors + 1)) errors=$((errors + 1))
else else
if [ ! -f "$XBPS_SRCDISTDIR/by_sha256/${cksum}_${curfile}" ]; then if [ ! -f "$XBPS_SRCDISTDIR/by_sha256/${cksum}_${curfile}" ]; then
@ -145,22 +125,20 @@ verify_cksum() {
# Link an existing cksum $distfile for $curfile at index $dfcount # Link an existing $distfile for $curfile matching checksum $cksum
link_cksum() { link_cksum() {
local curfile="$1" distfile="$2" dfcount="$3" filesum cksum local curfile="$1" distfile="$2" cksum="$3"
cksum=$(get_cksum $curfile $dfcount)
if [ -n "$cksum" -a -f "$XBPS_SRCDISTDIR/by_sha256/${cksum}_${curfile}" ]; then if [ -n "$cksum" -a -f "$XBPS_SRCDISTDIR/by_sha256/${cksum}_${curfile}" ]; then
ln -f "$XBPS_SRCDISTDIR/by_sha256/${cksum}_${curfile}" "$distfile" ln -f "$XBPS_SRCDISTDIR/by_sha256/${cksum}_${curfile}" "$distfile"
msg_normal "$pkgver: using known distfile $curfile.\n" msg_normal "$pkgver: using known distfile $curfile.\n"
return 0
fi fi
return 1
} }
try_mirrors() { try_mirrors() {
local curfile="$1" distfile="$2" dfcount="$3" subdir="$4" f="$5" local curfile="$1" distfile="$2" cksum="$3" f="$4"
local filesum cksum basefile mirror path scheme local filesum basefile mirror path scheme good
[ -z "$XBPS_DISTFILES_MIRROR" ] && return [ -z "$XBPS_DISTFILES_MIRROR" ] && return 1
basefile="${f##*/}" basefile="${f##*/}"
cksum=$(get_cksum $curfile $dfcount)
for mirror in $XBPS_DISTFILES_MIRROR; do for mirror in $XBPS_DISTFILES_MIRROR; do
scheme="file" scheme="file"
if [[ $mirror == *://* ]]; then if [[ $mirror == *://* ]]; then
@ -179,28 +157,80 @@ try_mirrors() {
fi fi
if [[ "$mirror" == *voidlinux* ]]; then if [[ "$mirror" == *voidlinux* ]]; then
# For distfiles.voidlinux.* append the subdirectory # For distfiles.voidlinux.* append the subdirectory
mirror="$mirror/$subdir" mirror="$mirror/$pkgname-$version"
fi fi
msg_normal "$pkgver: fetching distfile '$curfile' from '$mirror'...\n" msg_normal "$pkgver: fetching distfile '$curfile' from mirror '$mirror'...\n"
$fetch_cmd "$mirror/$curfile" $fetch_cmd "$mirror/$curfile"
# If basefile was not found, but a curfile file may exist, try to fetch it # If basefile was not found, but a curfile file may exist, try to fetch it
if [ ! -f "$distfile" -a "$basefile" != "$curfile" ]; then # if [ ! -f "$distfile" -a "$basefile" != "$curfile" ]; then
$fetch_cmd "$mirror/$basefile" # msg_normal "$pkgver: fetching distfile '$basefile' from mirror '$mirror'...\n"
fi # $fetch_cmd "$mirror/$basefile"
# fi
[ ! -f "$distfile" ] && continue [ ! -f "$distfile" ] && continue
flock -n ${distfile}.part rm -f ${distfile}.part flock -n ${distfile}.part rm -f ${distfile}.part
filesum=$(${XBPS_DIGEST_CMD} "$distfile") filesum=$(${XBPS_DIGEST_CMD} "$distfile")
[ "$cksum" == "$filesum" ] && break if [ "$cksum" == "$filesum" ]; then
return 0
fi
msg_normal "$pkgver: checksum failed - removing '$curfile'...\n" msg_normal "$pkgver: checksum failed - removing '$curfile'...\n"
rm -f ${distfile} rm -f ${distfile}
done done
return 1
}
try_urls() {
	# Fetch target file $1 from each of its candidate URLs (indices into
	# _distfiles/_checksums looked up through _file_idxs) and verify the
	# checksum of whichever download succeeds.
	# Returns 0 on success, 1 when no URL produced the file.
	# Reads caller scope: $distfile, $fetch_cmd, $pkgver, _file_idxs,
	# _checksums, _distfiles, and optionally $FTP_RETRIES.
	local curfile="$1"
	local i cksum url max_retries retry
	for i in ${_file_idxs["$curfile"]}; do
		cksum=${_checksums["$i"]}
		url=${_distfiles["$i"]}
		# If distfile does not exist, download it from the original location.
		# FTP servers frequently drop connections, so allow extra attempts.
		if [[ "$FTP_RETRIES" && "${url}" =~ ^ftp:// ]]; then
			max_retries="$FTP_RETRIES"
		else
			max_retries=1
		fi
		for retry in $(seq 1 1 $max_retries); do
			if [ ! -f "$distfile" ]; then
				if [ "$retry" == 1 ]; then
					msg_normal "$pkgver: fetching distfile '$curfile' from '$url'...\n"
				else
					msg_normal "$pkgver: fetch attempt $retry of $max_retries...\n"
				fi
				flock "${distfile}.part" $fetch_cmd "$url"
			fi
		done
		if [ ! -f "$distfile" ]; then
			continue
		fi
		# distfile downloaded, verify sha256 hash.
		flock -n "${distfile}.part" rm -f "${distfile}.part"
		# Pass the target file name; "$f" here leaked a stale loop
		# variable from the caller's purge loop.
		verify_cksum "$curfile" "$distfile" "$cksum"
		return 0
	done
	return 1
}
hook() { hook() {
local srcdir="$XBPS_SRCDISTDIR/$pkgname-$version" local srcdir="$XBPS_SRCDISTDIR/$pkgname-$version"
local dfcount=0 dfgood=0 errors=0 max_retries local dfcount=0 dfgood=0 errors=0 max_retries
if [ ! -d "$srcdir" ]; then local -a _distfiles=($distfiles)
local -a _checksums=($checksum)
local -A _file_idxs
# Create a map from target file to index in _distfiles/_checksums
for i in ${!_distfiles[@]}; do
f="${_distfiles[$i]}"
curfile="${f#*>}"
curfile="${curfile##*/}"
_file_idxs["$curfile"]+=" $i"
done
if [[ ! -d "$srcdir" ]]; then
mkdir -p -m775 "$srcdir" mkdir -p -m775 "$srcdir"
chgrp $(id -g) "$srcdir" chgrp $(id -g) "$srcdir"
fi fi
@ -212,87 +242,68 @@ hook() {
# Detect bsdtar and GNU tar (in that order of preference) # Detect bsdtar and GNU tar (in that order of preference)
TAR_CMD="$(command -v bsdtar)" TAR_CMD="$(command -v bsdtar)"
if [ -z "$TAR_CMD" ]; then if [[ -z "$TAR_CMD" ]]; then
TAR_CMD="$(command -v tar)" TAR_CMD="$(command -v tar)"
fi fi
# Detect distfiles with obsolete checksum and purge them from the cache # Detect distfiles with obsolete checksum and purge them from the cache
for f in ${distfiles}; do for f in ${!_file_idxs[@]}; do
curfile="${f#*>}" distfile="$srcdir/$f"
curfile="${curfile##*/}" for i in ${_file_idxs["$f"]}; do
distfile="$srcdir/$curfile" if [[ -f $distfile ]]; then
cksum=${_checksums["$i"]}
if [ -f "$distfile" ]; then if [[ ${cksum:0:1} = @ ]]; then
cksum=$(get_cksum $curfile $dfcount) cksum=${cksum:1}
if [ "${cksum:0:1}" = "@" ]; then filesum=$(contents_cksum "$distfile")
cksum=${cksum:1} else
filesum=$(contents_cksum "$distfile") filesum=$(${XBPS_DIGEST_CMD} "$distfile")
else fi
filesum=$(${XBPS_DIGEST_CMD} "$distfile") if [[ $cksum = $filesum ]]; then
dfgood=$((dfgood + 1))
else
inode=$(stat "$distfile" --printf "%i")
msg_warn "$pkgver: wrong checksum found for ${curfile} - purging\n"
find ${XBPS_SRCDISTDIR} -inum ${inode} -delete -print
fi
fi fi
if [ "$cksum" = "$filesum" ]; then dfcount=$((dfcount + 1))
dfgood=$((dfgood + 1)) done
else
inode=$(stat "$distfile" --printf "%i")
msg_warn "$pkgver: wrong checksum found for ${curfile} - purging\n"
find ${XBPS_SRCDISTDIR} -inum ${inode} -delete -print
fi
fi
dfcount=$((dfcount + 1))
done done
# We're done, if all distfiles were found and had good checksums # We're done, if all distfiles were found and had good checksums
[ $dfcount -eq $dfgood ] && return [[ $dfcount -eq $dfgood ]] && return
# Download missing distfiles and verify their checksums # Download missing distfiles and verify their checksums
dfcount=0 for curfile in ${!_file_idxs[@]}; do
for f in ${distfiles}; do
curfile="${f#*>}"
curfile="${curfile##*/}"
distfile="$srcdir/$curfile" distfile="$srcdir/$curfile"
# If file lock cannot be acquired wait until it's available. # If file lock cannot be acquired wait until it's available.
while true; do while ! flock -w 1 "${distfile}.part" true; do
flock -w 1 ${distfile}.part true
[ $? -eq 0 ] && break
msg_warn "$pkgver: ${curfile} is already being downloaded, waiting for 1s ...\n" msg_warn "$pkgver: ${curfile} is already being downloaded, waiting for 1s ...\n"
done done
if [[ -f "$distfile" ]]; then
continue
fi
# If distfile does not exist, try to link to it. # If distfile does not exist, try to link to it.
if [ ! -f "$distfile" ]; then if link_cksum "$curfile" "$distfile" "${_checksums[0]}"; then
link_cksum $curfile $distfile $dfcount continue
fi fi
# If distfile does not exist, download it from a mirror location. # If distfile does not exist, download it from a mirror location.
if [ ! -f "$distfile" ]; then if try_mirrors "$curfile" "$distfile" "${_checksums[0]}" "${_distfiles[0]}"; then
try_mirrors $curfile $distfile $dfcount $pkgname-$version $f continue
fi fi
# If distfile does not exist, download it from the original location.
if [[ "$FTP_RETRIES" && "${f}" =~ ^ftp:// ]]; then if ! try_urls "$curfile"; then
max_retries="$FTP_RETRIES" msg_error "$pkgver: failed to fetch '$curfile'.\n"
else
max_retries=1
fi fi
for retry in $(seq 1 1 $max_retries); do
if [ ! -f "$distfile" ]; then
if [ "$retry" == 1 ]; then
msg_normal "$pkgver: fetching distfile '$curfile'...\n"
else
msg_normal "$pkgver: fetch attempt $retry of $max_retries...\n"
fi
flock "${distfile}.part" $fetch_cmd "$f"
fi
done
if [ ! -f "$distfile" ]; then
msg_error "$pkgver: failed to fetch $curfile.\n"
fi
# distfile downloaded, verify sha256 hash.
flock -n ${distfile}.part rm -f ${distfile}.part
verify_cksum $curfile $distfile $dfcount
dfcount=$((dfcount + 1))
done done
unset TAR_CMD unset TAR_CMD
if [ $errors -gt 0 ]; then if [[ $errors -gt 0 ]]; then
msg_error "$pkgver: couldn't verify distfiles, exiting...\n" msg_error "$pkgver: couldn't verify distfiles, exiting...\n"
fi fi
} }

View File

@ -11,4 +11,5 @@ short_desc="Coloured Formatting for Columns"
maintainer="Florian Wagner <florian@wagner-flo.net>" maintainer="Florian Wagner <florian@wagner-flo.net>"
license="GPL-3.0-or-later" license="GPL-3.0-or-later"
homepage="https://github.com/r-lib/pillar/" homepage="https://github.com/r-lib/pillar/"
checksum=0ce5d15364dab761dab5b159ec2a4586ed1635f058fa13975725a1921e43e672 checksum="0ce5d15364dab761dab5b159ec2a4586ed1635f058fa13975725a1921e43e672
0ce5d15364dab761dab5b159ec2a4586ed1635f058fa13975725a1921e43e672"