fetchurl: fix downloadToTemp & hashedMirrors (#445592)

Changed files
+26 -3
pkgs
build-support
+6 -3
pkgs/build-support/fetchurl/builder.sh
···
tryDownload() {
local url="$1"
+
local target="$2"
echo
echo "trying $url"
local curlexit=18;
···
# if we get error code 18, resume partial download
while [ $curlexit -eq 18 ]; do
# keep this inside an if statement, since on failure it doesn't abort the script
-
if "${curl[@]}" -C - --fail "$url" --output "$downloadedFile"; then
+
if "${curl[@]}" -C - --fail "$url" --output "$target"; then
success=1
break
else
···
if "${curl[@]}" --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \
--fail --silent --show-error --head "$url" \
--write-out "%{http_code}" --output /dev/null > code 2> log; then
-
tryDownload "$url"
+
# Directly download to $out, because postFetch doesn't need to run,
+
# since hashed mirrors provide pre-built derivation outputs.
+
tryDownload "$url" "$out"
# We skip postFetch here, because hashed-mirrors are
# already content-addressed. So if $outputHash is in the
···
;;
esac
fi
-
tryDownload "$url"
+
tryDownload "$url" "$downloadedFile"
if test -n "$success"; then finish; fi
done
+20
pkgs/build-support/fetchurl/tests.nix
···
${jq}/bin/jq -r '.headers.Hello' $out | ${moreutils}/bin/sponge $out
'';
};
+
# Tests that downloadToTemp works with hashedMirrors
+
no-skipPostFetch = testers.invalidateFetcherByDrvHash fetchurl {
+
# Make sure that we can only download from hashed mirrors
+
url = "http://broken";
+
# A file with this hash is definitely on tarballs.nixos.org
+
sha256 = "1j1y3cq6ys30m734axc0brdm2q9n2as4h32jws15r7w5fwr991km";
+
+
# No chance
+
curlOptsList = [
+
"--retry"
+
"0"
+
];
+
+
downloadToTemp = true;
+
# Usually postFetch is needed with downloadToTemp to populate $out from
+
# $downloadedFile, but here we know that because the URL is broken, it will
+
# have to fall back to fetching the previously-built derivation from
+
# tarballs.nixos.org, which provides pre-built derivation outputs.
+
+
};
}