From c8df88885891bdb6d1f207cf203e4b0af48d5486 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra
Date: Thu, 8 May 2014 14:57:20 +0200
Subject: [PATCH] Add a function "fetchzip"

This function downloads and unpacks a file in one fixed-output
derivation. This is primarily useful for dynamically generated zip
files, such as GitHub's /archive URLs, where the unpacked content of
the zip file doesn't change, but the zip file itself may (e.g. due to
minor changes in the compression algorithm, or changes in timestamps).

Fetchzip is implemented by extending fetchurl with a "postFetch" hook
that is executed after the file has been downloaded. This hook can
thus perform arbitrary checks or transformations on the downloaded
file.
---
 pkgs/build-support/fetchurl/builder.sh     |  8 ++++-
 pkgs/build-support/fetchurl/default.nix    | 22 ++++++++++--
 pkgs/build-support/fetchzip/default.nix    | 42 ++++++++++++++++++++++
 pkgs/tools/networking/dd-agent/default.nix |  6 ++--
 pkgs/top-level/all-packages.nix            |  2 ++
 5 files changed, 73 insertions(+), 7 deletions(-)
 create mode 100644 pkgs/build-support/fetchzip/default.nix

diff --git a/pkgs/build-support/fetchurl/builder.sh b/pkgs/build-support/fetchurl/builder.sh
index 402fe0cba5b8..0c6e16551b4a 100644
--- a/pkgs/build-support/fetchurl/builder.sh
+++ b/pkgs/build-support/fetchurl/builder.sh
@@ -17,12 +17,16 @@ curl="curl \
  $NIX_CURL_FLAGS"
 
+downloadedFile="$out"
+if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi
+
+
 tryDownload() {
     local url="$1"
     echo
     header "trying $url"
     success=
-    if $curl --fail "$url" --output "$out"; then
+    if $curl --fail "$url" --output "$downloadedFile"; then
         success=1
     fi
     stopNest
@@ -30,6 +34,8 @@ tryDownload() {
 
 
 finish() {
+    set +o noglob
+    runHook postFetch
     stopNest
     exit 0
 }
diff --git a/pkgs/build-support/fetchurl/default.nix b/pkgs/build-support/fetchurl/default.nix
index af4a6700153c..eac38a773c16 100644
--- a/pkgs/build-support/fetchurl/default.nix
+++ b/pkgs/build-support/fetchurl/default.nix
@@ -54,6 +54,9 @@ in
   # first element of `urls').
   name ? ""
 
+, # A string to be appended to the name, if the name is derived from `url'.
+  nameSuffix ? ""
+
   # Different ways of specifying the hash.
 , outputHash ? ""
 , outputHashAlgo ? ""
@@ -61,6 +64,17 @@ in
 , sha1 ? ""
 , sha256 ? ""
 
+, recursiveHash ? false
+
+, # Shell code executed after the file has been fetched
+  # successfully. This can do things like check or transform the file.
+  postFetch ? ""
+
+, # Whether to download to a temporary path rather than $out. Useful
+  # in conjunction with postFetch. The location of the temporary file
+  # is communicated to postFetch via $downloadedFile.
+  downloadToTemp ? false
+
 , # If set, don't download the file, but write a list of all possible
   # URLs (resulting from resolving mirror:// URLs) to $out.
   showURLs ? false
@@ -83,11 +97,11 @@ stdenv.mkDerivation {
   name =
     if showURLs then "urls"
     else if name != "" then name
-    else baseNameOf (toString (builtins.head urls_));
+    else baseNameOf (toString (builtins.head urls_)) + nameSuffix;
 
   builder = ./builder.sh;
 
-  buildInputs = [curl];
+  buildInputs = [ curl ];
 
   urls = urls_;
 
@@ -101,7 +115,9 @@ stdenv.mkDerivation {
   outputHash = if outputHash != "" then outputHash else
       if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;
 
-  inherit curlOpts showURLs mirrorsFile impureEnvVars;
+  outputHashMode = if recursiveHash then "recursive" else "flat";
+
+  inherit curlOpts showURLs mirrorsFile impureEnvVars postFetch downloadToTemp;
 
   # Doing the download on a remote machine just duplicates network
   # traffic, so don't do that.
diff --git a/pkgs/build-support/fetchzip/default.nix b/pkgs/build-support/fetchzip/default.nix
new file mode 100644
index 000000000000..6b77b6474efc
--- /dev/null
+++ b/pkgs/build-support/fetchzip/default.nix
@@ -0,0 +1,42 @@
+# This function downloads and unpacks a zip file. This is primarily
+# useful for dynamically generated zip files, such as GitHub's
+# /archive URLs, where the unpacked content of the zip file doesn't
+# change, but the zip file itself may (e.g. due to minor changes in
+# the compression algorithm, or changes in timestamps).
+
+{ lib, fetchurl, unzip }:
+
+{ # Optionally move the contents of the unpacked tree up one level.
+  stripRoot ? true
+, ... } @ args:
+
+fetchurl (args // {
+  # Apply a suffix to the name. Otherwise, unpackPhase will get
+  # confused by the .zip extension.
+  nameSuffix = "-unpacked";
+
+  recursiveHash = true;
+
+  downloadToTemp = true;
+
+  postFetch =
+    ''
+      export PATH=${unzip}/bin:$PATH
+      mkdir $out
+      cd $out
+      renamed="$TMPDIR/''${name%-unpacked}"
+      mv "$downloadedFile" "$renamed"
+      unpackFile "$renamed"
+    ''
+    # FIXME: handle zip files that contain a single regular file.
+    + lib.optionalString stripRoot ''
+      shopt -s dotglob
+      if [ "$(ls -d $out/* | wc -l)" != 1 ]; then
+        echo "error: zip file must contain a single directory."
+        exit 1
+      fi
+      fn=$(cd "$out" && echo *)
+      mv $out/$fn/* "$out/"
+      rmdir "$out/$fn"
+    '';
+})
diff --git a/pkgs/tools/networking/dd-agent/default.nix b/pkgs/tools/networking/dd-agent/default.nix
index 3f06f2d1354a..ca240f76d84e 100644
--- a/pkgs/tools/networking/dd-agent/default.nix
+++ b/pkgs/tools/networking/dd-agent/default.nix
@@ -1,13 +1,13 @@
-{ stdenv, fetchurl, python, pythonPackages, sysstat, unzip, tornado
+{ stdenv, fetchzip, python, pythonPackages, sysstat, unzip, tornado
 , makeWrapper }:
 
 stdenv.mkDerivation rec {
   version = "4.2.1";
   name = "dd-agent-${version}";
 
-  src = fetchurl {
+  src = fetchzip {
     url = "https://github.com/DataDog/dd-agent/archive/${version}.zip";
-    sha256 = "0s1lg7rqx86z0y111105gwkknzplq149cxd7v3yg30l22wn68dmv";
+    sha256 = "06f9nkvnpfzs2nw75cac2y9wnp2bay4sg94zz0wjm8886rigjjjm";
   };
 
   buildInputs = [ python unzip makeWrapper pythonPackages.psycopg2 ];
diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix
index ea44c4934bb0..f402eb50f482 100644
--- a/pkgs/top-level/all-packages.nix
+++ b/pkgs/top-level/all-packages.nix
@@ -338,6 +338,8 @@ let
   # linked curl in the case of stdenv-linux).
   fetchurlBoot = stdenv.fetchurlBoot;
 
+  fetchzip = import ../build-support/fetchzip { inherit lib fetchurl unzip; };
+
   resolveMirrorURLs = {url}: fetchurl {
     showURLs = true;
     inherit url;
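
For reference, a minimal sketch of how the new fetchzip function could be
called from a package expression. The repository URL and sha256 below are
placeholders for illustration only, not values taken from this patch:

    { fetchzip }:

    # Fetch a GitHub /archive zip and unpack it in the same fixed-output
    # derivation. Because fetchzip sets recursiveHash, the sha256 is the
    # hash of the unpacked tree, so it stays stable even if GitHub
    # regenerates the zip with different timestamps or compression.
    fetchzip {
      url = "https://github.com/example/project/archive/v1.0.zip";      # placeholder URL
      sha256 = "0000000000000000000000000000000000000000000000000000";  # placeholder hash
    }

As with any fixed-output derivation, building with an incorrect hash makes
Nix report the expected one, which can then be copied into the expression.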