Merge pull request #288829 from samuela/samuela/jaxlib-bin

python3Packages.jaxlib-bin: cudatoolkit -> redist packages
Samuel Ainsworth 2024-02-27 21:52:16 -05:00 committed by GitHub
commit 442dba6ba4
3 changed files with 57 additions and 26 deletions

pkgs/development/python-modules/jax/default.nix

@@ -1,6 +1,7 @@
{ lib
, blas
, buildPythonPackage
, callPackage
, setuptools
, importlib-metadata
, fetchFromGitHub
@@ -126,6 +127,23 @@ buildPythonPackage rec {
pythonImportsCheck = [ "jax" ];
# Test CUDA-enabled jax and jaxlib. Running CUDA-enabled tests is not
# currently feasible within the nix build environment so we have to maintain
# this script separately. See https://github.com/NixOS/nixpkgs/pull/256230
# for a possible remedy to this situation.
#
# Run these tests with, e.g.,
#
# NIXPKGS_ALLOW_UNFREE=1 nixglhost -- nix run --impure .#python3Packages.jax.passthru.tests.test_cuda_jaxlibBin
passthru.tests = {
test_cuda_jaxlibSource = callPackage ./test-cuda.nix {
jaxlib = jaxlib.override { cudaSupport = true; };
};
test_cuda_jaxlibBin = callPackage ./test-cuda.nix {
jaxlib = jaxlib-bin.override { cudaSupport = true; };
};
};
meta = with lib; {
description = "Differentiate, compile, and transform Numpy code";
homepage = "https://github.com/google/jax";

pkgs/development/python-modules/jax/test-cuda.nix

@@ -0,0 +1,17 @@
{ jax
, jaxlib
, pkgs
}:
pkgs.writers.writePython3Bin "jax-test-cuda" { libraries = [ jax jaxlib ]; } ''
import jax
from jax import random
assert jax.devices()[0].platform == "gpu"
rng = random.PRNGKey(0)
x = random.normal(rng, (100, 100))
x @ x
print("success!")
''
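
For interactive debugging, the same check the wrapped script performs can be pasted into a Python REPL in a CUDA-enabled environment. This is a minimal sketch mirroring the script above; the `block_until_ready` call is an addition here, used to force the asynchronous matmul to actually execute on the GPU:

import jax
from jax import random

# Fail fast if jaxlib silently fell back to the CPU backend.
assert jax.devices()[0].platform == "gpu"

rng = random.PRNGKey(0)
x = random.normal(rng, (100, 100))
# JAX dispatches asynchronously; block_until_ready forces the matmul to
# complete on the device before we report success.
(x @ x).block_until_ready()
print("success!")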

pkgs/development/python-modules/jaxlib/bin/default.nix

@@ -2,16 +2,7 @@
# backend will require some additional work. Those wheels are located here:
# https://storage.googleapis.com/jax-releases/libtpu_releases.html.
# For future reference, the easiest way to test the GPU backend is to run
# NIX_PATH=.. nix-shell -p python3 python3Packages.jax "python3Packages.jaxlib-bin.override { cudaSupport = true; }"
# export XLA_FLAGS=--xla_gpu_force_compilation_parallelism=1
# python -c "from jax.lib import xla_bridge; assert xla_bridge.get_backend().platform == 'gpu'"
# python -c "from jax import random; random.PRNGKey(0)"
# python -c "from jax import random; x = random.normal(random.PRNGKey(0), (100, 100)); x @ x"
# There's no convenient way to test the GPU backend in the derivation since the
# nix build environment blocks access to the GPU. See also:
# * https://github.com/google/jax/issues/971#issuecomment-508216439
# * https://github.com/google/jax/issues/5723#issuecomment-913038780
# See `python3Packages.jax.passthru` for CUDA tests.
{ absl-py
, autoPatchelfHook
@@ -32,12 +23,21 @@
}:
let
inherit (cudaPackagesGoogle) autoAddOpenGLRunpathHook cudatoolkit cudnn cudaVersion;
inherit (cudaPackagesGoogle) autoAddOpenGLRunpathHook cudaVersion;
version = "0.4.24";
inherit (python) pythonVersion;
cudaLibPath = lib.makeLibraryPath (with cudaPackagesGoogle; [
cuda_cudart.lib # libcudart.so
cuda_cupti.lib # libcupti.so
cudnn.lib # libcudnn.so
libcufft.lib # libcufft.so
libcusolver.lib # libcusolver.so
libcusparse.lib # libcusparse.so
]);
# As of 2023-06-06, google/jax upstream is no longer publishing CPU-only wheels to their GCS bucket. Instead, the
# official instructions recommend installing CPU-only versions via PyPI.
cpuSrcs =
@@ -189,18 +189,12 @@ buildPythonPackage {
# autoPatchelfHook. That means we need to sneak them into rpath. This step
# must be done after autoPatchelfHook and the automatic stripping of
# artifacts. autoPatchelfHook runs in postFixup and auto-stripping runs in the
# patchPhase. Dependencies:
# * libcudart.so.11.0 -> cudatoolkit_11.lib
# * libcublas.so.11 -> cudatoolkit_11
# * libcuda.so.1 -> opengl driver in /run/opengl-driver/lib
# fixupPhase.
preInstallCheck = lib.optional cudaSupport ''
shopt -s globstar
for file in $out/**/*.so; do
rpath=$(patchelf --print-rpath $file)
# For some reason `makeLibraryPath` on `cudatoolkit_11` maps to
# <cudatoolkit_11.lib>/lib which is different from <cudatoolkit_11>/lib.
patchelf --set-rpath "$rpath:${cudatoolkit}/lib:${lib.makeLibraryPath [ cudatoolkit.lib cudnn ]}" $file
patchelf --add-rpath "${cudaLibPath}" "$file"
done
'';
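
A spot check that the rpath edit took effect might look like the following. This is a hypothetical sketch, not part of the derivation: it assumes `patchelf` is on PATH, that the build result is symlinked at `./result`, and a Python 3.11 site-packages layout:

# Hypothetical spot check: verify the CUDA runpath was appended to every
# shared object in the installed jaxlib tree.
import glob
import subprocess

site_packages = "result/lib/python3.11/site-packages/jaxlib"  # assumed path
for so in glob.glob(f"{site_packages}/**/*.so", recursive=True):
    runpath = subprocess.run(
        ["patchelf", "--print-rpath", so],
        check=True, capture_output=True, text=True,
    ).stdout.strip()
    # The store path of cuda_cudart.lib should now appear in the runpath.
    assert "cudart" in runpath, f"missing CUDA runpath in {so}"
print("all jaxlib shared objects carry the CUDA runpath")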
@@ -211,12 +205,14 @@ buildPythonPackage {
scipy
];
# Note that cudatoolkit is necessary since jaxlib looks for "ptxas" in $PATH.
# See https://github.com/NixOS/nixpkgs/pull/164176#discussion_r828801621 for
# more info.
# jaxlib looks for ptxas at runtime, e.g., when running `jax.random.PRNGKey(0)`.
# Linking into $out is the least bad solution. See
# * https://github.com/NixOS/nixpkgs/pull/164176#discussion_r828801621
# * https://github.com/NixOS/nixpkgs/pull/288829#discussion_r1493852211
# for more info.
postInstall = lib.optional cudaSupport ''
mkdir -p $out/bin
ln -s ${cudatoolkit}/bin/ptxas $out/bin/ptxas
mkdir -p $out/${python.sitePackages}/jaxlib/cuda/bin
ln -s ${lib.getExe' cudaPackagesGoogle.cuda_nvcc "ptxas"} $out/${python.sitePackages}/jaxlib/cuda/bin/ptxas
'';
inherit (jaxlib-build) pythonImportsCheck;
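
A quick runtime sanity check for the symlink can be run from the built environment. This is a minimal sketch; it only assumes the installed `jaxlib` package is importable:

# Minimal sketch: confirm the ptxas symlink created in postInstall is
# visible at the expected location inside the installed jaxlib tree.
import os
import jaxlib

cuda_bin = os.path.join(os.path.dirname(jaxlib.__file__), "cuda", "bin")
ptxas = os.path.join(cuda_bin, "ptxas")
assert os.access(ptxas, os.X_OK), f"ptxas missing or not executable at {ptxas}"
print(f"ptxas resolved at {ptxas}")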
@@ -229,8 +225,8 @@ buildPythonPackage {
maintainers = with maintainers; [ samuela ];
platforms = [ "aarch64-darwin" "x86_64-linux" "x86_64-darwin" ];
broken =
!(cudaSupport -> (cudaPackagesGoogle ? cudatoolkit) && lib.versionAtLeast cudatoolkit.version "11.1")
|| !(cudaSupport -> (cudaPackagesGoogle ? cudnn) && lib.versionAtLeast cudnn.version "8.2")
!(cudaSupport -> lib.versionAtLeast cudaVersion "11.1")
|| !(cudaSupport -> lib.versionAtLeast cudaPackagesGoogle.cudnn.version "8.2")
|| !(cudaSupport -> stdenv.isLinux)
|| !(cudaSupport -> (gpuSrcs ? "cuda${cudaVersion}-${pythonVersion}"));
};