python3Packages.flashinfer: 0.2.14 -> 0.3.1

Diff:
https://github.com/flashinfer-ai/flashinfer/compare/v0.2.14...v0.3.1

Changed files (+26 -11):
pkgs/development/python-modules/flashinfer/default.nix
···
config,
buildPythonPackage,
fetchFromGitHub,
setuptools,
-
cudaPackages,
cmake,
ninja,
numpy,
torch,
-
pynvml,
-
einops,
}:
buildPythonPackage rec {
pname = "flashinfer";
-
version = "0.2.14";
pyproject = true;
src = fetchFromGitHub {
owner = "flashinfer-ai";
repo = "flashinfer";
tag = "v${version}";
-
hash = "sha256-MZiZwdedz+Vxa1+VBfHDKf4NVSiOAytGboIJ0DvCXmk=";
fetchSubmodules = true;
};
build-system = [ setuptools ];
···
export MAX_JOBS="$NIX_BUILD_CORES"
'';
-
TORCH_CUDA_ARCH_LIST = lib.concatStringsSep ";" torch.cudaCapabilities;
dependencies = [
numpy
-
torch
pynvml
-
einops
];
-
meta = with lib; {
broken = !torch.cudaSupport || !config.cudaSupport;
homepage = "https://flashinfer.ai/";
description = "Library and kernel generator for Large Language Models";
···
and inference, and delivers state-of-the-art performance across diverse
scenarios.
'';
-
license = licenses.asl20;
-
maintainers = with maintainers; [ breakds ];
};
}
···
config,
buildPythonPackage,
fetchFromGitHub,
+
+
# build-system
setuptools,
+
+
# nativeBuildInputs
cmake,
ninja,
+
cudaPackages,
+
+
# dependencies
+
click,
+
einops,
numpy,
+
pynvml,
+
tabulate,
torch,
+
tqdm,
}:
buildPythonPackage rec {
pname = "flashinfer";
+
version = "0.3.1";
pyproject = true;
src = fetchFromGitHub {
owner = "flashinfer-ai";
repo = "flashinfer";
tag = "v${version}";
fetchSubmodules = true;
+
hash = "sha256-e9PfLfU0DdoLKlXiHylCbGd125c7Iw9y4NDIOAP0xHs=";
};
build-system = [ setuptools ];
···
export MAX_JOBS="$NIX_BUILD_CORES"
'';
+
FLASHINFER_CUDA_ARCH_LIST = lib.concatStringsSep ";" torch.cudaCapabilities;
+
pythonRemoveDeps = [
+
"nvidia-cudnn-frontend"
+
];
dependencies = [
+
click
+
einops
numpy
pynvml
+
tabulate
+
torch
+
tqdm
];
+
meta = {
broken = !torch.cudaSupport || !config.cudaSupport;
homepage = "https://flashinfer.ai/";
description = "Library and kernel generator for Large Language Models";
···
and inference, and delivers state-of-the-art performance across diverse
scenarios.
'';
+
license = lib.licenses.asl20;
+
maintainers = with lib.maintainers; [ breakds ];
};
}