Merge pull request #10814 from NarrativeScience/no_large_file_test

numpy: patch to remove large file test

Changed files: +41

+35  pkgs/development/python-modules/numpy-no-large-files.patch
···
+--- numpy/lib/tests/test_format.py 2015-08-11 12:03:43.000000000 -0500
++++ numpy/lib/tests/test_format_no_large_files.py 2015-11-03 16:03:30.328084827 -0600
+@@ -810,32 +810,5 @@
+     format.write_array_header_1_0(s, d)
+     assert_raises(ValueError, format.read_array_header_1_0, s)
+
+-
+-def test_large_file_support():
+-    from nose import SkipTest
+-    if (sys.platform == 'win32' or sys.platform == 'cygwin'):
+-        raise SkipTest("Unknown if Windows has sparse filesystems")
+-    # try creating a large sparse file
+-    tf_name = os.path.join(tempdir, 'sparse_file')
+-    try:
+-        # seek past end would work too, but linux truncate somewhat
+-        # increases the chances that we have a sparse filesystem and can
+-        # avoid actually writing 5GB
+-        import subprocess as sp
+-        sp.check_call(["truncate", "-s", "5368709120", tf_name])
+-    except:
+-        raise SkipTest("Could not create 5GB large file")
+-    # write a small array to the end
+-    with open(tf_name, "wb") as f:
+-        f.seek(5368709120)
+-        d = np.arange(5)
+-        np.save(f, d)
+-    # read it back
+-    with open(tf_name, "rb") as f:
+-        f.seek(5368709120)
+-        r = np.load(f)
+-    assert_array_equal(r, d)
+-
+-
+ if __name__ == "__main__":
+     run_module_suite()
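
For context, the removed test checks that np.save and np.load work past the 4 GiB boundary without actually writing 5 GB of data: it relies on the filesystem storing the untouched region as a sparse hole. Below is a minimal standalone sketch of the same round-trip, using the same 5368709120-byte offset as the test; the tempfile usage is ours, whereas the test uses the suite's shared tempdir fixture.

    # Standalone sketch of what the removed test exercised: round-trip a
    # small array through np.save/np.load at a 5 GiB offset in a sparse
    # file, so almost no real data is written on filesystems that support
    # sparse files. The offset constant is taken from the test itself.
    import os
    import subprocess
    import tempfile

    import numpy as np

    FIVE_GIB = 5368709120

    tempdir = tempfile.mkdtemp()
    tf_name = os.path.join(tempdir, "sparse_file")

    # "truncate -s" extends the file without writing data; if this fails,
    # the filesystem likely cannot hold a sparse 5 GiB file (the test
    # raises SkipTest in that case).
    subprocess.check_call(["truncate", "-s", str(FIVE_GIB), tf_name])

    d = np.arange(5)
    with open(tf_name, "wb") as f:
        f.seek(FIVE_GIB)   # seeking past the end re-creates the sparse hole
        np.save(f, d)
    with open(tf_name, "rb") as f:
        f.seek(FIVE_GIB)
        r = np.load(f)
    assert (r == d).all()

    os.remove(tf_name)
    os.rmdir(tempdir)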
+6  pkgs/top-level/python-packages.nix
···
buildInputs = [ pkgs.gfortran self.nose ];
propagatedBuildInputs = [ support.openblas ];
+  # This patch removes the large file support test, which takes a long time
+  # to run and can cause the machine to run out of disk space.
+  patchPhase = ''
+    patch -p0 < ${../development/python-modules/numpy-no-large-files.patch}
+  '';
+
meta = {
description = "Scientific tools for Python";
homepage = "http://numpy.scipy.org/";