1#!/usr/bin/env bash
2# shellcheck disable=SC2016
3# shellcheck disable=SC2317
4# shellcheck disable=SC2192
5
6# Tests lib.fileset
7# Run:
8# [nixpkgs]$ lib/fileset/tests.sh
9# or:
10# [nixpkgs]$ nix-build lib/tests/release.nix
11
12set -euo pipefail
13shopt -s inherit_errexit dotglob
14
15die() {
16 # The second to last entry contains the line number of the top-level caller
17 lineIndex=$(( ${#BASH_LINENO[@]} - 2 ))
18 echo >&2 -e "test case at ${BASH_SOURCE[0]}:${BASH_LINENO[$lineIndex]} failed:" "$@"
19 exit 1
20}
21
22if test -n "${TEST_LIB:-}"; then
23 NIX_PATH=nixpkgs="$(dirname "$TEST_LIB")"
24else
25 NIX_PATH=nixpkgs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.."; pwd)"
26fi
27export NIX_PATH
28
29tmp="$(mktemp -d)"
30clean_up() {
31 rm -rf "$tmp"
32}
33trap clean_up EXIT SIGINT SIGTERM
34work="$tmp/work"
35mkdir "$work"
36cd "$work"
37
# Crudely unquotes a JSON string by just taking everything between the first and the second quote.
# We're only using this for resulting /nix/store paths, which can't contain " anyway,
# nor can they contain any other characters that would need special escaping in JSON.
# This way we don't need to add a dependency on e.g. jq.
42crudeUnquoteJSON() {
43 cut -d \" -f2
44}
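
# For example (illustrative value, not a real store path):
#   crudeUnquoteJSON <<< '"/nix/store/abc-source"'  # outputs /nix/store/abc-source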
45
46prefixExpression='
47 let
48 lib = import <nixpkgs/lib>;
49 internal = import <nixpkgs/lib/fileset/internal.nix> {
50 inherit lib;
51 };
52 in
53 with lib;
54 with internal;
55 with lib.fileset;
56'
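
# Note: the helpers below prepend $prefixExpression to every test expression,
# so e.g. `toList ./.` really evaluates `lib.fileset.toList ./.`,
# with the internal functions from internal.nix also in scope.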
57
# Check that two Nix expressions evaluate to the same value,
# with the same exit code and the same stderr.
# The expressions have `lib.fileset` in scope.
# Usage: expectEqual NIX NIX
61expectEqual() {
62 local actualExpr=$1
63 local expectedExpr=$2
64 if actualResult=$(nix-instantiate --eval --strict --show-trace 2>"$tmp"/actualStderr \
65 --expr "$prefixExpression ($actualExpr)"); then
66 actualExitCode=$?
67 else
68 actualExitCode=$?
69 fi
70 actualStderr=$(< "$tmp"/actualStderr)
71
72 if expectedResult=$(nix-instantiate --eval --strict --show-trace 2>"$tmp"/expectedStderr \
73 --expr "$prefixExpression ($expectedExpr)"); then
74 expectedExitCode=$?
75 else
76 expectedExitCode=$?
77 fi
78 expectedStderr=$(< "$tmp"/expectedStderr)
79
80 if [[ "$actualExitCode" != "$expectedExitCode" ]]; then
81 echo "$actualStderr" >&2
82 echo "$actualResult" >&2
83 die "$actualExpr should have exited with $expectedExitCode, but it exited with $actualExitCode"
84 fi
85
86 if [[ "$actualResult" != "$expectedResult" ]]; then
87 die "$actualExpr should have evaluated to $expectedExpr:\n$expectedResult\n\nbut it evaluated to\n$actualResult"
88 fi
89
90 if [[ "$actualStderr" != "$expectedStderr" ]]; then
91 die "$actualExpr should have had this on stderr:\n$expectedStderr\n\nbut it was\n$actualStderr"
92 fi
93}
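
# For example (mirroring a test case further below):
#   expectEqual 'toList _emptyWithoutBase' '[ ]'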
94
95# Check that a nix expression evaluates successfully to a store path and returns it (without quotes).
96# The expression has `lib.fileset` in scope.
97# Usage: expectStorePath NIX
98expectStorePath() {
99 local expr=$1
100 if ! result=$(nix-instantiate --eval --strict --json --read-write-mode --show-trace 2>"$tmp"/stderr \
101 --expr "$prefixExpression ($expr)"); then
102 cat "$tmp/stderr" >&2
103 die "$expr failed to evaluate, but it was expected to succeed"
104 fi
    # This is safe because we assume the result to be a store path inside a JSON string
106 crudeUnquoteJSON <<< "$result"
107}
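
# For example (an illustrative sketch, not an actual test case):
#   storePath=$(expectStorePath 'toSource { root = ./.; fileset = ./.; }')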
108
# Check that a Nix expression fails to evaluate (strictly, in read-write mode)
# and that its stderr matches the given regex.
111# The expression has `lib.fileset` in scope.
112# Usage: expectFailure NIX REGEX
113expectFailure() {
114 local expr=$1
115 local expectedErrorRegex=$2
116 if result=$(nix-instantiate --eval --strict --read-write-mode --show-trace 2>"$tmp/stderr" \
117 --expr "$prefixExpression $expr"); then
118 die "$expr evaluated successfully to $result, but it was expected to fail"
119 fi
120 stderr=$(<"$tmp/stderr")
121 if [[ ! "$stderr" =~ $expectedErrorRegex ]]; then
122 die "$expr should have errored with this regex pattern:\n\n$expectedErrorRegex\n\nbut this was the actual error:\n\n$stderr"
123 fi
124}
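
# For example (mirroring a test case further below):
#   expectFailure 'toList null' 'lib.fileset.toList: Argument is of type null, but it should be a file set or a path instead.'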
125
126# Check that the traces of a Nix expression are as expected when evaluated.
127# The expression has `lib.fileset` in scope.
128# Usage: expectTrace NIX STR
129expectTrace() {
130 local expr=$1
131 local expectedTrace=$2
132
133 nix-instantiate --eval --show-trace >/dev/null 2>"$tmp"/stderrTrace \
134 --expr "$prefixExpression trace ($expr)" || true
135
136 actualTrace=$(sed -n 's/^trace: //p' "$tmp/stderrTrace")
137
138 nix-instantiate --eval --show-trace >/dev/null 2>"$tmp"/stderrTraceVal \
139 --expr "$prefixExpression traceVal ($expr)" || true
140
141 actualTraceVal=$(sed -n 's/^trace: //p' "$tmp/stderrTraceVal")
142
143 # Test that traceVal returns the same trace as trace
144 if [[ "$actualTrace" != "$actualTraceVal" ]]; then
145 cat "$tmp"/stderrTrace >&2
146 die "$expr traced this for lib.fileset.trace:\n\n$actualTrace\n\nand something different for lib.fileset.traceVal:\n\n$actualTraceVal"
147 fi
148
149 if [[ "$actualTrace" != "$expectedTrace" ]]; then
150 cat "$tmp"/stderrTrace >&2
151 die "$expr should have traced this:\n\n$expectedTrace\n\nbut this was actually traced:\n\n$actualTrace"
152 fi
153}
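
# For example (mirroring a test case further below):
#   expectTrace './.' '(empty)'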
154
155# We conditionally use inotifywait in withFileMonitor.
156# Check early whether it's available
157# TODO: Darwin support, though not crucial since we have Linux CI
158if type inotifywait 2>/dev/null >/dev/null; then
159 canMonitor=1
160else
161 echo "Warning: Cannot check for paths not getting read since the inotifywait command (from the inotify-tools package) is not available" >&2
162 canMonitor=
163fi
164
165# Run a function while monitoring that it doesn't read certain paths
166# Usage: withFileMonitor FUNNAME PATH...
167# - FUNNAME should be a bash function that:
#   - Performs some operation that should not read certain paths
#   - Deletes those paths afterwards, without triggering any open events
170# - PATH... are the paths that should not get read
171#
172# This function outputs the same as FUNNAME
173withFileMonitor() {
174 local funName=$1
175 shift
176
177 # If we can't monitor files or have none to monitor, just run the function directly
178 if [[ -z "$canMonitor" ]] || (( "$#" == 0 )); then
179 "$funName"
180 else
181
182 # Use a subshell to start the coprocess in and use a trap to kill it when exiting the subshell
183 (
184 # Assigned by coproc, makes shellcheck happy
185 local watcher watcher_PID
186
187 # Start inotifywait in the background to monitor all excluded paths
188 coproc watcher {
189 # inotifywait outputs a string on stderr when ready
190 # Redirect it to stdout so we can access it from the coproc's stdout fd
                # exec so that the coprocess is inotifywait itself, making the kill below work correctly
192 # See below why we listen to both open and delete_self events
193 exec inotifywait --format='%e %w' --event open,delete_self --monitor "$@" 2>&1
194 }
195
196 # This will trigger when this subshell exits, no matter if successful or not
197 # After exiting the subshell, the parent shell will continue executing
198 trap 'kill "${watcher_PID}"' exit
199
200 # Synchronously wait until inotifywait is ready
201 while read -r -u "${watcher[0]}" line && [[ "$line" != "Watches established." ]]; do
202 :
203 done
204
205 # Call the function that should not read the given paths and delete them afterwards
206 "$funName"
207
208 # Get the first event
209 read -r -u "${watcher[0]}" event file
210
            # With funName potentially reading files before deleting them,
            # there are only these two possible event timelines:
            # - open*, ..., open*, delete_self, ..., delete_self: If some excluded paths were read
            # - delete_self, ..., delete_self: If no excluded paths were read
            # So by looking at the first event we can figure out which one it is!
            # This also means we don't have to wait to collect all events.
217 case "$event" in
218 OPEN*)
219 die "$funName opened excluded file $file when it shouldn't have"
220 ;;
221 DELETE_SELF)
222 # Expected events
223 ;;
224 *)
                    die "During $funName, unexpected event type '$event' on excluded file $file"
226 ;;
227 esac
228 )
229 fi
230}
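
# A usage sketch (the real test cases below follow this pattern;
# someOperationThatMustNotReadB is a hypothetical placeholder):
#
#   run() {
#       someOperationThatMustNotReadB
#       rmdir b   # delete the monitored path afterwards, triggering delete_self
#   }
#   withFileMonitor run b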
231
232
233# Create the tree structure declared in the tree variable, usage:
234#
235# tree=(
236# [a/b] = # Declare that file a/b should exist
237# [c/a] = # Declare that file c/a should exist
238# [c/d/]= # Declare that directory c/d/ should exist
239# )
240# createTree
241declare -A tree
242createTree() {
243 # Track which paths need to be created
244 local -a dirsToCreate=()
245 local -a filesToCreate=()
246 for p in "${!tree[@]}"; do
247 # If keys end with a `/` we treat them as directories, otherwise files
248 if [[ "$p" =~ /$ ]]; then
249 dirsToCreate+=("$p")
250 else
251 filesToCreate+=("$p")
252 fi
253 done
254
    # Create all the necessary paths.
    # This is done with only a fixed number of processes
    # in order to not be too slow.
    # Though this does mean we're a bit limited in how many files can be created.
259 if (( ${#dirsToCreate[@]} != 0 )); then
260 mkdir -p "${dirsToCreate[@]}"
261 fi
262 if (( ${#filesToCreate[@]} != 0 )); then
263 readarray -d '' -t parentsToCreate < <(dirname -z "${filesToCreate[@]}")
264 mkdir -p "${parentsToCreate[@]}"
265 touch "${filesToCreate[@]}"
266 fi
267}
268
269# Check whether a file set includes/excludes declared paths as expected, usage:
270#
271# tree=(
#   [a/b] =1  # Declare that file a/b should exist and expect it to be included in the store path
#   [c/a] =0  # Declare that file c/a should exist and expect it to be excluded from the store path
#   [c/d/]=0  # Declare that directory c/d/ should exist and expect it to be excluded from the store path
275# )
276# checkFileset './a' # Pass the fileset as the argument
277checkFileset() {
278 local fileset=$1
279
280 # Create the tree
281 createTree
282
    # Process the tree into separate arrays for included paths, included files, excluded paths and excluded files.
284 local -a included=()
285 local -a includedFiles=()
286 local -a excluded=()
287 local -a excludedFiles=()
288 for p in "${!tree[@]}"; do
289 case "${tree[$p]}" in
290 1)
291 included+=("$p")
292 # If keys end with a `/` we treat them as directories, otherwise files
293 if [[ ! "$p" =~ /$ ]]; then
294 includedFiles+=("$p")
295 fi
296 ;;
297 0)
298 excluded+=("$p")
299 if [[ ! "$p" =~ /$ ]]; then
300 excludedFiles+=("$p")
301 fi
302 ;;
303 *)
304 die "Unsupported tree value: ${tree[$p]}"
305 esac
306 done
307
308 # Test that lib.fileset.toList contains exactly the included files.
309 # The /#/./ part prefixes each element with `./`
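    # For example (illustrative): with includedFiles=(a c/b),
    # "${includedFiles[*]/#/./}" expands to "./a ./c/b"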
310 expectEqual "toList ($fileset)" "sort lessThan [ ${includedFiles[*]/#/./} ]"
311
312 expression="toSource { root = ./.; fileset = $fileset; }"
313
    # We don't have lambdas in bash unfortunately,
    # so we just define a function instead and then pass its name
316 # shellcheck disable=SC2317
317 run() {
318 # Call toSource with the fileset, triggering open events for all files that are added to the store
319 expectStorePath "$expression"
320 if (( ${#excludedFiles[@]} != 0 )); then
321 rm "${excludedFiles[@]}"
322 fi
323 }
324
325 # Runs the function while checking that the given excluded files aren't read
326 storePath=$(withFileMonitor run "${excludedFiles[@]}")
327
328 # For each path that should be included, make sure it does occur in the resulting store path
329 for p in "${included[@]}"; do
330 if [[ ! -e "$storePath/$p" ]]; then
331 die "$expression doesn't include path $p when it should have"
332 fi
333 done
334
335 # For each path that should be excluded, make sure it doesn't occur in the resulting store path
336 for p in "${excluded[@]}"; do
337 if [[ -e "$storePath/$p" ]]; then
338 die "$expression included path $p when it shouldn't have"
339 fi
340 done
341
342 rm -rf -- *
343}
344
345
#### Error messages ####
347
348# We're using [[:blank:]] here instead of \s, because only the former is POSIX
349# (see https://pubs.opengroup.org/onlinepubs/007908799/xbd/re.html#tag_007_003_005).
350# And indeed, Darwin's bash only supports the former
351
352# Absolute paths in strings cannot be passed as `root`
353expectFailure 'toSource { root = "/nix/store/foobar"; fileset = ./.; }' 'lib.fileset.toSource: `root` \(/nix/store/foobar\) is a string-like value, but it should be a path instead.
354[[:blank:]]*Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
355
356expectFailure 'toSource { root = cleanSourceWith { src = ./.; }; fileset = ./.; }' 'lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
357[[:blank:]]*To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
358[[:blank:]]*Note that this only works for sources created from paths.'
359
360# Only paths are accepted as `root`
361expectFailure 'toSource { root = 10; fileset = ./.; }' 'lib.fileset.toSource: `root` is of type int, but it should be a path instead.'
362
363# Different filesystem roots in root and fileset are not supported
364mkdir -p {foo,bar}/mock-root
365expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
366 toSource { root = ./foo/mock-root; fileset = ./bar/mock-root; }
367' 'lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` \('"$work"'/foo/mock-root\):
368[[:blank:]]*`root`: Filesystem root is "'"$work"'/foo/mock-root"
369[[:blank:]]*`fileset`: Filesystem root is "'"$work"'/bar/mock-root"
370[[:blank:]]*Different filesystem roots are not supported.'
371rm -rf -- *
372
373# `root` needs to exist
374expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) is a path that does not exist.'
375
376# `root` needs to be a file
377touch a
378expectFailure 'toSource { root = ./a; fileset = ./a; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) is a file, but it should be a directory instead. Potential solutions:
379[[:blank:]]*- If you want to import the file into the store _without_ a containing directory, use string interpolation or `builtins.path` instead of this function.
380[[:blank:]]*- If you want to import the file into the store _with_ a containing directory, set `root` to the containing directory, such as '"$work"', and set `fileset` to the file path.'
381rm -rf -- *
382
383# The fileset argument should be evaluated, even if the directory is empty
384expectFailure 'toSource { root = ./.; fileset = abort "This should be evaluated"; }' 'evaluation aborted with the following error message: '\''This should be evaluated'\'
385
386# Only paths under `root` should be able to influence the result
387mkdir a
388expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `fileset` could contain files in '"$work"', which is not under the `root` \('"$work"'/a\). Potential solutions:
389[[:blank:]]*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
390[[:blank:]]*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
391rm -rf -- *
392
393# non-regular and non-symlink files cannot be added to the Nix store
394mkfifo a
395expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` contains a file that cannot be added to the store: '"$work"'/a
396[[:blank:]]*This file is neither a regular file nor a symlink, the only file types supported by the Nix store.
397[[:blank:]]*Therefore the file set cannot be added to the Nix store as is. Make sure to not include that file to avoid this error.'
398rm -rf -- *
399
400# Path coercion only works for paths
401expectFailure 'toSource { root = ./.; fileset = 10; }' 'lib.fileset.toSource: `fileset` is of type int, but it should be a file set or a path instead.'
402expectFailure 'toSource { root = ./.; fileset = "/some/path"; }' 'lib.fileset.toSource: `fileset` \("/some/path"\) is a string-like value, but it should be a file set or a path instead.
403[[:blank:]]*Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
404expectFailure 'toSource { root = ./.; fileset = cleanSourceWith { src = ./.; }; }' 'lib.fileset.toSource: `fileset` is a `lib.sources`-based value, but it should be a file set or a path instead.
405[[:blank:]]*To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
406[[:blank:]]*Note that this only works for sources created from paths.'
407
408# Path coercion errors for non-existent paths
409expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` \('"$work"'/a\) is a path that does not exist.
410[[:blank:]]*To create a file set from a path that may not exist, use `lib.fileset.maybeMissing`.'
411
412# File sets cannot be evaluated directly
413expectFailure 'union ./. ./.' 'lib.fileset: Directly evaluating a file set is not supported.
414[[:blank:]]*To turn it into a usable source, use `lib.fileset.toSource`.
415[[:blank:]]*To pretty-print the contents, use `lib.fileset.trace` or `lib.fileset.traceVal`.'
416expectFailure '_emptyWithoutBase' 'lib.fileset: Directly evaluating a file set is not supported.
417[[:blank:]]*To turn it into a usable source, use `lib.fileset.toSource`.
418[[:blank:]]*To pretty-print the contents, use `lib.fileset.trace` or `lib.fileset.traceVal`.'
419
420# Past versions of the internal representation are supported
421expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 0; _internalBase = ./.; }' \
422 '{ _internalBase = ./.; _internalBaseComponents = path.subpath.components (path.splitRoot ./.).subpath; _internalBaseRoot = /.; _internalIsEmptyWithoutBase = false; _internalVersion = 3; _type = "fileset"; }'
423expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 1; }' \
424 '{ _type = "fileset"; _internalIsEmptyWithoutBase = false; _internalVersion = 3; }'
425expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 2; }' \
426 '{ _type = "fileset"; _internalIsEmptyWithoutBase = false; _internalVersion = 3; }'
427
428# Future versions of the internal representation are unsupported
429expectFailure '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 4; }' '<tests>: value is a file set created from a future version of the file set library with a different internal representation:
430[[:blank:]]*- Internal version of the file set: 4
431[[:blank:]]*- Internal version of the library: 3
432[[:blank:]]*Make sure to update your Nixpkgs to have a newer version of `lib.fileset`.'
433
434# _create followed by _coerce should give the inputs back without any validation
435expectEqual '{
436 inherit (_coerce "<test>" (_create ./. "directory"))
437 _internalVersion _internalBase _internalTree;
438}' '{ _internalBase = ./.; _internalTree = "directory"; _internalVersion = 3; }'
439
440#### Resulting store path ####
441
442# The store path name should be "source"
443expectEqual 'toSource { root = ./.; fileset = ./.; }' 'sources.cleanSourceWith { name = "source"; src = ./.; }'
444
445# We should be able to import an empty directory and end up with an empty result
446tree=(
447)
448checkFileset './.'
449
450# The empty value without a base should also result in an empty result
451tree=(
452 [a]=0
453)
454checkFileset '_emptyWithoutBase'
455
456# Directories recursively containing no files are not included
457tree=(
458 [e/]=0
459 [d/e/]=0
460 [d/d/e/]=0
461 [d/d/f]=1
462 [d/f]=1
463 [f]=1
464)
465checkFileset './.'
466
467# Check trees that could cause a naïve string prefix checking implementation to fail
468tree=(
469 [a]=0
470 [ab/x]=0
471 [ab/xy]=1
472 [ab/xyz]=0
473 [abc]=0
474)
475checkFileset './ab/xy'
476
477# Check path coercion examples in ../../doc/functions/fileset.section.md
478tree=(
479 [a/x]=1
480 [a/b/y]=1
481 [c/]=0
482 [c/d/]=0
483)
484checkFileset './.'
485
486tree=(
487 [a/x]=1
488 [a/b/y]=1
489 [c/]=0
490 [c/d/]=0
491)
492checkFileset './a'
493
494tree=(
495 [a/x]=1
496 [a/b/y]=0
497 [c/]=0
498 [c/d/]=0
499)
500checkFileset './a/x'
501
502tree=(
503 [a/x]=0
504 [a/b/y]=1
505 [c/]=0
506 [c/d/]=0
507)
508checkFileset './a/b'
509
510tree=(
511 [a/x]=0
512 [a/b/y]=0
513 [c/]=0
514 [c/d/]=0
515)
516checkFileset './c'
517
518# Test the source filter for the somewhat special case of files in the filesystem root
# We can't easily test this with the functions above, because we can't write to the filesystem root
# and we don't want to make any assumptions about which files are present in the sandbox
520expectEqual '_toSourceFilter (_create /. null) "/foo" ""' 'false'
521expectEqual '_toSourceFilter (_create /. { foo = "regular"; }) "/foo" ""' 'true'
522expectEqual '_toSourceFilter (_create /. { foo = null; }) "/foo" ""' 'false'
523
524
525## lib.fileset.toList
526# This function is mainly tested in checkFileset
527
528# The error context for an invalid argument must be correct
529expectFailure 'toList null' 'lib.fileset.toList: Argument is of type null, but it should be a file set or a path instead.'
530
531# Works for the empty fileset
532expectEqual 'toList _emptyWithoutBase' '[ ]'
533
534# Works on empty paths
535expectEqual 'toList ./.' '[ ]'
536
537
538## lib.fileset.union, lib.fileset.unions
539
540
541# Different filesystem roots in root and fileset are not supported
542mkdir -p {foo,bar}/mock-root
543expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
544 toSource { root = ./.; fileset = union ./foo/mock-root ./bar/mock-root; }
545' 'lib.fileset.union: Filesystem roots are not the same:
546[[:blank:]]*First argument: Filesystem root is "'"$work"'/foo/mock-root"
547[[:blank:]]*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
548[[:blank:]]*Different filesystem roots are not supported.'
549
550expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
551 toSource { root = ./.; fileset = unions [ ./foo/mock-root ./bar/mock-root ]; }
552' 'lib.fileset.unions: Filesystem roots are not the same:
553[[:blank:]]*Element 0: Filesystem root is "'"$work"'/foo/mock-root"
554[[:blank:]]*Element 1: Filesystem root is "'"$work"'/bar/mock-root"
555[[:blank:]]*Different filesystem roots are not supported.'
556rm -rf -- *
557
558# Coercion errors show the correct context
559expectFailure 'toSource { root = ./.; fileset = union ./a ./.; }' 'lib.fileset.union: First argument \('"$work"'/a\) is a path that does not exist.'
560expectFailure 'toSource { root = ./.; fileset = union ./. ./b; }' 'lib.fileset.union: Second argument \('"$work"'/b\) is a path that does not exist.'
561expectFailure 'toSource { root = ./.; fileset = unions [ ./a ./. ]; }' 'lib.fileset.unions: Element 0 \('"$work"'/a\) is a path that does not exist.'
562expectFailure 'toSource { root = ./.; fileset = unions [ ./. ./b ]; }' 'lib.fileset.unions: Element 1 \('"$work"'/b\) is a path that does not exist.'
563
564# unions needs a list
565expectFailure 'toSource { root = ./.; fileset = unions null; }' 'lib.fileset.unions: Argument is of type null, but it should be a list instead.'
566
567# The tree of later arguments should not be evaluated if a former argument already includes all files
568tree=()
569checkFileset 'union ./. (_create ./. (abort "This should not be used!"))'
570checkFileset 'unions [ ./. (_create ./. (abort "This should not be used!")) ]'
571
572# unions doesn't include any files for an empty list or only empty values without a base
573tree=(
574 [x]=0
575 [y/z]=0
576)
577checkFileset 'unions [ ]'
578checkFileset 'unions [ _emptyWithoutBase ]'
579checkFileset 'unions [ _emptyWithoutBase _emptyWithoutBase ]'
580checkFileset 'union _emptyWithoutBase _emptyWithoutBase'
581
582# The empty value without a base is the left and right identity of union
583tree=(
584 [x]=1
585 [y/z]=0
586)
587checkFileset 'union ./x _emptyWithoutBase'
588checkFileset 'union _emptyWithoutBase ./x'
589
590# union doesn't include files that weren't specified
591tree=(
592 [x]=1
593 [y]=1
594 [z]=0
595)
596checkFileset 'union ./x ./y'
597checkFileset 'unions [ ./x ./y ]'
598
599# Also for directories
600tree=(
601 [x/a]=1
602 [x/b]=1
603 [y/a]=1
604 [y/b]=1
605 [z/a]=0
606 [z/b]=0
607)
608checkFileset 'union ./x ./y'
609checkFileset 'unions [ ./x ./y ]'
610
611# And for very specific paths
612tree=(
613 [x/a]=1
614 [x/b]=0
615 [y/a]=0
616 [y/b]=1
617 [z/a]=0
618 [z/b]=0
619)
620checkFileset 'union ./x/a ./y/b'
621checkFileset 'unions [ ./x/a ./y/b ]'
622
# unions or chained union calls can include more paths
624tree=(
625 [x/a]=1
626 [x/b]=1
627 [y/a]=1
628 [y/b]=0
629 [z/a]=0
630 [z/b]=1
631)
632checkFileset 'unions [ ./x/a ./x/b ./y/a ./z/b ]'
633checkFileset 'union (union ./x/a ./x/b) (union ./y/a ./z/b)'
634checkFileset 'union (union (union ./x/a ./x/b) ./y/a) ./z/b'
635
636# unions should not stack overflow, even if many elements are passed
637tree=()
638for i in $(seq 1000); do
639 tree[$i/a]=1
640 tree[$i/b]=0
641done
642# This is actually really hard to test:
643# A lot of files would be needed to cause a stack overflow.
644# And while we could limit the maximum stack size using `ulimit -s`,
645# that turns out to not be very deterministic: https://github.com/NixOS/nixpkgs/pull/256417#discussion_r1339396686.
# Meanwhile, the test infra here is not the fastest; creating 10000 files would be too slow.
647# So, just using 1000 files for now.
648checkFileset 'unions (mapAttrsToList (name: _: ./. + "/${name}/a") (builtins.readDir ./.))'
649
650
651## lib.fileset.intersection
652
653
654# Different filesystem roots in root and fileset are not supported
655mkdir -p {foo,bar}/mock-root
656expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
657 toSource { root = ./.; fileset = intersection ./foo/mock-root ./bar/mock-root; }
658' 'lib.fileset.intersection: Filesystem roots are not the same:
659[[:blank:]]*First argument: Filesystem root is "'"$work"'/foo/mock-root"
660[[:blank:]]*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
661[[:blank:]]*Different filesystem roots are not supported.'
662rm -rf -- *
663
664# Coercion errors show the correct context
665expectFailure 'toSource { root = ./.; fileset = intersection ./a ./.; }' 'lib.fileset.intersection: First argument \('"$work"'/a\) is a path that does not exist.'
666expectFailure 'toSource { root = ./.; fileset = intersection ./. ./b; }' 'lib.fileset.intersection: Second argument \('"$work"'/b\) is a path that does not exist.'
667
668# The tree of later arguments should not be evaluated if a former argument already excludes all files
669tree=(
670 [a]=0
671)
672checkFileset 'intersection _emptyWithoutBase (_create ./. (abort "This should not be used!"))'
673# We don't have any combinators that can explicitly remove files yet, so we need to rely on internal functions to test this for now
674checkFileset 'intersection (_create ./. { a = null; }) (_create ./. { a = abort "This should not be used!"; })'
675
676# If either side is empty, the result is empty
677tree=(
678 [a]=0
679)
680checkFileset 'intersection _emptyWithoutBase _emptyWithoutBase'
681checkFileset 'intersection _emptyWithoutBase (_create ./. null)'
682checkFileset 'intersection (_create ./. null) _emptyWithoutBase'
683checkFileset 'intersection (_create ./. null) (_create ./. null)'
684
685# If the intersection base paths are not overlapping, the result is empty and has no base path
686mkdir a b c
687touch {a,b,c}/x
688expectEqual 'toSource { root = ./c; fileset = intersection ./a ./b; }' 'toSource { root = ./c; fileset = _emptyWithoutBase; }'
689rm -rf -- *
690
691# If the intersection exists, the resulting base path is the longest of them
692mkdir a
693touch x a/b
694expectEqual 'toSource { root = ./a; fileset = intersection ./a ./.; }' 'toSource { root = ./a; fileset = ./a; }'
695expectEqual 'toSource { root = ./a; fileset = intersection ./. ./a; }' 'toSource { root = ./a; fileset = ./a; }'
696rm -rf -- *
697
# Also finds the intersection with null'd filesetTrees
699tree=(
700 [a]=0
701 [b]=1
702 [c]=0
703)
704checkFileset 'intersection (_create ./. { a = "regular"; b = "regular"; c = null; }) (_create ./. { a = null; b = "regular"; c = "regular"; })'
705
706# Actually computes the intersection between files
707tree=(
708 [a]=0
709 [b]=0
710 [c]=1
711 [d]=1
712 [e]=0
713 [f]=0
714)
715checkFileset 'intersection (unions [ ./a ./b ./c ./d ]) (unions [ ./c ./d ./e ./f ])'
716
717tree=(
718 [a/x]=0
719 [a/y]=0
720 [b/x]=1
721 [b/y]=1
722 [c/x]=0
723 [c/y]=0
724)
725checkFileset 'intersection ./b ./.'
726checkFileset 'intersection ./b (unions [ ./a/x ./a/y ./b/x ./b/y ./c/x ./c/y ])'
727
728# Complicated case
729tree=(
730 [a/x]=0
731 [a/b/i]=1
732 [c/d/x]=0
733 [c/d/f]=1
734 [c/x]=0
735 [c/e/i]=1
736 [c/e/j]=1
737)
738checkFileset 'intersection (unions [ ./a/b ./c/d ./c/e ]) (unions [ ./a ./c/d/f ./c/e ])'
739
740## Difference
741
742# Subtracting something from itself results in nothing
743tree=(
744 [a]=0
745)
746checkFileset 'difference ./. ./.'
747
748# The tree of the second argument should not be evaluated if not needed
749checkFileset 'difference _emptyWithoutBase (_create ./. (abort "This should not be used!"))'
750checkFileset 'difference (_create ./. null) (_create ./. (abort "This should not be used!"))'
751
752# Subtracting nothing gives the same thing back
753tree=(
754 [a]=1
755)
756checkFileset 'difference ./. _emptyWithoutBase'
757checkFileset 'difference ./. (_create ./. null)'
758
759# Subtracting doesn't influence the base path
760mkdir a b
761touch {a,b}/x
762expectEqual 'toSource { root = ./a; fileset = difference ./a ./b; }' 'toSource { root = ./a; fileset = ./a; }'
763rm -rf -- *
764
765# Also not the other way around
766mkdir a
767expectFailure 'toSource { root = ./a; fileset = difference ./. ./a; }' 'lib.fileset.toSource: `fileset` could contain files in '"$work"', which is not under the `root` \('"$work"'/a\). Potential solutions:
768[[:blank:]]*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
769[[:blank:]]*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
770rm -rf -- *
771
772# Difference actually works
773# We test all combinations of ./., ./a, ./a/x and ./b
774tree=(
775 [a/x]=0
776 [a/y]=0
777 [b]=0
778 [c]=0
779)
780checkFileset 'difference ./. ./.'
781checkFileset 'difference ./a ./.'
782checkFileset 'difference ./a/x ./.'
783checkFileset 'difference ./b ./.'
784checkFileset 'difference ./a ./a'
785checkFileset 'difference ./a/x ./a'
786checkFileset 'difference ./a/x ./a/x'
787checkFileset 'difference ./b ./b'
788tree=(
789 [a/x]=0
790 [a/y]=0
791 [b]=1
792 [c]=1
793)
794checkFileset 'difference ./. ./a'
795tree=(
796 [a/x]=1
797 [a/y]=1
798 [b]=0
799 [c]=0
800)
801checkFileset 'difference ./a ./b'
802tree=(
803 [a/x]=1
804 [a/y]=0
805 [b]=0
806 [c]=0
807)
808checkFileset 'difference ./a/x ./b'
809tree=(
810 [a/x]=0
811 [a/y]=1
812 [b]=0
813 [c]=0
814)
815checkFileset 'difference ./a ./a/x'
816tree=(
817 [a/x]=0
818 [a/y]=0
819 [b]=1
820 [c]=0
821)
822checkFileset 'difference ./b ./a'
823checkFileset 'difference ./b ./a/x'
824tree=(
825 [a/x]=0
826 [a/y]=1
827 [b]=1
828 [c]=1
829)
830checkFileset 'difference ./. ./a/x'
831tree=(
832 [a/x]=1
833 [a/y]=1
834 [b]=0
835 [c]=1
836)
837checkFileset 'difference ./. ./b'
838
839## File filter
840
841# The first argument needs to be a function
842expectFailure 'fileFilter null (abort "this is not needed")' 'lib.fileset.fileFilter: First argument is of type null, but it should be a function instead.'
843
844# The second argument needs to be an existing path
845expectFailure 'fileFilter (file: abort "this is not needed") _emptyWithoutBase' 'lib.fileset.fileFilter: Second argument is a file set, but it should be a path instead.
846[[:blank:]]*If you need to filter files in a file set, use `intersection fileset \(fileFilter pred \./\.\)` instead.'
847expectFailure 'fileFilter (file: abort "this is not needed") null' 'lib.fileset.fileFilter: Second argument is of type null, but it should be a path instead.'
848expectFailure 'fileFilter (file: abort "this is not needed") ./a' 'lib.fileset.fileFilter: Second argument \('"$work"'/a\) is a path that does not exist.'
849
850# The predicate is not called when there's no files
851tree=()
852checkFileset 'fileFilter (file: abort "this is not needed") ./.'
853
854# The predicate must be able to handle extra attributes
855touch a
856expectFailure 'toSource { root = ./.; fileset = fileFilter ({ name, type, hasExt }: true) ./.; }' 'called with unexpected argument '\''"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you'\''re using `\{ name, file, hasExt \}:`, use `\{ name, file, hasExt, ... \}:` instead."'\'
857rm -rf -- *
858
859# .name is the name, and it works correctly, even recursively
860tree=(
861 [a]=1
862 [b]=0
863 [c/a]=1
864 [c/b]=0
865 [d/c/a]=1
866 [d/c/b]=0
867)
868checkFileset 'fileFilter (file: file.name == "a") ./.'
869tree=(
870 [a]=0
871 [b]=1
872 [c/a]=0
873 [c/b]=1
874 [d/c/a]=0
875 [d/c/b]=1
876)
877checkFileset 'fileFilter (file: file.name != "a") ./.'
878
879# `.type` is the file type
880mkdir d
881touch d/a
882ln -s d/b d/b
883mkfifo d/c
884expectEqual \
885 'toSource { root = ./.; fileset = fileFilter (file: file.type == "regular") ./.; }' \
886 'toSource { root = ./.; fileset = ./d/a; }'
887expectEqual \
888 'toSource { root = ./.; fileset = fileFilter (file: file.type == "symlink") ./.; }' \
889 'toSource { root = ./.; fileset = ./d/b; }'
890expectEqual \
891 'toSource { root = ./.; fileset = fileFilter (file: file.type == "unknown") ./.; }' \
892 'toSource { root = ./.; fileset = ./d/c; }'
893expectEqual \
894 'toSource { root = ./.; fileset = fileFilter (file: file.type != "regular") ./.; }' \
895 'toSource { root = ./.; fileset = union ./d/b ./d/c; }'
896expectEqual \
897 'toSource { root = ./.; fileset = fileFilter (file: file.type != "symlink") ./.; }' \
898 'toSource { root = ./.; fileset = union ./d/a ./d/c; }'
899expectEqual \
900 'toSource { root = ./.; fileset = fileFilter (file: file.type != "unknown") ./.; }' \
901 'toSource { root = ./.; fileset = union ./d/a ./d/b; }'
902rm -rf -- *
903
904# Check that .hasExt checks for the file extension
905# The empty extension is the same as a file ending with a .
906tree=(
907 [a]=0
908 [a.]=1
909 [a.b]=0
910 [a.b.]=1
911 [a.b.c]=0
912)
913checkFileset 'fileFilter (file: file.hasExt "") ./.'
914
915# It can check for the last extension
916tree=(
917 [a]=0
918 [.a]=1
919 [.a.]=0
920 [.b.a]=1
921 [.b.a.]=0
922)
923checkFileset 'fileFilter (file: file.hasExt "a") ./.'
924
925# It can check for any extension
926tree=(
927 [a.b.c.d]=1
928)
929checkFileset 'fileFilter (file:
930 all file.hasExt [
931 "b.c.d"
932 "c.d"
933 "d"
934 ]
935) ./.'
936
937# It's lazy
938tree=(
939 [b]=1
940 [c/a]=1
941)
# Note that union evaluates the first argument first if necessary, which is why we can use ./c/a here
943checkFileset 'union ./c/a (fileFilter (file: assert file.name != "a"; true) ./.)'
944# but here we need to use ./c
945checkFileset 'union (fileFilter (file: assert file.name != "a"; true) ./.) ./c'
946
947# Make sure single files are filtered correctly
948tree=(
949 [a]=1
950 [b]=0
951)
952checkFileset 'fileFilter (file: assert file.name == "a"; true) ./a'
953tree=(
954 [a]=0
955 [b]=0
956)
957checkFileset 'fileFilter (file: assert file.name == "a"; false) ./a'
958
959## Tracing
960
961# The second trace argument is returned
962expectEqual 'trace ./. "some value"' 'builtins.trace "(empty)" "some value"'
963
964# The fileset traceVal argument is returned
965expectEqual 'traceVal ./.' 'builtins.trace "(empty)" (_create ./. "directory")'
966
967# The tracing happens before the final argument is needed
968expectEqual 'trace ./.' 'builtins.trace "(empty)" (x: x)'
969
970# Tracing an empty directory shows it as such
971expectTrace './.' '(empty)'
972
973# This also works if there are directories, but all recursively without files
974mkdir -p a/b/c
975expectTrace './.' '(empty)'
976rm -rf -- *
977
978# The empty file set without a base also prints as empty
979expectTrace '_emptyWithoutBase' '(empty)'
980expectTrace 'unions [ ]' '(empty)'
981mkdir foo bar
982touch {foo,bar}/x
983expectTrace 'intersection ./foo ./bar' '(empty)'
984rm -rf -- *
985
986# If a directory is fully included, print it as such
987touch a
988expectTrace './.' "$work"' (all files in directory)'
989rm -rf -- *
990
991# If a directory is not fully included, recurse
992mkdir a b
993touch a/{x,y} b/{x,y}
994expectTrace 'union ./a/x ./b' "$work"'
995- a
996 - x (regular)
997- b (all files in directory)'
998rm -rf -- *
999
1000# If an included path is a file, print its type
1001touch a x
1002ln -s a b
1003mkfifo c
1004expectTrace 'unions [ ./a ./b ./c ]' "$work"'
1005- a (regular)
1006- b (symlink)
1007- c (unknown)'
1008rm -rf -- *
1009
1010# Do not print directories without any files recursively
1011mkdir -p a/b/c
1012touch b x
1013expectTrace 'unions [ ./a ./b ]' "$work"'
1014- b (regular)'
1015rm -rf -- *
1016
1017# If all children are either fully included or empty directories,
1018# the parent should be printed as fully included
1019touch a
1020mkdir b
1021expectTrace 'union ./a ./b' "$work"' (all files in directory)'
1022rm -rf -- *
1023
1024mkdir -p x/b x/c
1025touch x/a
1026touch a
1027# If all children are either fully excluded or empty directories,
1028# the parent should be shown (or rather not shown) as fully excluded
1029expectTrace 'unions [ ./a ./x/b ./x/c ]' "$work"'
1030- a (regular)'
1031rm -rf -- *
1032
1033# Completely filtered out directories also print as empty
1034touch a
1035expectTrace '_create ./. {}' '(empty)'
1036rm -rf -- *
1037
1038# A general test to make sure the resulting format makes sense
1039# Such as indentation and ordering
1040mkdir -p bar/{qux,someDir}
1041touch bar/{baz,qux,someDir/a} foo
1042touch bar/qux/x
1043ln -s x bar/qux/a
1044mkfifo bar/qux/b
1045expectTrace 'unions [
1046 ./bar/baz
1047 ./bar/qux/a
1048 ./bar/qux/b
1049 ./bar/someDir/a
1050 ./foo
1051]' "$work"'
1052- bar
1053 - baz (regular)
1054 - qux
1055 - a (symlink)
1056 - b (unknown)
1057 - someDir (all files in directory)
1058- foo (regular)'
1059rm -rf -- *
1060
1061# For recursively included directories,
1062# `(all files in directory)` should only be used if there's at least one file (otherwise it would be `(empty)`)
1063# and this should be determined without doing a full search
1064#
1065# a is intentionally ordered first here in order to allow triggering the short-circuit behavior
1066# We then check that b is not read
1067# In a more realistic scenario, some directories might need to be recursed into,
1068# but a file would be quickly found to trigger the short-circuit.
1069touch a
1070mkdir b
# We don't have lambdas in bash unfortunately,
# so we just define a function instead and then pass its name
1073# shellcheck disable=SC2317
1074run() {
1075 # This shouldn't read b/
1076 expectTrace './.' "$work"' (all files in directory)'
1077 # Remove all files immediately after, triggering delete_self events for all of them
1078 rmdir b
1079}
1080# Runs the function while checking that b isn't read
1081withFileMonitor run b
1082rm -rf -- *
1083
1084# Partially included directories trace entries as they are evaluated
1085touch a b c
1086expectTrace '_create ./. { a = null; b = "regular"; c = throw "b"; }' "$work"'
1087- b (regular)'
1088
1089# Except entries that need to be evaluated to even figure out if it's only partially included:
1090# Here the directory could be fully excluded or included just from seeing a and b,
1091# so c needs to be evaluated before anything can be traced
1092expectTrace '_create ./. { a = null; b = null; c = throw "c"; }' ''
1093expectTrace '_create ./. { a = "regular"; b = "regular"; c = throw "c"; }' ''
1094rm -rf -- *
1095
1096# We can trace large directories (10000 here) without any problems
1097filesToCreate=({0..9}{0..9}{0..9}{0..9})
1098expectedTrace=$work$'\n'$(printf -- '- %s (regular)\n' "${filesToCreate[@]}")
1099# We need an excluded file so it doesn't print as `(all files in directory)`
1100touch 0 "${filesToCreate[@]}"
1101expectTrace 'unions (mapAttrsToList (n: _: ./. + "/${n}") (removeAttrs (builtins.readDir ./.) [ "0" ]))' "$expectedTrace"
1102rm -rf -- *
1103
1104## lib.fileset.fromSource
1105
1106# Check error messages
1107
1108# String-like values are not supported
1109expectFailure 'fromSource (lib.cleanSource "")' 'lib.fileset.fromSource: The source origin of the argument is a string-like value \(""\), but it should be a path instead.
1110[[:blank:]]*Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.'
1111
1112# Wrong type
1113expectFailure 'fromSource null' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
1114expectFailure 'fromSource (lib.cleanSource null)' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
1115
1116# fromSource on non-existent paths gives an error
1117expectFailure 'fromSource ./a' 'lib.fileset.fromSource: The source origin \('"$work"'/a\) of the argument is a path that does not exist.'
1118
1119# fromSource on a path works and is the same as coercing that path
1120mkdir a
1121touch a/b c
1122expectEqual 'trace (fromSource ./.) null' 'trace ./. null'
1123rm -rf -- *
1124
1125# Check that converting to a file set doesn't read the included files
1126mkdir a
1127touch a/b
1128run() {
1129 expectEqual "trace (fromSource (lib.cleanSourceWith { src = ./a; })) null" "builtins.trace \"$work/a (all files in directory)\" null"
1130 rm a/b
1131}
1132withFileMonitor run a/b
1133rm -rf -- *
1134
1135# Check that converting to a file set doesn't read entries for directories that are filtered out
1136mkdir -p a/b
1137touch a/b/c
1138run() {
1139 expectEqual "trace (fromSource (lib.cleanSourceWith {
1140 src = ./a;
1141 filter = pathString: type: false;
1142 })) null" "builtins.trace \"(empty)\" null"
1143 rm a/b/c
1144 rmdir a/b
1145}
1146withFileMonitor run a/b
1147rm -rf -- *
1148
1149# The filter is not needed on empty directories
1150expectEqual 'trace (fromSource (lib.cleanSourceWith {
1151 src = ./.;
1152 filter = abort "filter should not be needed";
1153})) null' 'trace _emptyWithoutBase null'
1154
1155# Single files also work
1156touch a b
1157expectEqual 'trace (fromSource (cleanSourceWith { src = ./a; })) null' 'trace ./a null'
1158rm -rf -- *
1159
1160# For a tree assigning each subpath true/false,
1161# check whether a source filter with those results includes the same files
1162# as a file set created using fromSource. Usage:
1163#
1164# tree=(
1165# [a]=1 # ./a is a file and the filter should return true for it
1166# [b/]=0 # ./b is a directory and the filter should return false for it
1167# )
1168# checkSource
1169checkSource() {
1170 createTree
1171
1172 # Serialise the tree as JSON (there's only minimal savings with jq,
1173 # and we don't need to handle escapes)
1174 {
1175 echo "{"
1176 first=1
1177 for p in "${!tree[@]}"; do
1178 if [[ -z "$first" ]]; then
1179 echo ","
1180 else
1181 first=
1182 fi
1183 echo "\"$p\":"
1184 case "${tree[$p]}" in
1185 1)
1186 echo "true"
1187 ;;
1188 0)
1189 echo "false"
1190 ;;
1191 *)
1192 die "Unsupported tree value: ${tree[$p]}"
1193 esac
1194 done
1195 echo "}"
1196 } > "$tmp/tree.json"
1197
1198 # An expression to create a source value with a filter matching the tree
1199 sourceExpr='
1200 let
1201 tree = importJSON '"$tmp"'/tree.json;
1202 in
1203 cleanSourceWith {
1204 src = ./.;
1205 filter =
1206 pathString: type:
1207 let
1208 stripped = removePrefix (toString ./. + "/") pathString;
1209 key = stripped + optionalString (type == "directory") "/";
1210 in
1211 tree.${key} or
1212 (throw "tree key ${key} missing");
1213 }
1214 '
1215
1216 filesetExpr='
1217 toSource {
1218 root = ./.;
1219 fileset = fromSource ('"$sourceExpr"');
1220 }
1221 '
1222
1223 # Turn both into store paths
1224 sourceStorePath=$(expectStorePath "$sourceExpr")
1225 filesetStorePath=$(expectStorePath "$filesetExpr")
1226
1227 # Loop through each path in the tree
1228 while IFS= read -r -d $'\0' subpath; do
1229 if [[ ! -e "$sourceStorePath"/"$subpath" ]]; then
1230 # If it's not in the source store path, it's also not in the file set store path
1231 if [[ -e "$filesetStorePath"/"$subpath" ]]; then
                die "The store path $sourceStorePath created by $sourceExpr doesn't contain $subpath, but the corresponding store path $filesetStorePath created via fromSource does contain $subpath"
1233 fi
1234 elif [[ -z "$(find "$sourceStorePath"/"$subpath" -type f)" ]]; then
1235 # If it's an empty directory in the source store path, it shouldn't be in the file set store path
1236 if [[ -e "$filesetStorePath"/"$subpath" ]]; then
                die "The store path $sourceStorePath created by $sourceExpr contains the path $subpath without any files, but the corresponding store path $filesetStorePath created via fromSource didn't omit it"
1238 fi
1239 else
1240 # If it's non-empty directory or a file, it should be in the file set store path
1241 if [[ ! -e "$filesetStorePath"/"$subpath" ]]; then
                die "The store path $sourceStorePath created by $sourceExpr contains the non-empty path $subpath, but the corresponding store path $filesetStorePath created via fromSource doesn't include it"
1243 fi
1244 fi
1245 done < <(find . -mindepth 1 -print0)
1246
1247 rm -rf -- *
1248}
1249
1250# Check whether the filter is evaluated correctly
1251tree=(
1252 [a]=
1253 [b/]=
1254 [b/c]=
1255 [b/d]=
1256 [e/]=
1257 [e/e/]=
1258)
1259# We fill out the above tree values with all possible combinations of 0 and 1
1260# Then check whether a filter based on those return values gets turned into the corresponding file set
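# (Illustrative: with the 6 tree keys above, i runs from 0 to 63 and the binary digits
# of i assign either 0 or 1 to each key, covering every inclusion combination)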
1261for i in $(seq 0 $((2 ** ${#tree[@]} - 1 ))); do
1262 for p in "${!tree[@]}"; do
1263 tree[$p]=$(( i % 2 ))
1264 (( i /= 2 )) || true
1265 done
1266 checkSource
1267done
1268
1269# The filter is called with the same arguments in the same order
1270mkdir a e
1271touch a/b a/c d e
1272expectEqual '
1273 trace (fromSource (cleanSourceWith {
1274 src = ./.;
1275 filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
1276 })) null
1277' '
1278 builtins.seq (cleanSourceWith {
1279 src = ./.;
1280 filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
1281 }).outPath
1282 builtins.trace "'"$work"' (all files in directory)"
1283 null
1284'
1285rm -rf -- *
1286
1287# Test that if a directory is not included, the filter isn't called on its contents
1288mkdir a b
1289touch a/c b/d
1290expectEqual 'trace (fromSource (cleanSourceWith {
1291 src = ./.;
1292 filter = pathString: type:
1293 if pathString == toString ./a then
1294 false
1295 else if pathString == toString ./b then
1296 true
1297 else if pathString == toString ./b/d then
1298 true
1299 else
1300 abort "This filter should not be called with path ${pathString}";
1301})) null' 'trace (_create ./. { b = "directory"; }) null'
1302rm -rf -- *
1303
1304# The filter is called lazily:
# If a later operation, say an intersection, removes a part of the tree, the filter won't run on it
1306mkdir a d
1307touch a/{b,c} d/e
1308expectEqual 'trace (intersection ./a (fromSource (lib.cleanSourceWith {
1309 src = ./.;
1310 filter = pathString: type:
1311 if pathString == toString ./a || pathString == toString ./a/b then
1312 true
1313 else if pathString == toString ./a/c then
1314 false
1315 else
1316 abort "filter should not be called on ${pathString}";
1317}))) null' 'trace ./a/b null'
1318rm -rf -- *
1319
1320## lib.fileset.gitTracked/gitTrackedWith
1321
1322# The first/second argument has to be a path
1323expectFailure 'gitTracked null' 'lib.fileset.gitTracked: Expected the argument to be a path, but it'\''s a null instead.'
1324expectFailure 'gitTrackedWith {} null' 'lib.fileset.gitTrackedWith: Expected the second argument to be a path, but it'\''s a null instead.'
1325
1326# The path must be a directory
1327touch a
1328expectFailure 'gitTracked ./a' 'lib.fileset.gitTracked: Expected the argument \('"$work"'/a\) to be a directory, but it'\''s a file instead'
1329expectFailure 'gitTrackedWith {} ./a' 'lib.fileset.gitTrackedWith: Expected the second argument \('"$work"'/a\) to be a directory, but it'\''s a file instead'
1330rm -rf -- *
1331
1332# The path has to contain a .git directory
1333expectFailure 'gitTracked ./.' 'lib.fileset.gitTracked: Expected the argument \('"$work"'\) to point to a local working tree of a Git repository, but it'\''s not.'
1334expectFailure 'gitTrackedWith {} ./.' 'lib.fileset.gitTrackedWith: Expected the second argument \('"$work"'\) to point to a local working tree of a Git repository, but it'\''s not.'
1335
1336# recurseSubmodules has to be a boolean
1337expectFailure 'gitTrackedWith { recurseSubmodules = null; } ./.' 'lib.fileset.gitTrackedWith: Expected the attribute `recurseSubmodules` of the first argument to be a boolean, but it'\''s a null instead.'
1338
1339# Checks that `gitTrackedWith` contains the same files as `git ls-files`
1340# for the current working directory.
1341# If --recurse-submodules is passed, the flag is passed through to `git ls-files`
1342# and as `recurseSubmodules` to `gitTrackedWith`
1343checkGitTrackedWith() {
1344 if [[ "${1:-}" == "--recurse-submodules" ]]; then
1345 gitLsFlags="--recurse-submodules"
1346 gitTrackedArg="{ recurseSubmodules = true; }"
1347 else
1348 gitLsFlags=""
1349 gitTrackedArg="{ }"
1350 fi
1351
1352 # All files listed by `git ls-files`
1353 expectedFiles=()
1354 while IFS= read -r -d $'\0' file; do
1355 # If there are submodules but --recurse-submodules isn't passed,
1356 # `git ls-files` lists them as empty directories,
        # so we need to filter them out since we only want to check/count files
1358 if [[ -f "$file" ]]; then
1359 expectedFiles+=("$file")
1360 fi
1361 done < <(git ls-files -z $gitLsFlags)
1362
1363 storePath=$(expectStorePath 'toSource { root = ./.; fileset = gitTrackedWith '"$gitTrackedArg"' ./.; }')
1364
1365 # Check that each expected file is also in the store path with the same content
1366 for expectedFile in "${expectedFiles[@]}"; do
1367 if [[ ! -e "$storePath"/"$expectedFile" ]]; then
1368 die "Expected file $expectedFile to exist in $storePath, but it doesn't.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
1369 fi
1370 if ! diff "$expectedFile" "$storePath"/"$expectedFile"; then
1371 die "Expected file $expectedFile to have the same contents as in $storePath, but it doesn't.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
1372 fi
1373 done
1374
1375 # This is a cheap way to verify the inverse: That all files in the store path are also expected
1376 # We just count the number of files in both and verify they're the same
1377 actualFileCount=$(find "$storePath" -type f -printf . | wc -c)
1378 if [[ "${#expectedFiles[@]}" != "$actualFileCount" ]]; then
1379 die "Expected ${#expectedFiles[@]} files in $storePath, but got $actualFileCount.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
1380 fi
1381}
1382
1383
1384# Runs checkGitTrackedWith with and without --recurse-submodules
1385# Allows testing both variants together
1386checkGitTracked() {
1387 checkGitTrackedWith
1388 checkGitTrackedWith --recurse-submodules
1389}
1390
1391createGitRepo() {
1392 git init -q "$1"
1393 # Only repo-local config
1394 git -C "$1" config user.name "Nixpkgs"
1395 git -C "$1" config user.email "nixpkgs@nixos.org"
1396 # Get at least a HEAD commit, needed for older Nix versions
1397 git -C "$1" commit -q --allow-empty -m "Empty commit"
1398}
1399
1400# Check that gitTracked[With] works as expected when evaluated out-of-tree
1401
## First we create a Git repository (and a subrepository) with `default.nix` files referring to their local paths
1403## Simulating how it would be used in the wild
1404createGitRepo .
1405echo '{ fs }: fs.toSource { root = ./.; fileset = fs.gitTracked ./.; }' > default.nix
1406git add .
1407
1408## We can evaluate it locally just fine, `fetchGit` is used underneath to filter git-tracked files
1409expectEqual '(import ./. { fs = lib.fileset; }).outPath' '(fetchGit ./.).outPath'
1410
1411## We can also evaluate when importing from fetched store paths
1412storePath=$(expectStorePath 'fetchGit ./.')
1413expectEqual '(import '"$storePath"' { fs = lib.fileset; }).outPath' \""$storePath"\"
1414
1415## But it fails if the path is imported with a fetcher that doesn't remove .git (like just using "${./.}")
1416expectFailure 'import "${./.}" { fs = lib.fileset; }' 'lib.fileset.gitTracked: The argument \(.*\) is a store path within a working tree of a Git repository.
1417[[:blank:]]*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
1418[[:blank:]]*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
1419[[:blank:]]*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
1420[[:blank:]]*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
1421
1422## Even with submodules
## Both for the main repo containing the submodule
1424echo '{ fs }: fs.toSource { root = ./.; fileset = fs.gitTrackedWith { recurseSubmodules = true; } ./.; }' > default.nix
1425createGitRepo sub
1426git submodule add ./sub sub >/dev/null
1427## But also the submodule itself
1428echo '{ fs }: fs.toSource { root = ./.; fileset = fs.gitTracked ./.; }' > sub/default.nix
1429git -C sub add .
1430
1431## We can evaluate it locally just fine, `fetchGit` is used underneath to filter git-tracked files
1432expectEqual '(import ./. { fs = lib.fileset; }).outPath' '(fetchGit { url = ./.; submodules = true; }).outPath'
1433expectEqual '(import ./sub { fs = lib.fileset; }).outPath' '(fetchGit ./sub).outPath'
1434
1435## We can also evaluate when importing from fetched store paths
1436storePathWithSub=$(expectStorePath 'fetchGit { url = ./.; submodules = true; }')
1437expectEqual '(import '"$storePathWithSub"' { fs = lib.fileset; }).outPath' \""$storePathWithSub"\"
1438storePathSub=$(expectStorePath 'fetchGit ./sub')
1439expectEqual '(import '"$storePathSub"' { fs = lib.fileset; }).outPath' \""$storePathSub"\"
1440
1441## But it fails if the path is imported with a fetcher that doesn't remove .git (like just using "${./.}")
1442expectFailure 'import "${./.}" { fs = lib.fileset; }' 'lib.fileset.gitTrackedWith: The second argument \(.*\) is a store path within a working tree of a Git repository.
1443[[:blank:]]*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
1444[[:blank:]]*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
1445[[:blank:]]*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
1446[[:blank:]]*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
1447expectFailure 'import "${./.}/sub" { fs = lib.fileset; }' 'lib.fileset.gitTracked: The argument \(.*/sub\) is a store path within a working tree of a Git repository.
1448[[:blank:]]*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
1449[[:blank:]]*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
1450[[:blank:]]*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
1451[[:blank:]]*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
1452rm -rf -- *
1453
1454createGitRepo full
1455# Extra commit such that there's a commit that won't be in the shallow clone
1456git -C full commit --allow-empty -q -m extra
1457git clone -q --depth 1 "file://${PWD}/full" shallow
1458cd shallow
1459checkGitTracked
1460cd ..
1461rm -rf -- *
1462
1463# Go through all stages of Git files
1464# See https://www.git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository
1465
1466# Empty repository
1467createGitRepo .
1468checkGitTracked
1469
1470# Untracked file
1471echo a > a
1472checkGitTracked
1473
1474# Staged file
1475git add a
1476checkGitTracked
1477
1478# Committed file
1479git commit -q -m "Added a"
1480checkGitTracked
1481
1482# Edited file
1483echo b > a
1484checkGitTracked
1485
1486# Removed file
1487git rm -f -q a
1488checkGitTracked
1489
1490rm -rf -- *
1491
1492# gitignored file
1493createGitRepo .
1494echo a > .gitignore
1495touch a
1496git add -A
1497checkGitTracked
1498
1499# Add it regardless (needs -f)
1500git add -f a
1501checkGitTracked
1502rm -rf -- *
1503
1504# Directory
1505createGitRepo .
1506mkdir -p d1/d2/d3
1507touch d1/d2/d3/a
1508git add d1
1509checkGitTracked
1510rm -rf -- *
1511
1512# Submodules
1513createGitRepo .
1514createGitRepo sub
1515
1516# Untracked submodule
1517git -C sub commit -q --allow-empty -m "Empty commit"
1518checkGitTracked
1519
1520# Tracked submodule
1521git submodule add ./sub sub >/dev/null
1522checkGitTracked
1523
1524# Untracked file
1525echo a > sub/a
1526checkGitTracked
1527
1528# Staged file
1529git -C sub add a
1530checkGitTracked
1531
1532# Committed file
1533git -C sub commit -q -m "Add a"
1534checkGitTracked
1535
1536# Changed file
1537echo b > sub/b
1538checkGitTracked
1539
1540# Removed file
1541git -C sub rm -f -q a
1542checkGitTracked
1543
1544rm -rf -- *
1545
1546## lib.fileset.maybeMissing
1547
1548# Argument must be a path
1549expectFailure 'maybeMissing "someString"' 'lib.fileset.maybeMissing: Argument \("someString"\) is a string-like value, but it should be a path instead.'
1550expectFailure 'maybeMissing null' 'lib.fileset.maybeMissing: Argument is of type null, but it should be a path instead.'
1551
1552tree=(
1553)
1554checkFileset 'maybeMissing ./a'
1555checkFileset 'maybeMissing ./b'
1556checkFileset 'maybeMissing ./b/c'
1557
1558# Works on single files
1559tree=(
1560 [a]=1
1561 [b/c]=0
1562 [b/d]=0
1563)
1564checkFileset 'maybeMissing ./a'
1565tree=(
1566 [a]=0
1567 [b/c]=1
1568 [b/d]=0
1569)
1570checkFileset 'maybeMissing ./b/c'
1571
1572# Works on directories
1573tree=(
1574 [a]=0
1575 [b/c]=1
1576 [b/d]=1
1577)
1578checkFileset 'maybeMissing ./b'
1579
1580# TODO: Once we have combinators and a property testing library, derive property tests from https://en.wikipedia.org/wiki/Algebra_of_sets
1581
1582echo >&2 tests ok