1#!/usr/bin/env bash
2# shellcheck disable=SC2016
3# shellcheck disable=SC2317
4# shellcheck disable=SC2192
5
6# Tests lib.fileset
7# Run:
8# [nixpkgs]$ lib/fileset/tests.sh
9# or:
10# [nixpkgs]$ nix-build lib/tests/release.nix
11
12set -euo pipefail
13shopt -s inherit_errexit dotglob
14
15die() {
16 # The second to last entry contains the line number of the top-level caller
17 lineIndex=$(( ${#BASH_LINENO[@]} - 2 ))
18 echo >&2 -e "test case at ${BASH_SOURCE[0]}:${BASH_LINENO[$lineIndex]} failed:" "$@"
19 exit 1
20}
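
# Illustrative call (as made from the helpers below); thanks to BASH_LINENO,
# the reported location is that of the top-level test case, not the helper:
#
#   die "$expr evaluated successfully to $result, but it was expected to fail"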
21
22if test -n "${TEST_LIB:-}"; then
23 NIX_PATH=nixpkgs="$(dirname "$TEST_LIB")"
24else
25 NIX_PATH=nixpkgs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.."; pwd)"
26fi
27export NIX_PATH
28
29tmp="$(mktemp -d)"
30clean_up() {
31 rm -rf "$tmp"
32}
33trap clean_up EXIT SIGINT SIGTERM
34work="$tmp/work"
35mkdir "$work"
36cd "$work"
37
38# Crudely unquotes a JSON string by just taking everything between the first and the second quote.
# We're only using this for resulting /nix/store paths, which can't contain " anyway,
40# nor can they contain any other characters that would need to be escaped specially in JSON
41# This way we don't need to add a dependency on e.g. jq
42crudeUnquoteJSON() {
43 cut -d \" -f2
44}
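
# Illustrative example (the store path is a hypothetical placeholder):
#
#   crudeUnquoteJSON <<< '"/nix/store/<hash>-source"'
#   # prints: /nix/store/<hash>-source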
45
46prefixExpression() {
47 echo 'let
48 lib =
49 (import <nixpkgs/lib>)
50 '
51 if [[ "${1:-}" == "--simulate-pure-eval" ]]; then
52 echo '
53 .extend (final: prev: {
54 trivial = prev.trivial // {
55 inPureEvalMode = true;
56 };
57 })'
58 fi
59 echo '
60 ;
61 internal = import <nixpkgs/lib/fileset/internal.nix> {
62 inherit lib;
63 };
64 in
65 with lib;
66 with internal;
67 with lib.fileset;'
68}
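
# Illustrative usage: the prelude printed by prefixExpression brings `lib`, `lib.fileset`
# and the internal functions into scope, so the helpers below can evaluate expressions
# along the lines of:
#
#   nix-instantiate --eval --expr "$(prefixExpression) (toSource { root = ./.; fileset = ./.; })"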
69
# Check that two Nix expressions evaluate to the same value, with the same exit code and stderr.
71# The expressions have `lib.fileset` in scope.
72# Usage: expectEqual NIX NIX
73expectEqual() {
74 local actualExpr=$1
75 local expectedExpr=$2
76 if actualResult=$(nix-instantiate --eval --strict --show-trace 2>"$tmp"/actualStderr \
77 --expr "$(prefixExpression) ($actualExpr)"); then
78 actualExitCode=$?
79 else
80 actualExitCode=$?
81 fi
82 actualStderr=$(< "$tmp"/actualStderr)
83
84 if expectedResult=$(nix-instantiate --eval --strict --show-trace 2>"$tmp"/expectedStderr \
85 --expr "$(prefixExpression) ($expectedExpr)"); then
86 expectedExitCode=$?
87 else
88 expectedExitCode=$?
89 fi
90 expectedStderr=$(< "$tmp"/expectedStderr)
91
92 if [[ "$actualExitCode" != "$expectedExitCode" ]]; then
93 echo "$actualStderr" >&2
94 echo "$actualResult" >&2
95 die "$actualExpr should have exited with $expectedExitCode, but it exited with $actualExitCode"
96 fi
97
98 if [[ "$actualResult" != "$expectedResult" ]]; then
99 die "$actualExpr should have evaluated to $expectedExpr:\n$expectedResult\n\nbut it evaluated to\n$actualResult"
100 fi
101
102 if [[ "$actualStderr" != "$expectedStderr" ]]; then
103 die "$actualExpr should have had this on stderr:\n$expectedStderr\n\nbut it was\n$actualStderr"
104 fi
105}
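
# Illustrative usage (mirroring a real test further down):
#
#   expectEqual 'toSource { root = ./.; fileset = ./.; }' \
#       'sources.cleanSourceWith { name = "source"; src = ./.; }'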
106
# Check that a Nix expression evaluates successfully to a store path and print that path (without quotes).
108# The expression has `lib.fileset` in scope.
109# Usage: expectStorePath NIX
110expectStorePath() {
111 local expr=$1
112 if ! result=$(nix-instantiate --eval --strict --json --read-write-mode --show-trace 2>"$tmp"/stderr \
113 --expr "$(prefixExpression) ($expr)"); then
114 cat "$tmp/stderr" >&2
115 die "$expr failed to evaluate, but it was expected to succeed"
116 fi
    # This is safe because we assume we get back a store path in a JSON string
118 crudeUnquoteJSON <<< "$result"
119}
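
# Illustrative usage: the printed store path can then be inspected with regular shell tools:
#
#   storePath=$(expectStorePath 'toSource { root = ./.; fileset = ./.; }')
#   [[ -e "$storePath" ]]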
120
# Check that a Nix expression fails to evaluate (strictly, in read-write mode)
# and check the received stderr against a regex.
123# The expression has `lib.fileset` in scope.
124# Usage: expectFailure NIX REGEX
125expectFailure() {
126 if [[ "$1" == "--simulate-pure-eval" ]]; then
127 maybePure="--simulate-pure-eval"
128 shift
129 else
130 maybePure=""
131 fi
132 local expr=$1
133 local expectedErrorRegex=$2
134 if result=$(nix-instantiate --eval --strict --read-write-mode --show-trace 2>"$tmp/stderr" \
135 --expr "$(prefixExpression $maybePure) $expr"); then
136 die "$expr evaluated successfully to $result, but it was expected to fail"
137 fi
138 stderr=$(<"$tmp/stderr")
139 if [[ ! "$stderr" =~ $expectedErrorRegex ]]; then
140 die "$expr should have errored with this regex pattern:\n\n$expectedErrorRegex\n\nbut this was the actual error:\n\n$stderr"
141 fi
142}
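
# Illustrative usage (mirroring a real test further down):
#
#   expectFailure 'toSource { root = 10; fileset = ./.; }' \
#       'lib.fileset.toSource: `root` is of type int, but it should be a path instead.'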
143
144# Check that the traces of a Nix expression are as expected when evaluated.
145# The expression has `lib.fileset` in scope.
146# Usage: expectTrace NIX STR
147expectTrace() {
148 local expr=$1
149 local expectedTrace=$2
150
151 nix-instantiate --eval --show-trace >/dev/null 2>"$tmp"/stderrTrace \
152 --expr "$(prefixExpression) trace ($expr)" || true
153
154 actualTrace=$(sed -n 's/^trace: //p' "$tmp/stderrTrace")
155
156 nix-instantiate --eval --show-trace >/dev/null 2>"$tmp"/stderrTraceVal \
157 --expr "$(prefixExpression) traceVal ($expr)" || true
158
159 actualTraceVal=$(sed -n 's/^trace: //p' "$tmp/stderrTraceVal")
160
161 # Test that traceVal returns the same trace as trace
162 if [[ "$actualTrace" != "$actualTraceVal" ]]; then
163 cat "$tmp"/stderrTrace >&2
164 die "$expr traced this for lib.fileset.trace:\n\n$actualTrace\n\nand something different for lib.fileset.traceVal:\n\n$actualTraceVal"
165 fi
166
167 if [[ "$actualTrace" != "$expectedTrace" ]]; then
168 cat "$tmp"/stderrTrace >&2
169 die "$expr should have traced this:\n\n$expectedTrace\n\nbut this was actually traced:\n\n$actualTrace"
170 fi
171}
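
# Illustrative usage (mirroring real tests further down):
#
#   expectTrace './.' '(empty)'
#   expectTrace 'union ./a ./b' "$work"' (all files in directory)'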
172
173# We conditionally use inotifywait in withFileMonitor.
174# Check early whether it's available
175# TODO: Darwin support, though not crucial since we have Linux CI
176if type inotifywait 2>/dev/null >/dev/null; then
177 canMonitor=1
178else
179 echo "Warning: Cannot check for paths not getting read since the inotifywait command (from the inotify-tools package) is not available" >&2
180 canMonitor=
181fi
182
183# Run a function while monitoring that it doesn't read certain paths
184# Usage: withFileMonitor FUNNAME PATH...
# - FUNNAME should be a bash function that:
#   - Performs some operation that should not read certain paths
#   - Deletes those paths without triggering any open events
# - PATH... are the paths that should not get read
#
# This function produces the same output as FUNNAME
191withFileMonitor() {
192 local funName=$1
193 shift
194
195 # If we can't monitor files or have none to monitor, just run the function directly
196 if [[ -z "$canMonitor" ]] || (( "$#" == 0 )); then
197 "$funName"
198 else
199
        # Start the coprocess in a subshell and use a trap to kill it when the subshell exits
201 (
202 # Assigned by coproc, makes shellcheck happy
203 local watcher watcher_PID
204
205 # Start inotifywait in the background to monitor all excluded paths
206 coproc watcher {
207 # inotifywait outputs a string on stderr when ready
208 # Redirect it to stdout so we can access it from the coproc's stdout fd
                # exec so that the coprocess is inotifywait itself, making the kill below work correctly
210 # See below why we listen to both open and delete_self events
211 exec inotifywait --format='%e %w' --event open,delete_self --monitor "$@" 2>&1
212 }
213
214 # This will trigger when this subshell exits, no matter if successful or not
215 # After exiting the subshell, the parent shell will continue executing
216 trap 'kill "${watcher_PID}"' exit
217
218 # Synchronously wait until inotifywait is ready
219 while read -r -u "${watcher[0]}" line && [[ "$line" != "Watches established." ]]; do
220 :
221 done
222
223 # Call the function that should not read the given paths and delete them afterwards
224 "$funName"
225
226 # Get the first event
227 read -r -u "${watcher[0]}" event file
228
            # With funName potentially reading files before deleting them,
            # there are only these two possible event timelines:
231 # - open*, ..., open*, delete_self, ..., delete_self: If some excluded paths were read
232 # - delete_self, ..., delete_self: If no excluded paths were read
233 # So by looking at the first event we can figure out which one it is!
234 # This also means we don't have to wait to collect all events.
235 case "$event" in
236 OPEN*)
237 die "$funName opened excluded file $file when it shouldn't have"
238 ;;
239 DELETE_SELF)
240 # Expected events
241 ;;
242 *)
243 die "During $funName, Unexpected event type '$event' on file $file that should be excluded"
244 ;;
245 esac
246 )
247 fi
248}
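
# Illustrative usage, following the pattern used by checkFileset and the trace tests below
# (b stands in for a hypothetical path that must not be opened):
#
#   run() {
#       # ... do something that must not read b ...
#       rm b    # triggers the expected delete_self event
#   }
#   withFileMonitor run b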
249
250
# Create the tree structure declared in the tree variable, usage:
#
# tree=(
#   [a/b]=   # Declare that file a/b should exist
#   [c/a]=   # Declare that file c/a should exist
#   [c/d/]=  # Declare that directory c/d/ should exist
# )
# createTree
259declare -A tree
260createTree() {
261 # Track which paths need to be created
262 local -a dirsToCreate=()
263 local -a filesToCreate=()
264 for p in "${!tree[@]}"; do
265 # If keys end with a `/` we treat them as directories, otherwise files
266 if [[ "$p" =~ /$ ]]; then
267 dirsToCreate+=("$p")
268 else
269 filesToCreate+=("$p")
270 fi
271 done
272
273 # Create all the necessary paths.
274 # This is done with only a fixed number of processes,
275 # in order to not be too slow
276 # Though this does mean we're a bit limited with how many files can be created
277 if (( ${#dirsToCreate[@]} != 0 )); then
278 mkdir -p "${dirsToCreate[@]}"
279 fi
280 if (( ${#filesToCreate[@]} != 0 )); then
281 readarray -d '' -t parentsToCreate < <(dirname -z "${filesToCreate[@]}")
282 mkdir -p "${parentsToCreate[@]}"
283 touch "${filesToCreate[@]}"
284 fi
285}
286
# Check whether a file set includes/excludes declared paths as expected, usage:
#
# tree=(
#   [a/b]=1  # Declare that file a/b should exist and expect it to be included in the store path
#   [c/a]=0  # Declare that file c/a should exist and expect it to be excluded from the store path
#   [c/d/]=0 # Declare that directory c/d/ should exist and expect it to be excluded from the store path
# )
# checkFileset './a' # Pass the file set as the argument
295checkFileset() {
297 local fileset=$1
298
299 # Create the tree
300 createTree
301
302 # Process the tree into separate arrays for included paths, excluded paths and excluded files.
303 local -a included=()
304 local -a excluded=()
305 local -a excludedFiles=()
306 for p in "${!tree[@]}"; do
307 case "${tree[$p]}" in
308 1)
309 included+=("$p")
310 ;;
311 0)
312 excluded+=("$p")
313 # If keys end with a `/` we treat them as directories, otherwise files
314 if [[ ! "$p" =~ /$ ]]; then
315 excludedFiles+=("$p")
316 fi
317 ;;
318 *)
319 die "Unsupported tree value: ${tree[$p]}"
320 esac
321 done
322
323 expression="toSource { root = ./.; fileset = $fileset; }"
324
    # We don't have lambdas in bash unfortunately,
326 # so we just define a function instead and then pass its name
327 # shellcheck disable=SC2317
328 run() {
329 # Call toSource with the fileset, triggering open events for all files that are added to the store
330 expectStorePath "$expression"
331 if (( ${#excludedFiles[@]} != 0 )); then
332 rm "${excludedFiles[@]}"
333 fi
334 }
335
336 # Runs the function while checking that the given excluded files aren't read
337 storePath=$(withFileMonitor run "${excludedFiles[@]}")
338
339 # For each path that should be included, make sure it does occur in the resulting store path
340 for p in "${included[@]}"; do
341 if [[ ! -e "$storePath/$p" ]]; then
342 die "$expression doesn't include path $p when it should have"
343 fi
344 done
345
346 # For each path that should be excluded, make sure it doesn't occur in the resulting store path
347 for p in "${excluded[@]}"; do
348 if [[ -e "$storePath/$p" ]]; then
349 die "$expression included path $p when it shouldn't have"
350 fi
351 done
352
353 rm -rf -- *
354}
355
356
#### Error messages ####
358
359# Absolute paths in strings cannot be passed as `root`
360expectFailure 'toSource { root = "/nix/store/foobar"; fileset = ./.; }' 'lib.fileset.toSource: `root` \(/nix/store/foobar\) is a string-like value, but it should be a path instead.
361\s*Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
362
363expectFailure 'toSource { root = cleanSourceWith { src = ./.; }; fileset = ./.; }' 'lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
364\s*To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
365\s*Note that this only works for sources created from paths.'
366
367# Only paths are accepted as `root`
368expectFailure 'toSource { root = 10; fileset = ./.; }' 'lib.fileset.toSource: `root` is of type int, but it should be a path instead.'
369
370# Different filesystem roots in root and fileset are not supported
371mkdir -p {foo,bar}/mock-root
372expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
373 toSource { root = ./foo/mock-root; fileset = ./bar/mock-root; }
374' 'lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` \('"$work"'/foo/mock-root\):
375\s*`root`: Filesystem root is "'"$work"'/foo/mock-root"
376\s*`fileset`: Filesystem root is "'"$work"'/bar/mock-root"
377\s*Different filesystem roots are not supported.'
378rm -rf -- *
379
380# `root` needs to exist
381expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) is a path that does not exist.'
382
383# `root` needs to be a file
384touch a
385expectFailure 'toSource { root = ./a; fileset = ./a; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) is a file, but it should be a directory instead. Potential solutions:
386\s*- If you want to import the file into the store _without_ a containing directory, use string interpolation or `builtins.path` instead of this function.
387\s*- If you want to import the file into the store _with_ a containing directory, set `root` to the containing directory, such as '"$work"', and set `fileset` to the file path.'
388rm -rf -- *
389
390# The fileset argument should be evaluated, even if the directory is empty
391expectFailure 'toSource { root = ./.; fileset = abort "This should be evaluated"; }' 'evaluation aborted with the following error message: '\''This should be evaluated'\'
392
393# Only paths under `root` should be able to influence the result
394mkdir a
395expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `fileset` could contain files in '"$work"', which is not under the `root` \('"$work"'/a\). Potential solutions:
396\s*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
397\s*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
398rm -rf -- *
399
400# non-regular and non-symlink files cannot be added to the Nix store
401mkfifo a
402expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` contains a file that cannot be added to the store: '"$work"'/a
403\s*This file is neither a regular file nor a symlink, the only file types supported by the Nix store.
404\s*Therefore the file set cannot be added to the Nix store as is. Make sure to not include that file to avoid this error.'
405rm -rf -- *
406
407# Path coercion only works for paths
408expectFailure 'toSource { root = ./.; fileset = 10; }' 'lib.fileset.toSource: `fileset` is of type int, but it should be a file set or a path instead.'
409expectFailure 'toSource { root = ./.; fileset = "/some/path"; }' 'lib.fileset.toSource: `fileset` \("/some/path"\) is a string-like value, but it should be a file set or a path instead.
410\s*Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
411expectFailure 'toSource { root = ./.; fileset = cleanSourceWith { src = ./.; }; }' 'lib.fileset.toSource: `fileset` is a `lib.sources`-based value, but it should be a file set or a path instead.
412\s*To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
413\s*Note that this only works for sources created from paths.'
414
415# Path coercion errors for non-existent paths
416expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` \('"$work"'/a\) is a path that does not exist.'
417
418# File sets cannot be evaluated directly
419expectFailure 'union ./. ./.' 'lib.fileset: Directly evaluating a file set is not supported.
420\s*To turn it into a usable source, use `lib.fileset.toSource`.
421\s*To pretty-print the contents, use `lib.fileset.trace` or `lib.fileset.traceVal`.'
422expectFailure '_emptyWithoutBase' 'lib.fileset: Directly evaluating a file set is not supported.
423\s*To turn it into a usable source, use `lib.fileset.toSource`.
424\s*To pretty-print the contents, use `lib.fileset.trace` or `lib.fileset.traceVal`.'
425
426# Past versions of the internal representation are supported
427expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 0; _internalBase = ./.; }' \
428 '{ _internalBase = ./.; _internalBaseComponents = path.subpath.components (path.splitRoot ./.).subpath; _internalBaseRoot = /.; _internalIsEmptyWithoutBase = false; _internalVersion = 3; _type = "fileset"; }'
429expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 1; }' \
430 '{ _type = "fileset"; _internalIsEmptyWithoutBase = false; _internalVersion = 3; }'
431expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 2; }' \
432 '{ _type = "fileset"; _internalIsEmptyWithoutBase = false; _internalVersion = 3; }'
433
434# Future versions of the internal representation are unsupported
435expectFailure '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 4; }' '<tests>: value is a file set created from a future version of the file set library with a different internal representation:
436\s*- Internal version of the file set: 4
437\s*- Internal version of the library: 3
438\s*Make sure to update your Nixpkgs to have a newer version of `lib.fileset`.'
439
440# _create followed by _coerce should give the inputs back without any validation
441expectEqual '{
442 inherit (_coerce "<test>" (_create ./. "directory"))
443 _internalVersion _internalBase _internalTree;
444}' '{ _internalBase = ./.; _internalTree = "directory"; _internalVersion = 3; }'
445
446#### Resulting store path ####
447
448# The store path name should be "source"
449expectEqual 'toSource { root = ./.; fileset = ./.; }' 'sources.cleanSourceWith { name = "source"; src = ./.; }'
450
451# We should be able to import an empty directory and end up with an empty result
452tree=(
453)
454checkFileset './.'
455
456# The empty value without a base should also result in an empty result
457tree=(
458 [a]=0
459)
460checkFileset '_emptyWithoutBase'
461
462# Directories recursively containing no files are not included
463tree=(
464 [e/]=0
465 [d/e/]=0
466 [d/d/e/]=0
467 [d/d/f]=1
468 [d/f]=1
469 [f]=1
470)
471checkFileset './.'
472
473# Check trees that could cause a naïve string prefix checking implementation to fail
474tree=(
475 [a]=0
476 [ab/x]=0
477 [ab/xy]=1
478 [ab/xyz]=0
479 [abc]=0
480)
481checkFileset './ab/xy'
482
483# Check path coercion examples in ../../doc/functions/fileset.section.md
484tree=(
485 [a/x]=1
486 [a/b/y]=1
487 [c/]=0
488 [c/d/]=0
489)
490checkFileset './.'
491
492tree=(
493 [a/x]=1
494 [a/b/y]=1
495 [c/]=0
496 [c/d/]=0
497)
498checkFileset './a'
499
500tree=(
501 [a/x]=1
502 [a/b/y]=0
503 [c/]=0
504 [c/d/]=0
505)
506checkFileset './a/x'
507
508tree=(
509 [a/x]=0
510 [a/b/y]=1
511 [c/]=0
512 [c/d/]=0
513)
514checkFileset './a/b'
515
516tree=(
517 [a/x]=0
518 [a/b/y]=0
519 [c/]=0
520 [c/d/]=0
521)
522checkFileset './c'
523
524# Test the source filter for the somewhat special case of files in the filesystem root
525# We can't easily test this with the above functions because we can't write to the filesystem root and we don't want to make any assumptions which files are there in the sandbox
526expectEqual '_toSourceFilter (_create /. null) "/foo" ""' 'false'
527expectEqual '_toSourceFilter (_create /. { foo = "regular"; }) "/foo" ""' 'true'
528expectEqual '_toSourceFilter (_create /. { foo = null; }) "/foo" ""' 'false'
529
530
531## lib.fileset.union, lib.fileset.unions
532
533
534# Different filesystem roots in root and fileset are not supported
535mkdir -p {foo,bar}/mock-root
536expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
537 toSource { root = ./.; fileset = union ./foo/mock-root ./bar/mock-root; }
538' 'lib.fileset.union: Filesystem roots are not the same:
539\s*First argument: Filesystem root is "'"$work"'/foo/mock-root"
540\s*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
541\s*Different filesystem roots are not supported.'
542
543expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
544 toSource { root = ./.; fileset = unions [ ./foo/mock-root ./bar/mock-root ]; }
545' 'lib.fileset.unions: Filesystem roots are not the same:
546\s*Element 0: Filesystem root is "'"$work"'/foo/mock-root"
547\s*Element 1: Filesystem root is "'"$work"'/bar/mock-root"
548\s*Different filesystem roots are not supported.'
549rm -rf -- *
550
551# Coercion errors show the correct context
552expectFailure 'toSource { root = ./.; fileset = union ./a ./.; }' 'lib.fileset.union: First argument \('"$work"'/a\) is a path that does not exist.'
553expectFailure 'toSource { root = ./.; fileset = union ./. ./b; }' 'lib.fileset.union: Second argument \('"$work"'/b\) is a path that does not exist.'
554expectFailure 'toSource { root = ./.; fileset = unions [ ./a ./. ]; }' 'lib.fileset.unions: Element 0 \('"$work"'/a\) is a path that does not exist.'
555expectFailure 'toSource { root = ./.; fileset = unions [ ./. ./b ]; }' 'lib.fileset.unions: Element 1 \('"$work"'/b\) is a path that does not exist.'
556
557# unions needs a list
558expectFailure 'toSource { root = ./.; fileset = unions null; }' 'lib.fileset.unions: Argument is of type null, but it should be a list instead.'
559
560# The tree of later arguments should not be evaluated if a former argument already includes all files
561tree=()
562checkFileset 'union ./. (_create ./. (abort "This should not be used!"))'
563checkFileset 'unions [ ./. (_create ./. (abort "This should not be used!")) ]'
564
565# unions doesn't include any files for an empty list or only empty values without a base
566tree=(
567 [x]=0
568 [y/z]=0
569)
570checkFileset 'unions [ ]'
571checkFileset 'unions [ _emptyWithoutBase ]'
572checkFileset 'unions [ _emptyWithoutBase _emptyWithoutBase ]'
573checkFileset 'union _emptyWithoutBase _emptyWithoutBase'
574
575# The empty value without a base is the left and right identity of union
576tree=(
577 [x]=1
578 [y/z]=0
579)
580checkFileset 'union ./x _emptyWithoutBase'
581checkFileset 'union _emptyWithoutBase ./x'
582
583# union doesn't include files that weren't specified
584tree=(
585 [x]=1
586 [y]=1
587 [z]=0
588)
589checkFileset 'union ./x ./y'
590checkFileset 'unions [ ./x ./y ]'
591
592# Also for directories
593tree=(
594 [x/a]=1
595 [x/b]=1
596 [y/a]=1
597 [y/b]=1
598 [z/a]=0
599 [z/b]=0
600)
601checkFileset 'union ./x ./y'
602checkFileset 'unions [ ./x ./y ]'
603
604# And for very specific paths
605tree=(
606 [x/a]=1
607 [x/b]=0
608 [y/a]=0
609 [y/b]=1
610 [z/a]=0
611 [z/b]=0
612)
613checkFileset 'union ./x/a ./y/b'
614checkFileset 'unions [ ./x/a ./y/b ]'
615
# unions or chained union calls can include more paths
617tree=(
618 [x/a]=1
619 [x/b]=1
620 [y/a]=1
621 [y/b]=0
622 [z/a]=0
623 [z/b]=1
624)
625checkFileset 'unions [ ./x/a ./x/b ./y/a ./z/b ]'
626checkFileset 'union (union ./x/a ./x/b) (union ./y/a ./z/b)'
627checkFileset 'union (union (union ./x/a ./x/b) ./y/a) ./z/b'
628
629# unions should not stack overflow, even if many elements are passed
630tree=()
631for i in $(seq 1000); do
632 tree[$i/a]=1
633 tree[$i/b]=0
634done
635# This is actually really hard to test:
636# A lot of files would be needed to cause a stack overflow.
637# And while we could limit the maximum stack size using `ulimit -s`,
638# that turns out to not be very deterministic: https://github.com/NixOS/nixpkgs/pull/256417#discussion_r1339396686.
# Meanwhile, the test infrastructure here is not the fastest; creating 10000 files would be too slow.
640# So, just using 1000 files for now.
641checkFileset 'unions (mapAttrsToList (name: _: ./. + "/${name}/a") (builtins.readDir ./.))'
642
643
644## lib.fileset.intersection
645
646
647# Different filesystem roots in root and fileset are not supported
648mkdir -p {foo,bar}/mock-root
649expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
650 toSource { root = ./.; fileset = intersection ./foo/mock-root ./bar/mock-root; }
651' 'lib.fileset.intersection: Filesystem roots are not the same:
652\s*First argument: Filesystem root is "'"$work"'/foo/mock-root"
653\s*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
654\s*Different filesystem roots are not supported.'
655rm -rf -- *
656
657# Coercion errors show the correct context
658expectFailure 'toSource { root = ./.; fileset = intersection ./a ./.; }' 'lib.fileset.intersection: First argument \('"$work"'/a\) is a path that does not exist.'
659expectFailure 'toSource { root = ./.; fileset = intersection ./. ./b; }' 'lib.fileset.intersection: Second argument \('"$work"'/b\) is a path that does not exist.'
660
661# The tree of later arguments should not be evaluated if a former argument already excludes all files
662tree=(
663 [a]=0
664)
665checkFileset 'intersection _emptyWithoutBase (_create ./. (abort "This should not be used!"))'
666# We don't have any combinators that can explicitly remove files yet, so we need to rely on internal functions to test this for now
667checkFileset 'intersection (_create ./. { a = null; }) (_create ./. { a = abort "This should not be used!"; })'
668
669# If either side is empty, the result is empty
670tree=(
671 [a]=0
672)
673checkFileset 'intersection _emptyWithoutBase _emptyWithoutBase'
674checkFileset 'intersection _emptyWithoutBase (_create ./. null)'
675checkFileset 'intersection (_create ./. null) _emptyWithoutBase'
676checkFileset 'intersection (_create ./. null) (_create ./. null)'
677
678# If the intersection base paths are not overlapping, the result is empty and has no base path
679mkdir a b c
680touch {a,b,c}/x
681expectEqual 'toSource { root = ./c; fileset = intersection ./a ./b; }' 'toSource { root = ./c; fileset = _emptyWithoutBase; }'
682rm -rf -- *
683
# If the intersection is non-empty, the resulting base path is the longer (more specific) of the two
685mkdir a
686touch x a/b
687expectEqual 'toSource { root = ./a; fileset = intersection ./a ./.; }' 'toSource { root = ./a; fileset = ./a; }'
688expectEqual 'toSource { root = ./a; fileset = intersection ./. ./a; }' 'toSource { root = ./a; fileset = ./a; }'
689rm -rf -- *
690
# Also finds the intersection with null'ed filesetTrees
692tree=(
693 [a]=0
694 [b]=1
695 [c]=0
696)
697checkFileset 'intersection (_create ./. { a = "regular"; b = "regular"; c = null; }) (_create ./. { a = null; b = "regular"; c = "regular"; })'
698
699# Actually computes the intersection between files
700tree=(
701 [a]=0
702 [b]=0
703 [c]=1
704 [d]=1
705 [e]=0
706 [f]=0
707)
708checkFileset 'intersection (unions [ ./a ./b ./c ./d ]) (unions [ ./c ./d ./e ./f ])'
709
710tree=(
711 [a/x]=0
712 [a/y]=0
713 [b/x]=1
714 [b/y]=1
715 [c/x]=0
716 [c/y]=0
717)
718checkFileset 'intersection ./b ./.'
719checkFileset 'intersection ./b (unions [ ./a/x ./a/y ./b/x ./b/y ./c/x ./c/y ])'
720
721# Complicated case
722tree=(
723 [a/x]=0
724 [a/b/i]=1
725 [c/d/x]=0
726 [c/d/f]=1
727 [c/x]=0
728 [c/e/i]=1
729 [c/e/j]=1
730)
731checkFileset 'intersection (unions [ ./a/b ./c/d ./c/e ]) (unions [ ./a ./c/d/f ./c/e ])'
732
733## Difference
734
735# Subtracting something from itself results in nothing
736tree=(
737 [a]=0
738)
739checkFileset 'difference ./. ./.'
740
741# The tree of the second argument should not be evaluated if not needed
742checkFileset 'difference _emptyWithoutBase (_create ./. (abort "This should not be used!"))'
743checkFileset 'difference (_create ./. null) (_create ./. (abort "This should not be used!"))'
744
745# Subtracting nothing gives the same thing back
746tree=(
747 [a]=1
748)
749checkFileset 'difference ./. _emptyWithoutBase'
750checkFileset 'difference ./. (_create ./. null)'
751
752# Subtracting doesn't influence the base path
753mkdir a b
754touch {a,b}/x
755expectEqual 'toSource { root = ./a; fileset = difference ./a ./b; }' 'toSource { root = ./a; fileset = ./a; }'
756rm -rf -- *
757
758# Also not the other way around
759mkdir a
760expectFailure 'toSource { root = ./a; fileset = difference ./. ./a; }' 'lib.fileset.toSource: `fileset` could contain files in '"$work"', which is not under the `root` \('"$work"'/a\). Potential solutions:
761\s*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
762\s*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
763rm -rf -- *
764
765# Difference actually works
766# We test all combinations of ./., ./a, ./a/x and ./b
767tree=(
768 [a/x]=0
769 [a/y]=0
770 [b]=0
771 [c]=0
772)
773checkFileset 'difference ./. ./.'
774checkFileset 'difference ./a ./.'
775checkFileset 'difference ./a/x ./.'
776checkFileset 'difference ./b ./.'
777checkFileset 'difference ./a ./a'
778checkFileset 'difference ./a/x ./a'
779checkFileset 'difference ./a/x ./a/x'
780checkFileset 'difference ./b ./b'
781tree=(
782 [a/x]=0
783 [a/y]=0
784 [b]=1
785 [c]=1
786)
787checkFileset 'difference ./. ./a'
788tree=(
789 [a/x]=1
790 [a/y]=1
791 [b]=0
792 [c]=0
793)
794checkFileset 'difference ./a ./b'
795tree=(
796 [a/x]=1
797 [a/y]=0
798 [b]=0
799 [c]=0
800)
801checkFileset 'difference ./a/x ./b'
802tree=(
803 [a/x]=0
804 [a/y]=1
805 [b]=0
806 [c]=0
807)
808checkFileset 'difference ./a ./a/x'
809tree=(
810 [a/x]=0
811 [a/y]=0
812 [b]=1
813 [c]=0
814)
815checkFileset 'difference ./b ./a'
816checkFileset 'difference ./b ./a/x'
817tree=(
818 [a/x]=0
819 [a/y]=1
820 [b]=1
821 [c]=1
822)
823checkFileset 'difference ./. ./a/x'
824tree=(
825 [a/x]=1
826 [a/y]=1
827 [b]=0
828 [c]=1
829)
830checkFileset 'difference ./. ./b'
831
832## File filter
833
834# The first argument needs to be a function
835expectFailure 'fileFilter null (abort "this is not needed")' 'lib.fileset.fileFilter: First argument is of type null, but it should be a function instead.'
836
837# The second argument needs to be an existing path
838expectFailure 'fileFilter (file: abort "this is not needed") _emptyWithoutBase' 'lib.fileset.fileFilter: Second argument is a file set, but it should be a path instead.
839\s*If you need to filter files in a file set, use `intersection fileset \(fileFilter pred \./\.\)` instead.'
840expectFailure 'fileFilter (file: abort "this is not needed") null' 'lib.fileset.fileFilter: Second argument is of type null, but it should be a path instead.'
841expectFailure 'fileFilter (file: abort "this is not needed") ./a' 'lib.fileset.fileFilter: Second argument \('"$work"'/a\) is a path that does not exist.'
842
# The predicate is not called when there are no files
844tree=()
845checkFileset 'fileFilter (file: abort "this is not needed") ./.'
846
847# The predicate must be able to handle extra attributes
848touch a
849expectFailure 'toSource { root = ./.; fileset = fileFilter ({ name, type }: true) ./.; }' 'called with unexpected argument '\''"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you'\''re using `\{ name, file \}:`, use `\{ name, file, ... \}:` instead."'\'
850rm -rf -- *
851
852# .name is the name, and it works correctly, even recursively
853tree=(
854 [a]=1
855 [b]=0
856 [c/a]=1
857 [c/b]=0
858 [d/c/a]=1
859 [d/c/b]=0
860)
861checkFileset 'fileFilter (file: file.name == "a") ./.'
862tree=(
863 [a]=0
864 [b]=1
865 [c/a]=0
866 [c/b]=1
867 [d/c/a]=0
868 [d/c/b]=1
869)
870checkFileset 'fileFilter (file: file.name != "a") ./.'
871
872# `.type` is the file type
873mkdir d
874touch d/a
875ln -s d/b d/b
876mkfifo d/c
877expectEqual \
878 'toSource { root = ./.; fileset = fileFilter (file: file.type == "regular") ./.; }' \
879 'toSource { root = ./.; fileset = ./d/a; }'
880expectEqual \
881 'toSource { root = ./.; fileset = fileFilter (file: file.type == "symlink") ./.; }' \
882 'toSource { root = ./.; fileset = ./d/b; }'
883expectEqual \
884 'toSource { root = ./.; fileset = fileFilter (file: file.type == "unknown") ./.; }' \
885 'toSource { root = ./.; fileset = ./d/c; }'
886expectEqual \
887 'toSource { root = ./.; fileset = fileFilter (file: file.type != "regular") ./.; }' \
888 'toSource { root = ./.; fileset = union ./d/b ./d/c; }'
889expectEqual \
890 'toSource { root = ./.; fileset = fileFilter (file: file.type != "symlink") ./.; }' \
891 'toSource { root = ./.; fileset = union ./d/a ./d/c; }'
892expectEqual \
893 'toSource { root = ./.; fileset = fileFilter (file: file.type != "unknown") ./.; }' \
894 'toSource { root = ./.; fileset = union ./d/a ./d/b; }'
895rm -rf -- *
896
897# It's lazy
898tree=(
899 [b]=1
900 [c/a]=1
901)
# Note that union evaluates the first argument first if necessary; that's why we can use ./c/a here
903checkFileset 'union ./c/a (fileFilter (file: assert file.name != "a"; true) ./.)'
904# but here we need to use ./c
905checkFileset 'union (fileFilter (file: assert file.name != "a"; true) ./.) ./c'
906
907# Make sure single files are filtered correctly
908tree=(
909 [a]=1
910 [b]=0
911)
912checkFileset 'fileFilter (file: assert file.name == "a"; true) ./a'
913tree=(
914 [a]=0
915 [b]=0
916)
917checkFileset 'fileFilter (file: assert file.name == "a"; false) ./a'
918
919## Tracing
920
921# The second trace argument is returned
922expectEqual 'trace ./. "some value"' 'builtins.trace "(empty)" "some value"'
923
924# The fileset traceVal argument is returned
925expectEqual 'traceVal ./.' 'builtins.trace "(empty)" (_create ./. "directory")'
926
927# The tracing happens before the final argument is needed
928expectEqual 'trace ./.' 'builtins.trace "(empty)" (x: x)'
929
930# Tracing an empty directory shows it as such
931expectTrace './.' '(empty)'
932
933# This also works if there are directories, but all recursively without files
934mkdir -p a/b/c
935expectTrace './.' '(empty)'
936rm -rf -- *
937
938# The empty file set without a base also prints as empty
939expectTrace '_emptyWithoutBase' '(empty)'
940expectTrace 'unions [ ]' '(empty)'
941mkdir foo bar
942touch {foo,bar}/x
943expectTrace 'intersection ./foo ./bar' '(empty)'
944rm -rf -- *
945
946# If a directory is fully included, print it as such
947touch a
948expectTrace './.' "$work"' (all files in directory)'
949rm -rf -- *
950
951# If a directory is not fully included, recurse
952mkdir a b
953touch a/{x,y} b/{x,y}
954expectTrace 'union ./a/x ./b' "$work"'
955- a
956 - x (regular)
957- b (all files in directory)'
958rm -rf -- *
959
960# If an included path is a file, print its type
961touch a x
962ln -s a b
963mkfifo c
964expectTrace 'unions [ ./a ./b ./c ]' "$work"'
965- a (regular)
966- b (symlink)
967- c (unknown)'
968rm -rf -- *
969
970# Do not print directories without any files recursively
971mkdir -p a/b/c
972touch b x
973expectTrace 'unions [ ./a ./b ]' "$work"'
974- b (regular)'
975rm -rf -- *
976
977# If all children are either fully included or empty directories,
978# the parent should be printed as fully included
979touch a
980mkdir b
981expectTrace 'union ./a ./b' "$work"' (all files in directory)'
982rm -rf -- *
983
984mkdir -p x/b x/c
985touch x/a
986touch a
987# If all children are either fully excluded or empty directories,
988# the parent should be shown (or rather not shown) as fully excluded
989expectTrace 'unions [ ./a ./x/b ./x/c ]' "$work"'
990- a (regular)'
991rm -rf -- *
992
993# Completely filtered out directories also print as empty
994touch a
995expectTrace '_create ./. {}' '(empty)'
996rm -rf -- *
997
998# A general test to make sure the resulting format makes sense
999# Such as indentation and ordering
1000mkdir -p bar/{qux,someDir}
1001touch bar/{baz,qux,someDir/a} foo
1002touch bar/qux/x
1003ln -s x bar/qux/a
1004mkfifo bar/qux/b
1005expectTrace 'unions [
1006 ./bar/baz
1007 ./bar/qux/a
1008 ./bar/qux/b
1009 ./bar/someDir/a
1010 ./foo
1011]' "$work"'
1012- bar
1013 - baz (regular)
1014 - qux
1015 - a (symlink)
1016 - b (unknown)
1017 - someDir (all files in directory)
1018- foo (regular)'
1019rm -rf -- *
1020
1021# For recursively included directories,
1022# `(all files in directory)` should only be used if there's at least one file (otherwise it would be `(empty)`)
1023# and this should be determined without doing a full search
1024#
# a is intentionally ordered first here to allow triggering the short-circuit behavior
1026# We then check that b is not read
1027# In a more realistic scenario, some directories might need to be recursed into,
1028# but a file would be quickly found to trigger the short-circuit.
1029touch a
1030mkdir b
# We don't have lambdas in bash unfortunately,
1032# so we just define a function instead and then pass its name
1033# shellcheck disable=SC2317
1034run() {
1035 # This shouldn't read b/
1036 expectTrace './.' "$work"' (all files in directory)'
1037 # Remove all files immediately after, triggering delete_self events for all of them
1038 rmdir b
1039}
1040# Runs the function while checking that b isn't read
1041withFileMonitor run b
1042rm -rf -- *
1043
1044# Partially included directories trace entries as they are evaluated
1045touch a b c
1046expectTrace '_create ./. { a = null; b = "regular"; c = throw "b"; }' "$work"'
1047- b (regular)'
1048
1049# Except entries that need to be evaluated to even figure out if it's only partially included:
1050# Here the directory could be fully excluded or included just from seeing a and b,
1051# so c needs to be evaluated before anything can be traced
1052expectTrace '_create ./. { a = null; b = null; c = throw "c"; }' ''
1053expectTrace '_create ./. { a = "regular"; b = "regular"; c = throw "c"; }' ''
1054rm -rf -- *
1055
1056# We can trace large directories (10000 here) without any problems
1057filesToCreate=({0..9}{0..9}{0..9}{0..9})
1058expectedTrace=$work$'\n'$(printf -- '- %s (regular)\n' "${filesToCreate[@]}")
1059# We need an excluded file so it doesn't print as `(all files in directory)`
1060touch 0 "${filesToCreate[@]}"
1061expectTrace 'unions (mapAttrsToList (n: _: ./. + "/${n}") (removeAttrs (builtins.readDir ./.) [ "0" ]))' "$expectedTrace"
1062rm -rf -- *
1063
1064## lib.fileset.fromSource
1065
1066# Check error messages
1067
1068# String-like values are not supported
1069expectFailure 'fromSource (lib.cleanSource "")' 'lib.fileset.fromSource: The source origin of the argument is a string-like value \(""\), but it should be a path instead.
1070\s*Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.'
1071
1072# Wrong type
1073expectFailure 'fromSource null' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
1074expectFailure 'fromSource (lib.cleanSource null)' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
1075
1076# fromSource on non-existent paths gives an error
1077expectFailure 'fromSource ./a' 'lib.fileset.fromSource: The source origin \('"$work"'/a\) of the argument is a path that does not exist.'
1078
1079# fromSource on a path works and is the same as coercing that path
1080mkdir a
1081touch a/b c
1082expectEqual 'trace (fromSource ./.) null' 'trace ./. null'
1083rm -rf -- *
1084
1085# Check that converting to a file set doesn't read the included files
1086mkdir a
1087touch a/b
1088run() {
1089 expectEqual "trace (fromSource (lib.cleanSourceWith { src = ./a; })) null" "builtins.trace \"$work/a (all files in directory)\" null"
1090 rm a/b
1091}
1092withFileMonitor run a/b
1093rm -rf -- *
1094
1095# Check that converting to a file set doesn't read entries for directories that are filtered out
1096mkdir -p a/b
1097touch a/b/c
1098run() {
1099 expectEqual "trace (fromSource (lib.cleanSourceWith {
1100 src = ./a;
1101 filter = pathString: type: false;
1102 })) null" "builtins.trace \"(empty)\" null"
1103 rm a/b/c
1104 rmdir a/b
1105}
1106withFileMonitor run a/b
1107rm -rf -- *
1108
1109# The filter is not needed on empty directories
1110expectEqual 'trace (fromSource (lib.cleanSourceWith {
1111 src = ./.;
1112 filter = abort "filter should not be needed";
1113})) null' 'trace _emptyWithoutBase null'
1114
1115# Single files also work
1116touch a b
1117expectEqual 'trace (fromSource (cleanSourceWith { src = ./a; })) null' 'trace ./a null'
1118rm -rf -- *
1119
1120# For a tree assigning each subpath true/false,
1121# check whether a source filter with those results includes the same files
1122# as a file set created using fromSource. Usage:
1123#
1124# tree=(
1125# [a]=1 # ./a is a file and the filter should return true for it
1126# [b/]=0 # ./b is a directory and the filter should return false for it
1127# )
1128# checkSource
1129checkSource() {
1130 createTree
1131
    # Serialise the tree as JSON (there are only minimal savings with jq,
    # and we don't need to handle escapes)
1134 {
1135 echo "{"
1136 first=1
1137 for p in "${!tree[@]}"; do
1138 if [[ -z "$first" ]]; then
1139 echo ","
1140 else
1141 first=
1142 fi
1143 echo "\"$p\":"
1144 case "${tree[$p]}" in
1145 1)
1146 echo "true"
1147 ;;
1148 0)
1149 echo "false"
1150 ;;
1151 *)
1152 die "Unsupported tree value: ${tree[$p]}"
1153 esac
1154 done
1155 echo "}"
1156 } > "$tmp/tree.json"
1157
1158 # An expression to create a source value with a filter matching the tree
1159 sourceExpr='
1160 let
1161 tree = importJSON '"$tmp"'/tree.json;
1162 in
1163 cleanSourceWith {
1164 src = ./.;
1165 filter =
1166 pathString: type:
1167 let
1168 stripped = removePrefix (toString ./. + "/") pathString;
1169 key = stripped + optionalString (type == "directory") "/";
1170 in
1171 tree.${key} or
1172 (throw "tree key ${key} missing");
1173 }
1174 '
1175
1176 filesetExpr='
1177 toSource {
1178 root = ./.;
1179 fileset = fromSource ('"$sourceExpr"');
1180 }
1181 '
1182
1183 # Turn both into store paths
1184 sourceStorePath=$(expectStorePath "$sourceExpr")
1185 filesetStorePath=$(expectStorePath "$filesetExpr")
1186
1187 # Loop through each path in the tree
1188 while IFS= read -r -d $'\0' subpath; do
1189 if [[ ! -e "$sourceStorePath"/"$subpath" ]]; then
1190 # If it's not in the source store path, it's also not in the file set store path
1191 if [[ -e "$filesetStorePath"/"$subpath" ]]; then
1192 die "The store path $sourceStorePath created by $expr doesn't contain $subpath, but the corresponding store path $filesetStorePath created via fromSource does contain $subpath"
1193 fi
1194 elif [[ -z "$(find "$sourceStorePath"/"$subpath" -type f)" ]]; then
1195 # If it's an empty directory in the source store path, it shouldn't be in the file set store path
1196 if [[ -e "$filesetStorePath"/"$subpath" ]]; then
1197 die "The store path $sourceStorePath created by $expr contains the path $subpath without any files, but the corresponding store path $filesetStorePath created via fromSource didn't omit it"
1198 fi
1199 else
1200 # If it's non-empty directory or a file, it should be in the file set store path
1201 if [[ ! -e "$filesetStorePath"/"$subpath" ]]; then
1202 die "The store path $sourceStorePath created by $expr contains the non-empty path $subpath, but the corresponding store path $filesetStorePath created via fromSource doesn't include it"
1203 fi
1204 fi
1205 done < <(find . -mindepth 1 -print0)
1206
1207 rm -rf -- *
1208}
1209
1210# Check whether the filter is evaluated correctly
1211tree=(
1212 [a]=
1213 [b/]=
1214 [b/c]=
1215 [b/d]=
1216 [e/]=
1217 [e/e/]=
1218)
1219# We fill out the above tree values with all possible combinations of 0 and 1
1220# Then check whether a filter based on those return values gets turned into the corresponding file set
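# For example, with the 6 keys above there are 2^6 = 64 combinations (seq 0 63);
# each iteration peels off the bits of i one at a time (i % 2, then i /= 2), assigning one bit per key.
# The enumeration order of the keys doesn't matter, since all combinations are covered either way.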
1221for i in $(seq 0 $((2 ** ${#tree[@]} - 1 ))); do
1222 for p in "${!tree[@]}"; do
1223 tree[$p]=$(( i % 2 ))
1224 (( i /= 2 )) || true
1225 done
1226 checkSource
1227done
1228
1229# The filter is called with the same arguments in the same order
1230mkdir a e
1231touch a/b a/c d e
1232expectEqual '
1233 trace (fromSource (cleanSourceWith {
1234 src = ./.;
1235 filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
1236 })) null
1237' '
1238 builtins.seq (cleanSourceWith {
1239 src = ./.;
1240 filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
1241 }).outPath
1242 builtins.trace "'"$work"' (all files in directory)"
1243 null
1244'
1245rm -rf -- *
1246
1247# Test that if a directory is not included, the filter isn't called on its contents
1248mkdir a b
1249touch a/c b/d
1250expectEqual 'trace (fromSource (cleanSourceWith {
1251 src = ./.;
1252 filter = pathString: type:
1253 if pathString == toString ./a then
1254 false
1255 else if pathString == toString ./b then
1256 true
1257 else if pathString == toString ./b/d then
1258 true
1259 else
1260 abort "This filter should not be called with path ${pathString}";
1261})) null' 'trace (_create ./. { b = "directory"; }) null'
1262rm -rf -- *
1263
1264# The filter is called lazily:
# If a later operation, say an intersection, removes a part of the tree, the filter won't run on it
1266mkdir a d
1267touch a/{b,c} d/e
1268expectEqual 'trace (intersection ./a (fromSource (lib.cleanSourceWith {
1269 src = ./.;
1270 filter = pathString: type:
1271 if pathString == toString ./a || pathString == toString ./a/b then
1272 true
1273 else if pathString == toString ./a/c then
1274 false
1275 else
1276 abort "filter should not be called on ${pathString}";
1277}))) null' 'trace ./a/b null'
1278rm -rf -- *
1279
1280## lib.fileset.gitTracked/gitTrackedWith
1281
1282# The first/second argument has to be a path
1283expectFailure 'gitTracked null' 'lib.fileset.gitTracked: Expected the argument to be a path, but it'\''s a null instead.'
1284expectFailure 'gitTrackedWith {} null' 'lib.fileset.gitTrackedWith: Expected the second argument to be a path, but it'\''s a null instead.'
1285
1286# The path has to contain a .git directory
1287expectFailure 'gitTracked ./.' 'lib.fileset.gitTracked: Expected the argument \('"$work"'\) to point to a local working tree of a Git repository, but it'\''s not.'
1288expectFailure 'gitTrackedWith {} ./.' 'lib.fileset.gitTrackedWith: Expected the second argument \('"$work"'\) to point to a local working tree of a Git repository, but it'\''s not.'
1289
1290# recurseSubmodules has to be a boolean
1291expectFailure 'gitTrackedWith { recurseSubmodules = null; } ./.' 'lib.fileset.gitTrackedWith: Expected the attribute `recurseSubmodules` of the first argument to be a boolean, but it'\''s a null instead.'
1292
1293# recurseSubmodules = true is not supported on all Nix versions
1294if [[ "$(nix-instantiate --eval --expr "$(prefixExpression) (versionAtLeast builtins.nixVersion _fetchGitSubmodulesMinver)")" == true ]]; then
1295 fetchGitSupportsSubmodules=1
1296else
1297 fetchGitSupportsSubmodules=
1298 expectFailure 'gitTrackedWith { recurseSubmodules = true; } ./.' 'lib.fileset.gitTrackedWith: Setting the attribute `recurseSubmodules` to `true` is only supported for Nix version 2.4 and after, but Nix version [0-9.]+ is used.'
1299fi
1300
1301# Checks that `gitTrackedWith` contains the same files as `git ls-files`
1302# for the current working directory.
1303# If --recurse-submodules is passed, the flag is passed through to `git ls-files`
1304# and as `recurseSubmodules` to `gitTrackedWith`
1305checkGitTrackedWith() {
1306 if [[ "${1:-}" == "--recurse-submodules" ]]; then
1307 gitLsFlags="--recurse-submodules"
1308 gitTrackedArg="{ recurseSubmodules = true; }"
1309 else
1310 gitLsFlags=""
1311 gitTrackedArg="{ }"
1312 fi
1313
1314 # All files listed by `git ls-files`
1315 expectedFiles=()
1316 while IFS= read -r -d $'\0' file; do
1317 # If there are submodules but --recurse-submodules isn't passed,
1318 # `git ls-files` lists them as empty directories,
1319 # we need to filter that out since we only want to check/count files
1320 if [[ -f "$file" ]]; then
1321 expectedFiles+=("$file")
1322 fi
1323 done < <(git ls-files -z $gitLsFlags)
1324
1325 storePath=$(expectStorePath 'toSource { root = ./.; fileset = gitTrackedWith '"$gitTrackedArg"' ./.; }')
1326
1327 # Check that each expected file is also in the store path with the same content
1328 for expectedFile in "${expectedFiles[@]}"; do
1329 if [[ ! -e "$storePath"/"$expectedFile" ]]; then
1330 die "Expected file $expectedFile to exist in $storePath, but it doesn't.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
1331 fi
1332 if ! diff "$expectedFile" "$storePath"/"$expectedFile"; then
1333 die "Expected file $expectedFile to have the same contents as in $storePath, but it doesn't.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
1334 fi
1335 done
1336
1337 # This is a cheap way to verify the inverse: That all files in the store path are also expected
1338 # We just count the number of files in both and verify they're the same
1339 actualFileCount=$(find "$storePath" -type f -printf . | wc -c)
1340 if [[ "${#expectedFiles[@]}" != "$actualFileCount" ]]; then
1341 die "Expected ${#expectedFiles[@]} files in $storePath, but got $actualFileCount.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
1342 fi
1343}
1344
1345
1346# Runs checkGitTrackedWith with and without --recurse-submodules
1347# Allows testing both variants together
1348checkGitTracked() {
1349 checkGitTrackedWith
1350 if [[ -n "$fetchGitSupportsSubmodules" ]]; then
1351 checkGitTrackedWith --recurse-submodules
1352 fi
1353}
1354
1355createGitRepo() {
1356 git init -q "$1"
1357 # Only repo-local config
1358 git -C "$1" config user.name "Nixpkgs"
1359 git -C "$1" config user.email "nixpkgs@nixos.org"
1360 # Get at least a HEAD commit, needed for older Nix versions
1361 git -C "$1" commit -q --allow-empty -m "Empty commit"
1362}
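
# Illustrative usage (as done in the tests below):
#
#   createGitRepo .      # turn the working directory into a Git repository
#   createGitRepo sub    # create a repository to be used as a submodule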
1363
1364# Check the error message for pure eval mode
1365createGitRepo .
1366expectFailure --simulate-pure-eval 'toSource { root = ./.; fileset = gitTracked ./.; }' 'lib.fileset.gitTracked: This function is currently not supported in pure evaluation mode, since it currently relies on `builtins.fetchGit`. See https://github.com/NixOS/nix/issues/9292.'
1367expectFailure --simulate-pure-eval 'toSource { root = ./.; fileset = gitTrackedWith {} ./.; }' 'lib.fileset.gitTrackedWith: This function is currently not supported in pure evaluation mode, since it currently relies on `builtins.fetchGit`. See https://github.com/NixOS/nix/issues/9292.'
1368rm -rf -- *
1369
1370# Go through all stages of Git files
1371# See https://www.git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository
1372
1373# Empty repository
1374createGitRepo .
1375checkGitTracked
1376
1377# Untracked file
1378echo a > a
1379checkGitTracked
1380
1381# Staged file
1382git add a
1383checkGitTracked
1384
1385# Committed file
1386git commit -q -m "Added a"
1387checkGitTracked
1388
1389# Edited file
1390echo b > a
1391checkGitTracked
1392
1393# Removed file
1394git rm -f -q a
1395checkGitTracked
1396
1397rm -rf -- *
1398
1399# gitignored file
1400createGitRepo .
1401echo a > .gitignore
1402touch a
1403git add -A
1404checkGitTracked
1405
1406# Add it regardless (needs -f)
1407git add -f a
1408checkGitTracked
1409rm -rf -- *
1410
1411# Directory
1412createGitRepo .
1413mkdir -p d1/d2/d3
1414touch d1/d2/d3/a
1415git add d1
1416checkGitTracked
1417rm -rf -- *
1418
1419# Submodules
1420createGitRepo .
1421createGitRepo sub
1422
1423# Untracked submodule
1424git -C sub commit -q --allow-empty -m "Empty commit"
1425checkGitTracked
1426
1427# Tracked submodule
1428git submodule add ./sub sub >/dev/null
1429checkGitTracked
1430
1431# Untracked file
1432echo a > sub/a
1433checkGitTracked
1434
1435# Staged file
1436git -C sub add a
1437checkGitTracked
1438
1439# Committed file
1440git -C sub commit -q -m "Add a"
1441checkGitTracked
1442
1443# Changed file
1444echo b > sub/b
1445checkGitTracked
1446
1447# Removed file
1448git -C sub rm -f -q a
1449checkGitTracked
1450
1451rm -rf -- *
1452
1453# TODO: Once we have combinators and a property testing library, derive property tests from https://en.wikipedia.org/wiki/Algebra_of_sets
1454
1455echo >&2 tests ok