# Test a minimal HDFS cluster with no HA
import ../make-test-python.nix ({ package, lib, ... }:
{
  name = "hadoop-hdfs";

  nodes = let
    # core-site.xml settings shared by both nodes: every node must agree on
    # the default filesystem URI, and the proxyuser entries allow the httpfs
    # service user to act on behalf of any user/host.
    coreSite = {
      "fs.defaultFS" = "hdfs://namenode:8020";
      "hadoop.proxyuser.httpfs.groups" = "*";
      "hadoop.proxyuser.httpfs.hosts" = "*";
    };
  in {
    namenode = { ... }: {
      services.hadoop = {
        inherit package;
        hdfs = {
          namenode = {
            enable = true;
            openFirewall = true;
            # Format the filesystem on first boot so the cluster is usable
            # without a manual `hdfs namenode -format` step.
            formatOnInit = true;
          };
          httpfs = {
            # The NixOS hadoop module only supports webHDFS on 3.3 and newer
            enable = lib.mkIf (lib.versionAtLeast package.version "3.3") true;
            openFirewall = true;
          };
        };
        inherit coreSite;
      };
    };
    datanode = { ... }: {
      services.hadoop = {
        inherit package;
        hdfs.datanode = {
          enable = true;
          openFirewall = true;
          dataDirs = [{
            type = "DISK";
            path = "/tmp/dn1";
          }];
        };
        inherit coreSite;
      };
    };
  };

  testScript = ''
    start_all()

    namenode.wait_for_unit("hdfs-namenode")
    namenode.wait_for_unit("network.target")
    namenode.wait_for_open_port(8020)
    namenode.succeed("systemd-cat ss -tulpne")
    namenode.succeed("systemd-cat cat /etc/hadoop*/hdfs-site.xml")
    namenode.wait_for_open_port(9870)

    datanode.wait_for_unit("hdfs-datanode")
    datanode.wait_for_unit("network.target")
  ''
  # Datanode ports moved between Hadoop 2.x and 3.x
  # (web UI 50075 -> 9864, data transfer 50010 -> 9866, IPC 50020 -> 9867).
  + (if lib.versionAtLeast package.version "3" then ''
    datanode.wait_for_open_port(9864)
    datanode.wait_for_open_port(9866)
    datanode.wait_for_open_port(9867)

    datanode.succeed("curl -f http://datanode:9864")
  '' else ''
    datanode.wait_for_open_port(50075)
    datanode.wait_for_open_port(50010)
    datanode.wait_for_open_port(50020)

    datanode.succeed("curl -f http://datanode:50075")
  '') + ''
    namenode.succeed("curl -f http://namenode:9870")

    datanode.succeed("sudo -u hdfs hdfs dfsadmin -safemode wait")
    datanode.succeed("echo testfilecontents | sudo -u hdfs hdfs dfs -put - /testfile")
    assert "testfilecontents" in datanode.succeed("sudo -u hdfs hdfs dfs -cat /testfile")

  ''
  # httpfs is only configured on Hadoop >= 3.3 (see the httpfs option above),
  # so only exercise its webHDFS endpoint there.
  + lib.optionalString (lib.versionAtLeast package.version "3.3") ''
    namenode.wait_for_unit("hdfs-httpfs")
    namenode.wait_for_open_port(14000)
    assert "testfilecontents" in datanode.succeed("curl -f \"http://namenode:14000/webhdfs/v1/testfile?user.name=hdfs&op=OPEN\" 2>&1")
  '';
})