# Test a minimal HDFS cluster with no HA
import ../make-test-python.nix ({ package, lib, ... }:
with lib;
{
  name = "hadoop-hdfs";

  nodes = let
    coreSite = {
      "fs.defaultFS" = "hdfs://namenode:8020";
      "hadoop.proxyuser.httpfs.groups" = "*";
      "hadoop.proxyuser.httpfs.hosts" = "*";
    };
  in {
    namenode = { pkgs, ... }: {
      services.hadoop = {
        inherit package;
        hdfs = {
          namenode = {
            enable = true;
            openFirewall = true;
            formatOnInit = true;
          };
          httpfs = {
            # The NixOS hadoop module only supports webHDFS on 3.3 and newer
            enable = mkIf (versionAtLeast package.version "3.3") true;
            openFirewall = true;
          };
        };
        inherit coreSite;
      };
    };
    datanode = { pkgs, ... }: {
      services.hadoop = {
        inherit package;
        hdfs.datanode = {
          enable = true;
          openFirewall = true;
          dataDirs = [{
            type = "DISK";
            path = "/tmp/dn1";
          }];
        };
        inherit coreSite;
      };
    };
  };

  testScript = ''
    start_all()

    namenode.wait_for_unit("hdfs-namenode")
    namenode.wait_for_unit("network.target")
    # NameNode RPC (8020) and web UI (9870)
    namenode.wait_for_open_port(8020)
    namenode.succeed("ss -tulpne | systemd-cat")
    namenode.succeed("cat /etc/hadoop*/hdfs-site.xml | systemd-cat")
    namenode.wait_for_open_port(9870)

    datanode.wait_for_unit("hdfs-datanode")
    datanode.wait_for_unit("network.target")
  '' + ( if versionAtLeast package.version "3" then ''
    # Hadoop 3.x DataNode ports: web UI (9864), data transfer (9866), IPC (9867)
    datanode.wait_for_open_port(9864)
    datanode.wait_for_open_port(9866)
    datanode.wait_for_open_port(9867)

    datanode.succeed("curl -f http://datanode:9864")
  '' else ''
    # Hadoop 2.x DataNode ports: web UI (50075), data transfer (50010), IPC (50020)
    datanode.wait_for_open_port(50075)
    datanode.wait_for_open_port(50010)
    datanode.wait_for_open_port(50020)

    datanode.succeed("curl -f http://datanode:50075")
  '' ) + ''
    namenode.succeed("curl -f http://namenode:9870")

    # Write a file through the DataNode and read it back once the cluster leaves safe mode
    datanode.succeed("sudo -u hdfs hdfs dfsadmin -safemode wait")
    datanode.succeed("echo testfilecontents | sudo -u hdfs hdfs dfs -put - /testfile")
    assert "testfilecontents" in datanode.succeed("sudo -u hdfs hdfs dfs -cat /testfile")

  '' + optionalString ( versionAtLeast package.version "3.3" ) ''
    # Read the same file back through the HttpFS REST gateway (port 14000)
    namenode.wait_for_unit("hdfs-httpfs")
    namenode.wait_for_open_port(14000)
    assert "testfilecontents" in datanode.succeed("curl -f \"http://namenode:14000/webhdfs/v1/testfile?user.name=hdfs&op=OPEN\" 2>&1")
  '';
})