# at 22.05-pre 1.8 kB (scraped page header; kept as a comment)
# Test a minimal HDFS cluster with no HA:
# one NameNode (with HttpFS) plus one DataNode, then exercise basic
# dfs put/cat and the HttpFS WebHDFS REST endpoint.
import ../make-test-python.nix ({...}: {
  nodes = {
    # NameNode host: formats the filesystem on first boot and also runs HttpFS.
    namenode = {pkgs, ...}: {
      services.hadoop = {
        package = pkgs.hadoop;
        hdfs = {
          namenode = {
            enable = true;
            # Run `hdfs namenode -format` automatically on initial startup.
            formatOnInit = true;
          };
          httpfs.enable = true;
        };
        coreSite = {
          "fs.defaultFS" = "hdfs://namenode:8020";
          # Allow the httpfs service user to proxy requests on behalf of any
          # user/group/host (needed for the WebHDFS call at the end of the test).
          "hadoop.proxyuser.httpfs.groups" = "*";
          "hadoop.proxyuser.httpfs.hosts" = "*";
        };
      };
    };
    # DataNode host: stores blocks and registers with the NameNode above.
    datanode = {pkgs, ...}: {
      services.hadoop = {
        package = pkgs.hadoop;
        hdfs.datanode.enable = true;
        coreSite = {
          "fs.defaultFS" = "hdfs://namenode:8020";
          "hadoop.proxyuser.httpfs.groups" = "*";
          "hadoop.proxyuser.httpfs.hosts" = "*";
        };
      };
    };
  };

  testScript = ''
    start_all()

    # NameNode up: RPC (8020) and web UI (9870).
    namenode.wait_for_unit("hdfs-namenode")
    namenode.wait_for_unit("network.target")
    namenode.wait_for_open_port(8020)
    namenode.wait_for_open_port(9870)

    # DataNode up: web UI (9864), data transfer (9866), IPC (9867).
    datanode.wait_for_unit("hdfs-datanode")
    datanode.wait_for_unit("network.target")
    datanode.wait_for_open_port(9864)
    datanode.wait_for_open_port(9866)
    datanode.wait_for_open_port(9867)

    # Both web UIs answer HTTP.
    namenode.succeed("curl -f http://namenode:9870")
    datanode.succeed("curl -f http://datanode:9864")

    # Wait until the NameNode leaves safe mode, then round-trip a file.
    datanode.succeed("sudo -u hdfs hdfs dfsadmin -safemode wait")
    datanode.succeed("echo testfilecontents | sudo -u hdfs hdfs dfs -put - /testfile")
    assert "testfilecontents" in datanode.succeed("sudo -u hdfs hdfs dfs -cat /testfile")

    # Read the same file back through the HttpFS WebHDFS REST gateway (14000).
    namenode.wait_for_unit("hdfs-httpfs")
    namenode.wait_for_open_port(14000)
    assert "testfilecontents" in datanode.succeed("curl -f \"http://namenode:14000/webhdfs/v1/testfile?user.name=hdfs&op=OPEN\" 2>&1")
  '';
})