{ lib, ... }:
let
  # 11434 is ollama's default listen port; altPort verifies that
  # `services.ollama.port` is honored.
  mainPort = 11434;
  altPort = 11435;
in
{
  name = "ollama";
  meta.maintainers = with lib.maintainers; [ abysssol ];

  nodes = {
    # Plain CPU-only ollama with all defaults (listens on mainPort).
    cpu =
      { ... }:
      {
        services.ollama.enable = true;
      };

    # Same service, but configured to listen on a non-default port.
    altAddress =
      { ... }:
      {
        services.ollama.enable = true;
        services.ollama.port = altPort;
      };
  };

  testScript = ''
    import json

    def curl_request_ollama(prompt, port):
        # Build a curl command that POSTs the JSON-encoded prompt to the
        # ollama generate endpoint on localhost at the given port.
        json_prompt = json.dumps(prompt)
        return f"""curl http://127.0.0.1:{port}/api/generate -d '{json_prompt}'"""

    prompt = {
        "model": "tinydolphin",
        "prompt": "lorem ipsum",
        "options": {
            # Fixed seed and zero temperature so any generated output
            # would be deterministic.
            "seed": 69,
            "temperature": 0,
        },
    }


    # Each entry pairs a VM with the port its ollama instance listens on.
    vms = [
        (cpu, ${toString mainPort}),
        (altAddress, ${toString altPort}),
    ]

    start_all()
    for (vm, port) in vms:
        vm.wait_for_unit("multi-user.target")
        vm.wait_for_open_port(port)
        # Only checks that the API endpoint responds; the command's output
        # is not inspected, so no need to bind it.
        vm.succeed(curl_request_ollama(prompt, port), timeout = 100)
  '';
}