{
  config,
  pkgs,
  lib,
  ...
}:
let
  cfg = config.services.nextjs-ollama-llm-ui;
  # We have to override the URL to an Ollama service here, because it gets baked into the web app.
  nextjs-ollama-llm-ui = cfg.package.override { inherit (cfg) ollamaUrl; };
in
{
  options = {
    services.nextjs-ollama-llm-ui = {
      enable = lib.mkEnableOption ''
        Simple Ollama web UI service; an easy-to-use web frontend for an Ollama backend service.
        Run state-of-the-art AI large language models (LLMs) similar to ChatGPT locally, with privacy,
        on your personal computer.
        This service is stateless and doesn't store any data on the server; all data is kept
        locally in your web browser.
        See <https://github.com/jakobhoeg/nextjs-ollama-llm-ui>.

        Required: You need an Ollama backend service running and
        "services.nextjs-ollama-llm-ui.ollamaUrl" pointing to its URL.
        You can host such a backend service with NixOS through "services.ollama".
      '';
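
      # A minimal sketch, assuming the Ollama backend runs on the same machine under
      # its default address "http://127.0.0.1:11434" (which matches the ollamaUrl default):
      #
      #   services.ollama.enable = true;
      #   services.nextjs-ollama-llm-ui.enable = true;
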
      package = lib.mkPackageOption pkgs "nextjs-ollama-llm-ui" { };

      hostname = lib.mkOption {
        type = lib.types.str;
        default = "127.0.0.1";
        example = "ui.example.org";
        description = ''
          The hostname under which the Ollama UI interface should be accessible.
          By default it uses localhost/127.0.0.1, so it is only accessible from the local machine.
          Change it to "0.0.0.0" to make it directly accessible from the local network.

          Note: You should keep it at 127.0.0.1 and only expose the UI to the local
          network or the internet through a reverse proxy that handles encryption (TLS).
          See <https://wiki.nixos.org/wiki/Nginx> for instructions on how to set up such a reverse proxy.
        '';
      };
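
      # A sketched nginx reverse proxy in front of the UI, assuming the illustrative domain
      # "ui.example.org" and the default port 3000 (adjust names, certificates and ports to
      # your setup):
      #
      #   services.nginx.virtualHosts."ui.example.org" = {
      #     enableACME = true;
      #     forceSSL = true;
      #     locations."/".proxyPass = "http://127.0.0.1:3000";
      #   };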

      port = lib.mkOption {
        type = lib.types.port;
        default = 3000;
        example = 3000;
        description = ''
          The port under which the Ollama UI interface should be accessible.
        '';
      };

      ollamaUrl = lib.mkOption {
        type = lib.types.str;
        default = "http://127.0.0.1:11434";
        example = "https://ollama.example.org";
        description = ''
          The address (including host and port) under which the Ollama backend server can be reached.
          Note that if the UI service is running under a domain such as "https://ui.example.org",
          the Ollama backend service must allow "CORS" requests from this domain, e.g. by adding
          "services.ollama.environment.OLLAMA_ORIGINS = [ ... "https://ui.example.org" ];".
        '';
      };
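
      # A sketch of a split setup where UI and backend live on different domains, assuming the
      # UI is served from "https://ui.example.org" (the OLLAMA_ORIGINS option path follows the
      # description above; verify it against your NixOS release):
      #
      #   services.nextjs-ollama-llm-ui.ollamaUrl = "https://ollama.example.org";
      #   services.ollama.environment.OLLAMA_ORIGINS = [ "https://ui.example.org" ];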
    };
  };

  config = lib.mkIf cfg.enable {
    systemd.services = {

      nextjs-ollama-llm-ui = {
        wantedBy = [ "multi-user.target" ];
        description = "Next.js Ollama LLM UI";
        after = [ "network.target" ];
        environment = {
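          # HOSTNAME and PORT are picked up by the Next.js standalone server at startup;
          # NEXT_PUBLIC_OLLAMA_URL mirrors the value already baked in via the package override above.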
          HOSTNAME = cfg.hostname;
          PORT = toString cfg.port;
          NEXT_PUBLIC_OLLAMA_URL = cfg.ollamaUrl;
        };
        serviceConfig = {
          ExecStart = "${lib.getExe nextjs-ollama-llm-ui}";
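          # Run as an ephemeral, unprivileged user; CacheDirectory provides a writable
          # /var/cache/nextjs-ollama-llm-ui for the otherwise stateless service.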
          DynamicUser = true;
          CacheDirectory = "nextjs-ollama-llm-ui";
        };
      };
    };
  };
  meta.maintainers = with lib.maintainers; [ malteneuss ];
}