nixos/private-gpt: drop

Changed files
+8 -155
+2
doc/release-notes/rl-2511.section.md
···
- NixOS display manager modules now strictly use tty1, where many of them previously used tty7. Options to configure display managers' VT have been dropped. A configuration with a display manager enabled will not start `getty@tty1.service`, even if the system is forced to boot into `multi-user.target` instead of `graphical.target`.
+- The [private-gpt](https://github.com/zylon-ai/private-gpt) service has been removed due to lack of upstream maintenance.
+
## Other Notable Changes {#sec-nixpkgs-release-25.11-notable-changes}
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
+1 -1
nixos/doc/manual/release-notes/rl-2405.section.md
···
- [pretalx](https://github.com/pretalx/pretalx), a conference planning tool. Available as [services.pretalx](#opt-services.pretalx.enable).
-- [private-gpt](https://github.com/zylon-ai/private-gpt), a service to interact with your documents using the power of LLMs, 100% privately, no data leaks. Available as [services.private-gpt](#opt-services.private-gpt.enable).
+- [private-gpt](https://github.com/zylon-ai/private-gpt), a service to interact with your documents using the power of LLMs, 100% privately, no data leaks.
- [Prometheus DNSSEC Exporter](https://github.com/chrj/prometheus-dnssec-exporter): check for validity and expiration in DNSSEC signatures and expose metrics for Prometheus. Available as [services.prometheus.exporters.dnssec](#opt-services.prometheus.exporters.dnssec.enable).
+2
nixos/doc/manual/release-notes/rl-2511.section.md
···
- The `wstunnel` module was converted to RFC42-style settings, you will need to update your NixOS config if you make use of this module.
+- The [private-gpt](https://github.com/zylon-ai/private-gpt) service has been removed due to lack of upstream maintenance.
+
## Other Notable Changes {#sec-release-25.11-notable-changes}
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
-1
nixos/modules/module-list.nix
···
./services/misc/polaris.nix
./services/misc/portunus.nix
./services/misc/preload.nix
-./services/misc/private-gpt.nix
./services/misc/pufferpanel.nix
./services/misc/pykms.nix
./services/misc/radicle.nix
+3 -1
nixos/modules/rename.nix
···
The signald project is unmaintained and has long been incompatible with the
official Signal servers.
'')
-
+(mkRemovedOptionModule [ "services" "private-gpt" ] ''
+  The private-gpt package and the corresponding module have been removed due to being broken and unmaintained.
+'')
# Do NOT add any option renames here, see top of the file
];
}
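With this `mkRemovedOptionModule` entry in place, any configuration that still defines a `services.private-gpt.*` option fails its assertions with the removal message above. A minimal sketch of a leftover configuration fragment that would now trigger that failure (illustrative only, not part of this change):

# Hypothetical leftover configuration: after this change, defining any
# services.private-gpt option makes system evaluation fail with the
# removal notice added to rename.nix above.
{
  services.private-gpt.enable = true;
}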
-122
nixos/modules/services/misc/private-gpt.nix
···
{
  config,
  lib,
  pkgs,
  ...
}:
let
  inherit (lib) types;

  format = pkgs.formats.yaml { };
  cfg = config.services.private-gpt;
in
{
  options = {
    services.private-gpt = {
      enable = lib.mkEnableOption "private-gpt for local large language models";
      package = lib.mkPackageOption pkgs "private-gpt" { };

      stateDir = lib.mkOption {
        type = types.path;
        default = "/var/lib/private-gpt";
        description = "State directory of private-gpt.";
      };

      settings = lib.mkOption {
        type = format.type;
        default = {
          llm = {
            mode = "ollama";
            tokenizer = "";
          };
          embedding = {
            mode = "ollama";
          };
          ollama = {
            llm_model = "llama3";
            embedding_model = "nomic-embed-text";
            api_base = "http://localhost:11434";
            embedding_api_base = "http://localhost:11434";
            keep_alive = "5m";
            tfs_z = 1;
            top_k = 40;
            top_p = 0.9;
            repeat_last_n = 64;
            repeat_penalty = 1.2;
            request_timeout = 120;
          };
          vectorstore = {
            database = "qdrant";
          };
          qdrant = {
            path = "/var/lib/private-gpt/vectorstore/qdrant";
          };
          data = {
            local_data_folder = "/var/lib/private-gpt";
          };
          openai = { };
          azopenai = { };
        };
        description = ''
          settings-local.yaml for private-gpt
        '';
      };
    };
  };

  config = lib.mkIf cfg.enable {
    systemd.services.private-gpt = {
      description = "Interact with your documents using the power of GPT, 100% privately, no data leaks";
      wantedBy = [ "multi-user.target" ];
      after = [ "network.target" ];

      preStart =
        let
          config = format.generate "settings-local.yaml" (cfg.settings // { server.env_name = "local"; });
        in
        ''
          mkdir -p ${cfg.stateDir}/{settings,huggingface,matplotlib,tiktoken_cache}
          cp ${cfg.package.cl100k_base.tiktoken} ${cfg.stateDir}/tiktoken_cache/9b5ad71b2ce5302211f9c61530b329a4922fc6a4
          cp ${pkgs.python3Packages.private-gpt}/${pkgs.python3.sitePackages}/private_gpt/settings.yaml ${cfg.stateDir}/settings/settings.yaml
          cp "${config}" "${cfg.stateDir}/settings/settings-local.yaml"
          chmod 600 "${cfg.stateDir}/settings/settings-local.yaml"
        '';

      environment = {
        PGPT_PROFILES = "local";
        PGPT_SETTINGS_FOLDER = "${cfg.stateDir}/settings";
        HF_HOME = "${cfg.stateDir}/huggingface";
        TRANSFORMERS_OFFLINE = "1";
        HF_DATASETS_OFFLINE = "1";
        MPLCONFIGDIR = "${cfg.stateDir}/matplotlib";
      };

      serviceConfig = {
        ExecStart = lib.getExe cfg.package;
        WorkingDirectory = cfg.stateDir;
        StateDirectory = "private-gpt";
        RuntimeDirectory = "private-gpt";
        RuntimeDirectoryMode = "0755";
        PrivateTmp = true;
        DynamicUser = true;
        DevicePolicy = "closed";
        LockPersonality = true;
        MemoryDenyWriteExecute = true;
        PrivateUsers = true;
        ProtectHome = true;
        ProtectHostname = true;
        ProtectKernelLogs = true;
        ProtectKernelModules = true;
        ProtectKernelTunables = true;
        ProtectControlGroups = true;
        ProcSubset = "pid";
        RestrictNamespaces = true;
        RestrictRealtime = true;
        SystemCallArchitectures = "native";
        UMask = "0077";
      };
    };
  };

  meta.maintainers = [ ];
}
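For context on what the deleted module did at runtime: its `preStart` rendered `cfg.settings` (with `server.env_name` forced to `"local"`) into the `settings-local.yaml` that private-gpt reads when `PGPT_PROFILES=local` is set, as the module's environment did. A minimal standalone sketch of that rendering step, using only a small subset of the old defaults:

# Sketch of the YAML rendering the removed module's preStart relied on:
# pkgs.formats.yaml turns a Nix attrset into a YAML file in the store,
# with server.env_name forced to "local" so private-gpt loads it as the
# "local" profile. The settings below are a small illustrative subset.
{
  pkgs ? import <nixpkgs> { },
}:
let
  format = pkgs.formats.yaml { };
  settings = {
    llm.mode = "ollama";
    embedding.mode = "ollama";
    ollama.llm_model = "llama3";
  };
in
format.generate "settings-local.yaml" (settings // { server.env_name = "local"; })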
-1
nixos/tests/all-tests.nix
···
_module.args.socket = false;
_module.args.listenTcp = false;
};
-private-gpt = runTest ./private-gpt.nix;
privatebin = runTest ./privatebin.nix;
privoxy = runTest ./privoxy.nix;
prometheus = import ./prometheus { inherit runTest; };
-29
nixos/tests/private-gpt.nix
···
{ pkgs, lib, ... }:
let
  mainPort = "8001";
in
{
  name = "private-gpt";
  meta = with lib.maintainers; {
    maintainers = [ ];
  };

  nodes = {
    machine =
      { ... }:
      {
        services.private-gpt = {
          enable = true;
        };
      };
  };

  testScript = ''
    machine.start()

    machine.wait_for_unit("private-gpt.service")
    machine.wait_for_open_port(${mainPort})

    machine.succeed("curl http://127.0.0.1:${mainPort}")
  '';
}