From 53ed61892c3af0e99de65fa4603cbce81db40b44 Mon Sep 17 00:00:00 2001 From: Aleksandr Lebedev Date: Fri, 17 Oct 2025 16:17:40 +0200 Subject: [PATCH] Ollama on stargate --- modules/nixos/services/ai/default.nix | 11 ++++++---- .../x86_64-linux/stargate/services/nginx.nix | 8 ++++++++ .../x86_64-linux/stargate/services/ollama.nix | 20 +++++++++++++++++++ 3 files changed, 35 insertions(+), 4 deletions(-) create mode 100644 systems/x86_64-linux/stargate/services/ollama.nix diff --git a/modules/nixos/services/ai/default.nix b/modules/nixos/services/ai/default.nix index 4f31a0a..5e7cfca 100644 --- a/modules/nixos/services/ai/default.nix +++ b/modules/nixos/services/ai/default.nix @@ -31,6 +31,11 @@ in { Search for models of your choice from: ''; }; + home = mkOpt path ( + if impermanence.enable + then "${persist}/ollama" + else "/var/lib/ollama" + ) "Path to data folder"; ui.enable = mkBoolOpt true "Enable openwebui at localhost:8080"; ui.port = mkOption { type = types.port; @@ -49,10 +54,8 @@ in { if nvidia.enable then "cuda" else null; - home = - if impermanence.enable - then "${persist}/ollama" - else "/var/lib/ollama"; + home = cfg.home; + user = "ollama"; group = "ollama"; }; diff --git a/systems/x86_64-linux/stargate/services/nginx.nix b/systems/x86_64-linux/stargate/services/nginx.nix index bc38324..cff3a4d 100644 --- a/systems/x86_64-linux/stargate/services/nginx.nix +++ b/systems/x86_64-linux/stargate/services/nginx.nix @@ -138,6 +138,14 @@ in { proxyWebsockets = true; }; }; + "chat.kylekrein.com" = { + enableACME = true; + forceSSL = true; + locations."/" = { + proxyPass = "http://${config.services.open-webui.host}:${builtins.toString config.services.open-webui.port}"; + proxyWebsockets = true; + }; + }; "smart-home.kylekrein.com" = { forceSSL = true; enableACME = true; diff --git a/systems/x86_64-linux/stargate/services/ollama.nix b/systems/x86_64-linux/stargate/services/ollama.nix new file mode 100644 index 0000000..564e82e --- /dev/null +++ 
b/systems/x86_64-linux/stargate/services/ollama.nix
@@ -0,0 +1,20 @@
+# Ollama + Open WebUI for the stargate host.
+{...}: {
+  # Local LLM runtime; loadModels pulls these models when the service starts.
+  services.ollama = {
+    enable = true;
+    loadModels = ["qwq" "llama3.1" "qwen2.5-coder:7b" "gpt-oss:20b"];
+    acceleration = null; # CPU-only on this host (no CUDA/ROCm acceleration).
+    # Dedicated user/group, matching the shared ai module's settings.
+    user = "ollama";
+    group = "ollama";
+  };
+
+  # Chat frontend; proxied by the nginx vhost chat.kylekrein.com,
+  # so its port is not opened in the firewall.
+  services.open-webui = {
+    enable = true;
+    openFirewall = false;
+    port = 7009;
+  };
+}