CUDA + Ollama on homepc

Aleksandr Lebedev 2025-02-08 13:47:14 +01:00
parent 54c1a40ca7
commit 247379dce8
6 changed files with 72 additions and 25 deletions


@@ -24,6 +24,49 @@
    #inputs.nix-gaming.packages.${pkgs.system}.star-citizen
  ];
  # LLMs
  services.ollama = {
    enable = true;
    loadModels = [ "deepseek-r1:32b" ];
    acceleration = "cuda";
    home = "/persist/ollama";
    user = "ollama";
    group = "ollama";
  };
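  # Run Open WebUI as the static ollama user instead of a systemd DynamicUser,
  # so its state directory under /persist is owned consistently across restarts.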
  services.open-webui.enable = true;
  services.open-webui.openFirewall = true;
  services.open-webui.host = "0.0.0.0";
  services.open-webui.stateDir = "/persist/open-webui";
  systemd.services.open-webui.serviceConfig.User = "ollama";
  systemd.services.open-webui.serviceConfig.Group = "ollama";
  systemd.services.open-webui.serviceConfig.DynamicUser = lib.mkForce false;
  # Chat host: reverse-proxy Open WebUI behind nginx with ACME/Let's Encrypt TLS
  networking.firewall.allowedTCPPorts = [ 80 443 ];
  security.acme = {
    acceptTerms = true;
    defaults.email = "alex.lebedev2003@icloud.com";
  };
  services.nginx.enable = true;
  services.nginx = {
    # Use recommended settings
    recommendedGzipSettings = true;
    recommendedOptimisation = true;
    recommendedProxySettings = true;
    recommendedTlsSettings = true;
  };
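  # Shared vhost defaults: request a Let's Encrypt certificate and redirect HTTP to HTTPS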
  services.nginx.virtualHosts = let
    SSL = {
      enableACME = true;
      forceSSL = true;
    };
  in {
    "chat.kylekrein.com" = SSL // {
      locations."/" = {
        proxyPass = "http://127.0.0.1:8080/";
        proxyWebsockets = true;
      };
    };
  };
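  # Don't block boot on systemd-networkd-wait-online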
  systemd.network.wait-online.enable = lib.mkForce false;
}