refactor(architect/llm.nix): update configuration to use config for host and port

Giulio De Pasquale 2025-03-09 14:31:50 +00:00
parent 4285da55bf
commit d7a3a3bcf3

architect/llm.nix

@@ -1,9 +1,8 @@
-{ pkgs, ... }:
+{ config, pkgs, ... }:
 let
   backendDomain = "ollama.giugl.io";
-  ollamaHost = "127.0.0.1";
-  ollamaPort = 11434;
+  frontendDomain = "llm.giugl.io";
   ollamaPkg = pkgs.unstablePkgs.ollama-cuda;
 in
 {
@@ -15,8 +14,6 @@ in
     enable = true;
     package = ollamaPkg;
-    host = ollamaHost;
-    port = ollamaPort;
     acceleration = "cuda";
     environmentVariables = {
       OLLAMA_FLASH_ATTENTION = "1";
@@ -29,15 +26,15 @@ in
     dnsInterfaces = [ "tailscale" "lan" ];
     locations."/" = {
-      host = ollamaHost;
-      port = ollamaPort;
+      host = config.services.ollama.host;
+      port = config.services.ollama.port;
       allowLan = true;
       allowWAN = true;
       recommendedProxySettings = false;
       extraConfig = ''
         proxy_buffering off;
         proxy_read_timeout 600s;
-        proxy_set_header Host localhost:${toString ollamaPort};
+        proxy_set_header Host localhost:${toString config.services.ollama.port};
       '';
     };
   };
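
For reference, a sketch of how architect/llm.nix might read after this commit. The lines between the hunks are not shown in the diff, so the services.ollama block opener and the vhost attribute path (written here as architect.vhost."${backendDomain}") are assumptions for illustration; the sketch also assumes the nixpkgs services.ollama module exposes host and port options (defaulting to 127.0.0.1 and 11434), which is what allows the ollamaHost/ollamaPort locals to be dropped and the values read back through config.

{ config, pkgs, ... }:
let
  backendDomain = "ollama.giugl.io";
  frontendDomain = "llm.giugl.io";
  ollamaPkg = pkgs.unstablePkgs.ollama-cuda;
in
{
  # host/port are no longer set here; the module defaults
  # (assumed to be 127.0.0.1:11434) apply and are referenced via `config`.
  services.ollama = {
    enable = true;
    package = ollamaPkg;
    acceleration = "cuda";
    environmentVariables = {
      OLLAMA_FLASH_ATTENTION = "1";
    };
  };

  # Attribute path assumed for illustration; only the location body
  # below is taken from the diff.
  architect.vhost."${backendDomain}" = {
    dnsInterfaces = [ "tailscale" "lan" ];
    locations."/" = {
      # The proxy now reads the backend address from the ollama module
      # instead of duplicating it in local let bindings.
      host = config.services.ollama.host;
      port = config.services.ollama.port;
      allowLan = true;
      allowWAN = true;
      recommendedProxySettings = false;
      extraConfig = ''
        proxy_buffering off;
        proxy_read_timeout 600s;
        proxy_set_header Host localhost:${toString config.services.ollama.port};
      '';
    };
  };
}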