refactor(architect/llm.nix): comment out frontend vhost and container configurations

- Commented out the `frontendDomain` vhost configuration
- Switched the backend vhost from the hard-coded port `11434` to `ollamaPort` (the `let` bindings behind these names are sketched below)
- Added `recommendedProxySettings = false;` to the backend vhost location
- Replaced `OLLAMA_ORIGINS = "*"` with a commented-out, LAN-scoped value
- Commented out the entire OCI containers section for `ollama-webui`
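The diff below references `ollamaPkg`, `ollamaPort`, `frontendPort`, `frontendDomain`, and `backendDomain` from the module's `let` block, which lies outside the changed hunks. A minimal sketch of what those bindings could look like, purely for orientation: apart from `ollamaPort` matching the previously hard-coded `11434`, every value here is an assumption rather than something taken from this commit.

{ pkgs, ... }:
let
  ollamaPkg = pkgs.ollama;                   # assumed package pin; only the name appears in the diff
  ollamaPort = 11434;                        # matches the hard-coded port this commit replaces
  frontendPort = 3000;                       # assumed host port for the (now disabled) open-webui container
  backendDomain = "ollama.example.internal"; # placeholder domain
  frontendDomain = "chat.example.internal";  # placeholder domain
in
{
  # Assumed wiring of ollamaPort into the service itself; not shown in the diff.
  services.ollama = {
    enable = true;
    package = ollamaPkg;
    acceleration = "cuda";
    port = ollamaPort;
  };
}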
Author: Giulio De Pasquale
Date:   2024-11-17 20:29:57 +00:00
Parent: 2602da324e
Commit: 48f370d9a4


--- a/architect/llm.nix
+++ b/architect/llm.nix
@@ -20,34 +20,35 @@ in
     acceleration = "cuda";
     package = ollamaPkg;
     environmentVariables = {
-      OLLAMA_ORIGINS = "*";
+      # OLLAMA_ORIGINS = "10.0.0.0/24";
       OLLAMA_FLASH_ATTENTION = "1";
       OLLAMA_NUM_PARALLEL = "2";
     };
   };
 
-  architect.vhost.${frontendDomain} = {
-    dnsInterfaces = [ "tailscale" "lan" ];
-    locations."/" = {
-      host = "127.0.0.1";
-      port = frontendPort;
-      allowLan = true;
-      allowWAN = true;
-      extraConfig = ''
-        proxy_read_timeout 600s;
-      '';
-    };
-  };
+  # architect.vhost.${frontendDomain} = {
+  #   dnsInterfaces = [ "tailscale" "lan" ];
+  #   locations."/" = {
+  #     host = "127.0.0.1";
+  #     port = frontendPort;
+  #     allowLan = true;
+  #     allowWAN = true;
+  #     extraConfig = ''
+  #       proxy_read_timeout 600s;
+  #     '';
+  #   };
+  # };
 
   architect.vhost.${backendDomain} = {
     dnsInterfaces = [ "tailscale" "lan" ];
     locations."/" = {
       host = "127.0.0.1";
-      port = 11434;
+      port = ollamaPort;
       allowLan = true;
       allowWAN = true;
+      recommendedProxySettings = false;
       extraConfig = ''
         proxy_buffering off;
         proxy_read_timeout 600s;
@@ -56,27 +57,27 @@ in
     };
   };
 
-  virtualisation.oci-containers = {
-    containers = {
-      ollama-webui = {
-        image = "ghcr.io/open-webui/open-webui:main";
-        autoStart = true;
-        ports = [
-          "127.0.0.1:${toString frontendPort}:8080"
-        ];
-        environment = {
-          OLLAMA_BASE_URL = "https://${backendDomain}";
-        };
-        extraOptions = [
-          "--pull=always"
-        ];
-        volumes = [
-          "/var/lib/ollama-webui:/app/backend/data"
-        ];
-      };
-    };
-  };
+  # virtualisation.oci-containers = {
+  #   containers = {
+  #     ollama-webui = {
+  #       image = "ghcr.io/open-webui/open-webui:main";
+  #       autoStart = true;
+  #       ports = [
+  #         "127.0.0.1:${toString frontendPort}:8080"
+  #       ];
+  #       environment = {
+  #         OLLAMA_BASE_URL = "https://${backendDomain}";
+  #       };
+  #       extraOptions = [
+  #         "--pull=always"
+  #       ];
+  #       volumes = [
+  #         "/var/lib/ollama-webui:/app/backend/data"
+  #       ];
+  #     };
+  #   };
+  # };
 }