refactor(architect/llm.nix): comment out frontend vhost and container configurations
- Commented out the `frontendDomain` vhost configuration
- Updated backend port to use `ollamaPort`
- Added `recommendedProxySettings = false;` for backend vhost
- Commented out the entire OCI containers section for `ollama-webui`

parent 2602da324e
commit 48f370d9a4
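
Both hunk headers below end at `in`, so the bindings the diff references (`ollamaPkg`, `ollamaPort`, `frontendPort`, `frontendDomain`, `backendDomain`) come from a `let` block above the changed region that this diff does not show. A plausible reconstruction follows; only `ollamaPort = 11434` is implied by the change itself (it replaces the literal `11434`), every other value is a placeholder assumption:

let
  ollamaPkg = pkgs.ollama;              # assumption: actual package not shown in the diff
  ollamaPort = 11434;                   # implied: the diff swaps the literal 11434 for this binding
  frontendPort = 3000;                  # assumption: actual value not shown
  frontendDomain = "chat.example.com";  # assumption: placeholder domain
  backendDomain = "ollama.example.com"; # assumption: placeholder domain
in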
@@ -20,34 +20,35 @@ in
     acceleration = "cuda";
     package = ollamaPkg;
     environmentVariables = {
       OLLAMA_ORIGINS = "*";
       # OLLAMA_ORIGINS = "10.0.0.0/24";
       OLLAMA_FLASH_ATTENTION = "1";
       OLLAMA_NUM_PARALLEL = "2";
     };
   };

-  architect.vhost.${frontendDomain} = {
-    dnsInterfaces = [ "tailscale" "lan" ];
+  # architect.vhost.${frontendDomain} = {
+  #   dnsInterfaces = [ "tailscale" "lan" ];

-    locations."/" = {
-      host = "127.0.0.1";
-      port = frontendPort;
-      allowLan = true;
-      allowWAN = true;
-      extraConfig = ''
-        proxy_read_timeout 600s;
-      '';
-    };
-  };
+  #   locations."/" = {
+  #     host = "127.0.0.1";
+  #     port = frontendPort;
+  #     allowLan = true;
+  #     allowWAN = true;
+  #     extraConfig = ''
+  #       proxy_read_timeout 600s;
+  #     '';
+  #   };
+  # };

   architect.vhost.${backendDomain} = {
     dnsInterfaces = [ "tailscale" "lan" ];

     locations."/" = {
       host = "127.0.0.1";
-      port = 11434;
+      port = ollamaPort;
       allowLan = true;
       allowWAN = true;
+      recommendedProxySettings = false;
       extraConfig = ''
         proxy_buffering off;
         proxy_read_timeout 600s;
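
Two of the backend changes concern proxying behaviour: `recommendedProxySettings = false;` drops the module's default proxy directives in favour of the hand-written ones (presumably to keep full control over what reaches Ollama), while the existing `proxy_buffering off;` lets generated tokens stream to the client instead of being buffered by nginx. Assuming `architect.vhost` is a local wrapper around `services.nginx` (the wrapper itself is not part of this diff), the backend location would render to roughly:

# Sketch only: the real mapping depends on the architect.vhost module, which
# is not shown here. Domain and port follow the assumed bindings above.
services.nginx.virtualHosts."ollama.example.com".locations."/" = {
  proxyPass = "http://127.0.0.1:11434";
  recommendedProxySettings = false;  # rely only on the directives below
  extraConfig = ''
    proxy_buffering off;             # flush tokens to the client as Ollama emits them
    proxy_read_timeout 600s;         # allow long generations without a gateway timeout
  '';
};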
@@ -56,27 +57,27 @@ in
     };
   };

-  virtualisation.oci-containers = {
-    containers = {
-      ollama-webui = {
-        image = "ghcr.io/open-webui/open-webui:main";
-        autoStart = true;
+  # virtualisation.oci-containers = {
+  #   containers = {
+  #     ollama-webui = {
+  #       image = "ghcr.io/open-webui/open-webui:main";
+  #       autoStart = true;

-        ports = [
-          "127.0.0.1:${toString frontendPort}:8080"
-        ];
+  #       ports = [
+  #         "127.0.0.1:${toString frontendPort}:8080"
+  #       ];

-        environment = {
-          OLLAMA_BASE_URL = "https://${backendDomain}";
-        };
+  #       environment = {
+  #         OLLAMA_BASE_URL = "https://${backendDomain}";
+  #       };

-        extraOptions = [
-          "--pull=always"
-        ];
-        volumes = [
-          "/var/lib/ollama-webui:/app/backend/data"
-        ];
-      };
-    };
-  };
+  #       extraOptions = [
+  #         "--pull=always"
+  #       ];
+  #       volumes = [
+  #         "/var/lib/ollama-webui:/app/backend/data"
+  #       ];
+  #     };
+  #   };
+  # };
 }
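
With both the frontend vhost and the container commented out, only the Ollama backend remains exposed; the Web UI's data volume at /var/lib/ollama-webui is left on disk. Should the UI be revived later, here or on another host, its only coupling to the backend is `OLLAMA_BASE_URL`. A minimal sketch reusing the removed block's values (port and domain are assumptions, as above):

# Hypothetical revival of the Web UI, pointed at the existing backend vhost.
virtualisation.oci-containers.containers.ollama-webui = {
  image = "ghcr.io/open-webui/open-webui:main";
  autoStart = true;
  ports = [ "127.0.0.1:3000:8080" ];                           # assumed frontendPort
  environment.OLLAMA_BASE_URL = "https://ollama.example.com";  # assumed backendDomain
  volumes = [ "/var/lib/ollama-webui:/app/backend/data" ];     # existing data survives
};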