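# Ollama LLM server, reverse-proxied through the architect vhost module.
# The Open WebUI frontend (container and vhost) is currently disabled.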
{ pkgs, ... }:
let
  # Hostnames served by the reverse proxy for the web UI and the Ollama API.
  frontendDomain = "pino.giugl.io";
  backendDomain = "ollama.giugl.io";

  # Local listen ports for the web UI frontend and the Ollama API.
  frontendPort = 3030;
  ollamaPort = 11434;

  # Bind Ollama to loopback only; clients reach it through the vhost below.
  listenAddress = "127.0.0.1:${toString ollamaPort}";

  # Ollama package taken from the unstable package set.
  ollamaPkg = pkgs.unstablePkgs.ollama;
in
{
  environment = {
    systemPackages = [ ollamaPkg ];
  };
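
  # Local Ollama instance: CUDA acceleration, flash attention enabled,
  # and up to two requests served in parallel.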
  services.ollama = {
    inherit listenAddress;

    enable = true;
    acceleration = "cuda";
    package = ollamaPkg;

    environmentVariables = {
      # OLLAMA_ORIGINS = "10.0.0.0/24";
      OLLAMA_FLASH_ATTENTION = "1";
      OLLAMA_NUM_PARALLEL = "2";
    };
  };
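
  # Disabled: reverse-proxy vhost for the Open WebUI frontend.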
  # architect.vhost.${frontendDomain} = {
  #   dnsInterfaces = [ "tailscale" "lan" ];
  #
  #   locations."/" = {
  #     host = "127.0.0.1";
  #     port = frontendPort;
  #     allowLan = true;
  #     allowWAN = true;
  #     extraConfig = ''
  #       proxy_read_timeout 600s;
  #     '';
  #   };
  # };
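
  # Reverse-proxy vhost for the Ollama API, resolvable over Tailscale and the LAN.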
  architect.vhost.${backendDomain} = {
    dnsInterfaces = [ "tailscale" "lan" ];

    locations."/" = {
      host = "127.0.0.1";
      port = ollamaPort;
      allowLan = true;
      allowWAN = true;
      recommendedProxySettings = false;
      extraConfig = ''
        # Stream responses as they are generated and allow long generations.
        proxy_buffering off;
        proxy_read_timeout 600s;
        # Present a local Host header to the upstream Ollama server.
        proxy_set_header Host localhost:${toString ollamaPort};
      '';
    };
  };
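
  # Disabled: Open WebUI frontend container, pointed at the Ollama vhost.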
  # virtualisation.oci-containers = {
  #   containers = {
  #     ollama-webui = {
  #       image = "ghcr.io/open-webui/open-webui:main";
  #       autoStart = true;
  #
  #       ports = [
  #         "127.0.0.1:${toString frontendPort}:8080"
  #       ];
  #
  #       environment = {
  #         OLLAMA_BASE_URL = "https://${backendDomain}";
  #       };
  #
  #       extraOptions = [
  #         "--pull=always"
  #       ];
  #
  #       volumes = [
  #         "/var/lib/ollama-webui:/app/backend/data"
  #       ];
  #     };
  #   };
  # };
}