architect: use docker ollama

Giulio De Pasquale 2024-02-21 11:33:54 +00:00
parent 31a41642bb
commit 9b21c7d2ef


@@ -2,13 +2,13 @@
 let
   domain = "pino.giugl.io";
-  backendPort = 3000;
-  frontendPort = 3002;
-  llama-cpp = pkgs.unstablePkgs.llama-cpp.override { cudaSupport = true; };
-  ollama = pkgs.unstablePkgs.ollama.override { inherit llama-cpp; };
+  backendPort = 8080;
+  frontendPort = 3030;
+  # llama-cpp = pkgs.unstablePkgs.llama-cpp.override { cudaSupport = true; };
+  # ollama = pkgs.unstablePkgs.ollama.override { inherit llama-cpp; };
 in
 {
-  environment.systemPackages = [ ollama ];
+  # environment.systemPackages = [ ollama ];
   architect.vhost.${domain} = {
     dnsInterfaces = [ "tailscale" ];
@@ -16,25 +16,69 @@ in
       host = "172.17.0.1";
       port = frontendPort;
       allowLan = true;
-      allow = [ config.architect.networks."tailscale".net ];
+      allowWAN = true;
+      # allow = [ config.architect.networks."tailscale".net ];
+      extraConfig = ''
+        proxy_read_timeout 600s;
+      '';
+    };
+  };
+  architect.vhost."ollama.giugl.io" = {
+    dnsInterfaces = [ "tailscale" ];
+    locations."/" = {
+      host = "172.17.0.1";
+      port = 11434;
+      allowLan = true;
+      allowWAN = true;
+      # allow = [ config.architect.networks."tailscale".net ];
+      extraConfig = ''
+        proxy_read_timeout 600s;
+      '';
     };
   };
   virtualisation.oci-containers = {
     containers = {
-      big-agi = {
-        image = "ghcr.io/enricoros/big-agi:latest";
+      ollama-webui = {
+        image = "ghcr.io/open-webui/open-webui:main";
         autoStart = true;
         ports = [
           "172.17.0.1:${toString frontendPort}:${toString backendPort}"
         ];
-        environmentFiles = [
-          "/var/lib/llm/big-agi.env"
-        ];
+        environment = {
+          PORT = "${toString backendPort}";
+          OLLAMA_API_BASE_URL = "http://172.17.0.1:11434/api";
+        };
         extraOptions = [
           "--pull=always"
         ];
+        volumes = [
+          "/var/lib/ollama-webui:/app/backend/data"
+        ];
+      };
+      ollama = {
+        image = "ollama/ollama:latest";
+        autoStart = true;
+        extraOptions = [
+          "--pull=always"
+          "--gpus=all"
+        ];
+        environment = {
+          OLLAMA_ORIGINS = "*";
+        };
+        volumes = [
+          "/ollama:/root/.ollama"
+        ];
+        ports = [
+          "127.0.0.1:11434:11434"
+          "172.17.0.1:11434:11434"
+        ];
+      };
     };
   };
 };
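
Note: the ollama container requests the GPU with "--gpus=all", which only works if Docker on the host already has the NVIDIA container runtime registered. A minimal sketch of that host-side wiring, using the stock NixOS Docker module options (an assumption about the surrounding config, not part of this commit):

  { ... }:
  {
    virtualisation.docker.enable = true;
    # Registers the NVIDIA runtime with Docker (pulls in
    # nvidia-container-toolkit) so "--gpus=all" can be honoured.
    virtualisation.docker.enableNvidia = true;
  }

Once both containers are up, the backend can be smoke-tested from the host with e.g. "curl http://172.17.0.1:11434/api/tags", which lists locally pulled models via ollama's stock HTTP API; OLLAMA_ORIGINS = "*" is what lets the web UI, served from a different origin, call that same API from the browser.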