llm: re-use ollama upstream

Author: Giulio De Pasquale
Date: 2024-05-23 23:45:48 +01:00
parent 854f818f23
commit 3e2e530d49


@@ -11,18 +11,19 @@ in
 {
   environment = {
     systemPackages = [ ollamaPkg pkgs.aichat ];
-    # variables = {
-    #   OLLAMA_ORIGINS = "*";
-    # };
+    variables = {
+      OLLAMA_ORIGINS = "*";
+      OLLAMA_FLASH_ATTENTION = "1";
+    };
   };
-  # services.ollama = {
-  #   inherit listenAddress;
-  #   enable = true;
-  #   acceleration = "cuda";
-  #   package = ollamaPkg;
-  # };
+  services.ollama = {
+    inherit listenAddress;
+    enable = true;
+    acceleration = "cuda";
+    package = ollamaPkg;
+  };
   architect.vhost.${frontendDomain} = {
     dnsInterfaces = [ "tailscale" ];
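
Taken together, this hunk promotes the previously commented-out upstream NixOS module to the real deployment: `OLLAMA_ORIGINS = "*"` relaxes Ollama's CORS allow-list so browser clients (such as the web UI) can call the API, and `OLLAMA_FLASH_ATTENTION = "1"` enables flash attention in the daemon. A self-contained sketch of the end state; the `let` bindings here are assumptions, since the actual values live in the part of the file this diff elides:

```nix
{ pkgs, ... }:
let
  # Hypothetical values; the real bindings sit in the file's elided `let` block.
  ollamaPkg = pkgs.ollama;
  listenAddress = "127.0.0.1:11434"; # assumed host:port form; 11434 is Ollama's default port
in
{
  environment = {
    systemPackages = [ ollamaPkg pkgs.aichat ];
    variables = {
      OLLAMA_ORIGINS = "*";         # allow API calls from any browser origin (CORS)
      OLLAMA_FLASH_ATTENTION = "1"; # enable flash attention in the daemon
    };
  };

  # Upstream NixOS module in place of the hand-rolled OCI container below.
  services.ollama = {
    inherit listenAddress;
    enable = true;
    acceleration = "cuda"; # run with CUDA GPU acceleration
    package = ollamaPkg;
  };
}
```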
@@ -56,24 +57,24 @@ in
   virtualisation.oci-containers = {
     containers = {
-      ollama = {
-        image = "ollama/ollama:latest";
-        autoStart = true;
-        extraOptions = [
-          "--pull=always"
-          "--gpus=all"
-        ];
-        environment = {
-          OLLAMA_ORIGINS = "*";
-        };
-        volumes = [
-          "/ollama:/root/.ollama"
-        ];
-        ports = [
-          "${listenAddress}:${toString ollamaPort}"
-          "172.17.0.1:${toString ollamaPort}:${toString ollamaPort}"
-        ];
-      };
+      # ollama = {
+      #   image = "ollama/ollama:latest";
+      #   autoStart = true;
+      #   extraOptions = [
+      #     "--pull=always"
+      #     "--gpus=all"
+      #   ];
+      #   environment = {
+      #     OLLAMA_ORIGINS = "*";
+      #   };
+      #   volumes = [
+      #     "/ollama:/root/.ollama"
+      #   ];
+      #   ports = [
+      #     "${listenAddress}:${toString ollamaPort}"
+      #     "172.17.0.1:${toString ollamaPort}:${toString ollamaPort}"
+      #   ];
+      # };
       ollama-webui = {
         image = "ghcr.io/open-webui/open-webui:main";