feat: Added open-webui

Author: uttarayan21
Date:   2025-02-05 18:21:20 +02:00
Commit: 9d78187a13
Parent: e6037d27d3
2 changed files with 80 additions and 65 deletions

View File

@@ -2,16 +2,18 @@
   imports = [
     ./atuin.nix
     ./authelia.nix
-    # ./home-assistant.nix
+    ./jellyfin.nix
+    ./llama.nix
+    ./minecraft.nix
+    ./nextcloud.nix
+    ./tailscale.nix
+    ./vscode.nix
+    # ./seafile.nix
     # ./navidrome.nix
+    # ./ldap.nix
+    # ./home-assistant.nix
     # ./llama.nix
     # ./nextcloud.nix
-    # ./seafile.nix
-    ./minecraft.nix
-    ./jellyfin.nix
-    ./vscode.nix
-    ./tailscale.nix
-    # ./ldap.nix
   ];
   services = {
     nix-serve = {
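
For readability, the imports list as it reads after this change, reassembled from the context and added lines of the hunk above (indentation is approximate; the two trailing commented entries are pre-existing lines that the reordering leaves in place):

  imports = [
    ./atuin.nix
    ./authelia.nix
    ./jellyfin.nix
    ./llama.nix
    ./minecraft.nix
    ./nextcloud.nix
    ./tailscale.nix
    ./vscode.nix
    # ./seafile.nix
    # ./navidrome.nix
    # ./ldap.nix
    # ./home-assistant.nix
    # ./llama.nix
    # ./nextcloud.nix
  ];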

View File

@@ -1,89 +1,102 @@
-{config, ...}: {
+{
+  config,
+  pkgs,
+  ...
+}: {
   sops = {
     secrets."llama/api_key".owner = config.services.caddy.user;
     secrets."llama/user".owner = config.services.caddy.user;
+    secrets."openai/api_key" = {};
     templates = {
       "LLAMA_API_KEY.env".content = ''
         LLAMA_API_KEY=${config.sops.placeholder."llama/api_key"}
       '';
       api_key_env.owner = config.services.caddy.user;
+      "OPENAI_API_KEY.env".content = ''
+        OPENAI_API_KEY="${config.sops.placeholder."openai/api_key"}"
+      '';
     };
   };
   services = {
     ollama = {
       enable = true;
-      loadModels = ["RobinBially/nomic-embed-text-8k" "mistral" "hf.co/unsloth/DeepSeek-R1-GGUF:BF16"];
+      loadModels = ["deepseek-r1:7b"];
       port = 11434;
       host = "0.0.0.0";
       environmentVariables = {
         OLLAMA_ORIGINS = "*";
       };
     };
-    nextjs-ollama-llm-ui = {
-      enable = false;
-      port = 5096;
-      ollamaUrl = "https://llama.darksailor.dev/api/ollama";
+    open-webui = {
+      enable = true;
+      port = 7070;
+      environment = {
+        WEBUI_AUTH = "False";
+        WEBUI_URL = "https://llama.darksailor.dev";
+        ENABLE_LOGIN_FORM = "False";
+        OLLAMA_BASE_URL = "https://llama.darksailor.dev/api/ollama";
+        # OPENAI_BASE_URLS = "https://api.openai.com/v1;https://llama.darksailor.dev/api/v1";
+        OPENAI_BASE_URLS = "https://api.openai.com/v1";
+      };
+      environmentFile = "${config.sops.templates."OPENAI_API_KEY.env".path}";
     };
-    # llama-cpp = {
-    #   enable = false;
+    # llama-cpp = let
+    #   deepseek_r1 = map (part: "https://huggingface.co/unsloth/DeepSeek-R1-GGUF/resolve/main/DeepSeek-R1-UD-IQ1_M/DeepSeek-R1-UD-IQ1_M-0000${toString part}-of-00004.gguf?download=true") [1 2 3 4];
+    # in {
+    #   enable = true;
     #   host = "127.0.0.1";
     #   port = 3000;
-    #   model = builtins.fetchurl {
-    #     name = "qwen_2.5.1_coder_7b_instruct_gguf";
-    #     sha256 = "61834b88c1a1ce5c277028a98c4a0c94a564210290992a7ba301bbef96ef8eba";
-    #     url = "https://huggingface.co/bartowski/Qwen2.5.1-Coder-7B-Instruct-GGUF/resolve/main/Qwen2.5.1-Coder-7B-Instruct-Q8_0.gguf?download=true";
-    #   };
+    #   # model = builtins.fetchurl {
+    #   #   name = "qwen_2.5.1_coder_7b_instruct_gguf";
+    #   #   sha256 = "61834b88c1a1ce5c277028a98c4a0c94a564210290992a7ba301bbef96ef8eba";
+    #   #   url = "https://huggingface.co/bartowski/Qwen2.5.1-Coder-7B-Instruct-GGUF/resolve/main/Qwen2.5.1-Coder-7B-Instruct-Q8_0.gguf?download=true";
+    #   # };
+    #   model = deepseek_r1;
     # };
-    # nginx.virtualHosts."${config.services.nextcloud.hostName}".listen = [
-    #   {
-    #     addr = "127.0.0.1";
-    #     port = 8080; # NOT an exposed port
-    #   }
-    # ];
     caddy = {
-      # handle /api/ollama/* {
-      #   uri strip_prefix /api/ollama
-      #   reverse_proxy localhost:11434
-      #
-      #   @apikey {
-      #     header Authorization "Bearer {env.LLAMA_API_KEY}"
-      #   }
-      #
-      #   handle @apikey {
-      #     header {
-      #       # Set response headers or proxy to a different service if API key is valid
-      #       Access-Control-Allow-Origin *
-      #       -Authorization "Bearer {env.LLAMA_API_KEY}" # Remove the header after validation
-      #     }
-      #     reverse_proxy localhost:11434
-      #   }
-      #
-      #   handle {
-      #     respond "Unauthorized" 403
-      #   }
-      # }
       virtualHosts."llama.darksailor.dev".extraConfig = ''
         handle /api/v1/* {
           uri strip_prefix /api/v1
           reverse_proxy localhost:3000
           @apikey {
             header Authorization "Bearer {env.LLAMA_API_KEY}"
           }
           handle @apikey {
             header {
               # Set response headers or proxy to a different service if API key is valid
               Access-Control-Allow-Origin *
               -Authorization "Bearer {env.LLAMA_API_KEY}" # Remove the header after validation
             }
             reverse_proxy localhost:11434
           }
           handle {
             respond "Unauthorized" 403
           }
+        }
+        handle /api/ollama/* {
+          uri strip_prefix /api/ollama
+          reverse_proxy localhost:11434
+          @apikey {
+            header Authorization "Bearer {env.LLAMA_API_KEY}"
+          }
+          handle @apikey {
+            header {
+              # Set response headers or proxy to a different service if API key is valid
+              Access-Control-Allow-Origin *
+              -Authorization "Bearer {env.LLAMA_API_KEY}" # Remove the header after validation
+            }
+            reverse_proxy localhost:11434
+          }
+          handle {
+            respond "Unauthorized" 403
+          }
         }
         handle {
@@ -91,7 +104,7 @@
             uri /api/authz/forward-auth
             copy_headers Remote-User Remote-Groups Remote-Email Remote-Name
           }
-          reverse_proxy localhost:5096
+          reverse_proxy localhost:7070
         }
       '';
     };
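
Not part of the commit, but a quick way to exercise the new /api/ollama route and its bearer-token gate once this configuration is deployed. This is a minimal sketch: the helper name, the use of pkgs.writeShellApplication, and reading the key from an exported LLAMA_API_KEY variable are assumptions for illustration; the URL shape and the Authorization header come from the Caddy block above, and /api/tags is Ollama's stock model-listing endpoint.

{pkgs, ...}: {
  environment.systemPackages = [
    (pkgs.writeShellApplication {
      # Hypothetical helper, not defined anywhere in this repository.
      name = "llama-api-check";
      runtimeInputs = [pkgs.curl];
      text = ''
        # Caddy strips the /api/ollama prefix and proxies to Ollama on port 11434,
        # so this should return the JSON list of locally available models.
        curl -fsS \
          -H "Authorization: Bearer ''${LLAMA_API_KEY:?export LLAMA_API_KEY first}" \
          https://llama.darksailor.dev/api/ollama/api/tags
      '';
    })
  ];
}

Requests without a matching key are meant to fall through to the respond "Unauthorized" 403 handler added above.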