feat: Ollama

uttarayan21
2025-07-29 23:11:04 +05:30
parent 3324ae280d
commit c655cb26f1
7 changed files with 109 additions and 12 deletions

View File

@@ -0,0 +1,38 @@
{
  config,
  pkgs,
  ...
}: {
  sops = {
    secrets."hetzner/api_key".owner = config.services.caddy.user;
    templates = {
      "HETZNER_API_KEY.env".content = ''
        HETZNER_API_KEY=${config.sops.placeholder."hetzner/api_key"}
      '';
    };
  };
  services = {
    caddy = {
      enable = true;
      extraConfig = ''
        (hetzner) {
          tls {
            propagation_timeout -1
            propagation_delay 120s
            dns hetzner {env.HETZNER_API_KEY}
            resolvers 1.1.1.1
          }
        }
      '';
      package = pkgs.caddy.withPlugins {
        plugins = ["github.com/caddy-dns/hetzner@v1.0.0"];
        hash = "sha256-9ea0CfOHG7JhejB73HjfXQpnonn+ZRBqLNz1fFRkcDQ=";
      };
    };
  };
  systemd.services.caddy = {
    serviceConfig = {
      EnvironmentFile = config.sops.templates."HETZNER_API_KEY.env".path;
    };
  };
}

View File

@@ -1,10 +1,10 @@
{...}: {
  imports = [
    # ./ollama.nix
    # ./rsyncd.nix
    # ./sunshine.nix
    # ./zerotier.nix
    # ./dnscrypt.nix
    ./ollama.nix
    ./tailscale.nix
    ./samba.nix
    ./mullvad.nix
@@ -14,7 +14,4 @@
    ./minecraft.nix
    ./fwupd.nix
  ];
  services = {
    # hardware.openrgb.enable = true;
  };
}

View File

@@ -1,19 +1,55 @@
{pkgs, ...}: {
{
  pkgs,
  lib,
  config,
  ...
}: {
  services = {
    ollama = {
      enable = false;
      enable = true;
      host = "127.0.0.1";
      loadModels = ["deepseek-r1:7b" "deepseek-r1:14b"];
      port = 11434;
      acceleration = "cuda";
      environmentVariables = {
        OLLAMA_LLM_LIBRARY = "cuda";
        LD_LIBRARY_PATH = "/run/opengl-driver/lib";
      };
    };
    open-webui = {
      enable = false;
      enable = true;
      environment = {
        OLLAMA_BASE_URL = "http://127.0.0.1:11434";
        WEBUI_AUTH = "False";
        ENABLE_LOGIN_FORM = "False";
      };
    };
    caddy = {
      virtualHosts."llama.ryu.darksailor.dev".extraConfig = ''
        import hetzner
        forward_auth mirai:5555 {
          uri /api/authz/forward-auth
          copy_headers Remote-User Remote-Groups Remote-Email Remote-Name
        }
        reverse_proxy localhost:${builtins.toString config.services.open-webui.port}
      '';
      virtualHosts."ollama.ryu.darksailor.dev".extraConfig = ''
        import hetzner
        @apikey {
          header Authorization "Bearer {env.LLAMA_API_KEY}"
        }
        handle @apikey {
          header {
            # Set response headers or proxy to a different service if API key is valid
            Access-Control-Allow-Origin *
            -Authorization "Bearer {env.LLAMA_API_KEY}" # Remove the header after validation
          }
          reverse_proxy localhost:${builtins.toString config.services.ollama.port}
        }
        respond "Unauthorized" 403
      '';
    };
  };
}
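
The @apikey matcher on ollama.ryu.darksailor.dev reads {env.LLAMA_API_KEY}, so that variable has to reach Caddy's environment somewhere in the changed files not shown above. A minimal sketch of how that wiring could look, mirroring the HETZNER_API_KEY pattern from the first file; the secret name "llama/api_key" and the template filename are assumptions, not taken from this diff:

{config, ...}: {
  # Hypothetical companion module (the file that actually does this is not shown here):
  # provision LLAMA_API_KEY for Caddy the same way HETZNER_API_KEY is provisioned above.
  sops = {
    secrets."llama/api_key".owner = config.services.caddy.user;
    templates."LLAMA_API_KEY.env".content = ''
      LLAMA_API_KEY=${config.sops.placeholder."llama/api_key"}
    '';
  };
  # The rendered template is handed to the caddy unit next to HETZNER_API_KEY.env;
  # systemd accepts repeated EnvironmentFile= entries, so a list merges with the
  # single path set in the first file.
  systemd.services.caddy.serviceConfig.EnvironmentFile = [
    config.sops.templates."LLAMA_API_KEY.env".path
  ];
}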