feat: add wireshark and refactor ollama modules

2026-01-13 14:11:37 +05:30
parent 124e3fedfd
commit 74d5ec1426
10 changed files with 107 additions and 200 deletions

View File

@@ -10,5 +10,6 @@
     ./nix-ld.nix
     ./gamemode.nix
     ./droidcam.nix
+    ./wireshark.nix
   ];
 }

View File

@@ -0,0 +1,14 @@
+{
+  pkgs,
+  device,
+  ...
+}: {
+  programs.wireshark = {
+    enable = true;
+    dumpcap.enable = true;
+  };
+  environment.systemPackages = with pkgs; [
+    wireshark-qt
+  ];
+  users.users.${device.user}.extraGroups = ["wireshark"];
+}
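
The device argument is not one of the standard NixOS module arguments; it is presumably threaded in through specialArgs by the flake that assembles this host. A minimal sketch of that wiring, with hypothetical names (the module only reads device.user):

# Hypothetical flake-side wiring for the device module argument;
# the host name and user name here are assumptions, not taken from this repo.
nixosConfigurations.ryu = nixpkgs.lib.nixosSystem {
  specialArgs.device = {
    user = "sailor"; # assumed; only device.user is consumed by wireshark.nix
  };
  modules = [ ./wireshark.nix ];
};

With programs.wireshark.dumpcap.enable set, NixOS installs a capability-wrapped dumpcap, so members of the wireshark group can capture packets without root once they log in again.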

View File

@@ -4,7 +4,6 @@
     # ./sunshine.nix
     # ./zerotier.nix
     # ./dnscrypt.nix
-    ./llama.nix
     ./ollama.nix
     ./tailscale.nix
     ./samba.nix

View File

@@ -1,35 +0,0 @@
-{
-  pkgs,
-  config,
-  inputs,
-  ...
-}: {
-  # llama-cpp = {
-  #   enable = false;
-  #   port = 11345;
-  #   # model = "/nix/store/ch6z9di3l0k54ad29pzv8k3zv47q30d1-Qwen3-Coder-30B-A3B-Instruct-Q4_K_M.gguf";
-  #   model = pkgs.fetchurl {
-  #     # url = "https://huggingface.co/lmstudio-community/gpt-oss-20b-GGUF/resolve/main/gpt-oss-20b-MXFP4.gguf";
-  #     # sha256 = "65d06d31a3977d553cb3af137b5c26b5f1e9297a6aaa29ae7caa98788cde53ab";
-  #     url = "https://huggingface.co/lmstudio-community/Qwen3-Coder-30B-A3B-Instruct-GGUF/resolve/main/Qwen3-Coder-30B-A3B-Instruct-Q4_K_M.gguf";
-  #     sha256 = "79ad15a5ee3caddc3f4ff0db33a14454a5a3eb503d7fa1c1e35feafc579de486";
-  #   };
-  #   extraFlags = [
-  #     "-c"
-  #     "98304"
-  #     "--jinja"
-  #     "--chat-template-file"
-  #     "${../../../assets/chat.hbs}"
-  #     # "/nix/store/4zk1p50hrzghp3jzzysz96pa64i2kmjl-promp.hbs"
-  #   ];
-  #   # package = inputs.llama-cpp.packages.${pkgs.system}.cuda;
-  # };
-  services = {
-    caddy = {
-      virtualHosts."llama.ryu.darksailor.dev".extraConfig = ''
-        import cloudflare
-        reverse_proxy localhost:11345
-      '';
-    };
-  };
-}

View File

@@ -4,43 +4,53 @@
   config,
   ...
 }: {
+  sops = {
+    secrets."openai/api_key" = {};
+    secrets."llama/api_key".owner = config.services.caddy.user;
+    templates = {
+      "LLAMA_API_KEY.env".content = ''
+        LLAMA_API_KEY=${config.sops.placeholder."llama/api_key"}
+      '';
+    };
+  };
   services = {
     ollama = {
       enable = true;
       host = "0.0.0.0";
-      # loadModels = ["deepseek-r1:7b" "deepseek-r1:14b" "RobinBially/nomic-embed-text-8k" "qwen3:8b" "qwen3:14b"];
+      # loadModels = ["deepseek-r1:7b" "deepseek-r1:14b" "RobinBially/nomic-embed-text-8k" "qwen3:8b" "qwen3:14b"];
       port = 11434;
       # acceleration = "cuda";
       environmentVariables = {
         OLLAMA_ORIGINS = "*";
         OLLAMA_LLM_LIBRARY = "cuda";
         LD_LIBRARY_PATH = "run/opengl-driver/lib";
-        HTTP_PROXY = "https://ollama.ryu.darksailor.dev";
+        HTTP_PROXY = "https://ollama.darksailor.dev";
       };
       package = pkgs.ollama-cuda;
     };
-    # open-webui = {
-    #   enable = false;
-    #   environment = {
-    #     OLLAMA_BASE_URL = "http://127.0.0.1:11434";
-    #     WEBUI_AUTH = "False";
-    #     ENABLE_LOGIN_FORM = "False";
-    #   };
-    # };
     caddy = {
-      # virtualHosts."llama.ryu.darksailor.dev".extraConfig = ''
-      #   import cloudflare
-      #   forward_auth tako:5555 {
-      #     uri /api/authz/forward-auth
-      #     copy_headers Remote-User Remote-Groups Remote-Email Remote-Name
-      #   }
-      #   reverse_proxy localhost:${builtins.toString config.services.open-webui.port}
-      # '';
-      virtualHosts."ollama.ryu.darksailor.dev".extraConfig = ''
+      virtualHosts."ollama.darksailor.dev".extraConfig = ''
         import cloudflare
-        reverse_proxy localhost:${builtins.toString config.services.ollama.port}
+        @apikey {
+          header Authorization "Bearer {env.LLAMA_API_KEY}"
+        }
+        handle @apikey {
+          header {
+            # Set response headers or proxy to a different service if API key is valid
+            Access-Control-Allow-Origin *
+            -Authorization "Bearer {env.LLAMA_API_KEY}" # Remove the header after validation
+          }
+          reverse_proxy localhost:${builtins.toString config.services.ollama.port}
+        }
+        respond "Unauthorized" 403
       '';
     };
   };
+  systemd.services.caddy = {
+    serviceConfig = {
+      EnvironmentFile = config.sops.templates."LLAMA_API_KEY.env".path;
+    };
+  };
 }
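
The secret plumbing above works end to end as follows: sops-nix decrypts llama/api_key and renders the LLAMA_API_KEY.env template at activation time (outside the Nix store), systemd hands that file to Caddy through EnvironmentFile, and the Caddyfile reads it back as {env.LLAMA_API_KEY}. Requests carrying the matching bearer token are proxied to ollama with the Authorization header stripped; everything else falls through to the 403. A stripped-down sketch of the same gate, with a hypothetical hostname and upstream port:

# Minimal bearer-token gate in the same style; api.example.com and port 8080
# are placeholders, and API_KEY is assumed to reach Caddy via an
# EnvironmentFile as above.
services.caddy.virtualHosts."api.example.com".extraConfig = ''
  @authorized header Authorization "Bearer {env.API_KEY}"
  handle @authorized {
    reverse_proxy localhost:8080
  }
  respond "Unauthorized" 403
'';

Because the header matcher compares the value verbatim, clients must send exactly the configured token; the key itself never lands in the world-readable Nix store.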

View File

@@ -2,7 +2,7 @@
   imports = [
     ./games
     # ./headscale.nix
-    # ./llama.nix
+    ./llms.nix
     # ./monitoring.nix
     # ./paperless.nix
     ./navidrome.nix

View File

@@ -1,103 +0,0 @@
-{
-  config,
-  pkgs,
-  inputs,
-  ...
-}: {
-  sops = {
-    secrets."llama/api_key".owner = config.services.caddy.user;
-    secrets."llama/user".owner = config.services.caddy.user;
-    secrets."openai/api_key" = {};
-    templates = {
-      "LLAMA_API_KEY.env".content = ''
-        LLAMA_API_KEY=${config.sops.placeholder."llama/api_key"}
-      '';
-      api_key_env.owner = config.services.caddy.user;
-      "OPENAI_API_KEY.env".content = ''
-        OPENAI_API_KEY="${config.sops.placeholder."openai/api_key"}"
-      '';
-    };
-  };
-  services = {
-    llama-cpp = {
-      enable = false;
-      port = 11435;
-      model = pkgs.fetchurl {
-        url = "https://huggingface.co/lmstudio-community/gpt-oss-20b-GGUF/resolve/main/gpt-oss-20b-MXFP4.gguf";
-        sha256 = "65d06d31a3977d553cb3af137b5c26b5f1e9297a6aaa29ae7caa98788cde53ab";
-      };
-      # package = pkgs.ik_llama;
-    };
-    ollama = {
-      enable = false;
-      loadModels = [
-        "deepseek-r1:7b"
-        "deepseek-r1:14b"
-        "RobinBially/nomic-embed-text-8k"
-        "qwen3:8b"
-      ];
-      port = 11434;
-      host = "0.0.0.0";
-      environmentVariables = {
-        OLLAMA_ORIGINS = "*";
-      };
-    };
-    open-webui = {
-      enable = false;
-      port = 7070;
-      environment = {
-        SCARF_NO_ANALYTICS = "True";
-        DO_NOT_TRACK = "True";
-        ANONYMIZED_TELEMETRY = "False";
-        WEBUI_AUTH = "False";
-        ENABLE_LOGIN_FORM = "False";
-        WEBUI_URL = "https://llama.darksailor.dev";
-        OPENAI_BASE_URL = "https://ollama.darksailor.dev/v1";
-        OLLAMA_API_BASE_URL = "https://ollama.ryu.darksailor.dev";
-      };
-      environmentFile = "${config.sops.templates."LLAMA_API_KEY.env".path}";
-    };
-    caddy = {
-      virtualHosts."llama.darksailor.dev".extraConfig = ''
-        import auth
-        reverse_proxy localhost:${builtins.toString config.services.open-webui.port}
-      '';
-      virtualHosts."ollama.darksailor.dev".extraConfig = ''
-        @apikey {
-          header Authorization "Bearer {env.LLAMA_API_KEY}"
-        }
-        handle @apikey {
-          header {
-            # Set response headers or proxy to a different service if API key is valid
-            Access-Control-Allow-Origin *
-            -Authorization "Bearer {env.LLAMA_API_KEY}" # Remove the header after validation
-          }
-          reverse_proxy localhost:${builtins.toString config.services.llama-cpp.port}
-        }
-        respond "Unauthorized" 403
-      '';
-    };
-    authelia = {
-      instances.darksailor = {
-        settings = {
-          access_control = {
-            rules = [
-              {
-                domain = "llama.darksailor.dev";
-                policy = "one_factor";
-              }
-            ];
-          };
-        };
-      };
-    };
-  };
-  systemd.services.caddy = {
-    serviceConfig = {
-      EnvironmentFile = config.sops.templates."LLAMA_API_KEY.env".path;
-    };
-  };
-}

View File

@@ -0,0 +1,53 @@
+{config, ...}: {
+  sops = {
+    secrets."llama/api_key".owner = config.services.caddy.user;
+    secrets."openai/api_key" = {};
+    templates = {
+      "LLAMA_API_KEY.env".content = ''
+        LLAMA_API_KEY=${config.sops.placeholder."llama/api_key"}
+      '';
+    };
+  };
+  services = {
+    open-webui = {
+      enable = true;
+      port = 7070;
+      environment = {
+        SCARF_NO_ANALYTICS = "True";
+        DO_NOT_TRACK = "True";
+        ANONYMIZED_TELEMETRY = "False";
+        WEBUI_AUTH = "False";
+        ENABLE_LOGIN_FORM = "False";
+        WEBUI_URL = "https://llama.darksailor.dev";
+        OLLAMA_API_BASE_URL = "https://ollama.darksailor.dev";
+      };
+      environmentFile = "${config.sops.templates."LLAMA_API_KEY.env".path}";
+    };
+    caddy = {
+      virtualHosts."chat.darksailor.dev".extraConfig = ''
+        import auth
+        reverse_proxy localhost:${builtins.toString config.services.open-webui.port}
+      '';
+    };
+    authelia = {
+      instances.darksailor = {
+        settings = {
+          access_control = {
+            rules = [
+              {
+                domain = "chat.darksailor.dev";
+                policy = "one_factor";
+              }
+            ];
+          };
+        };
+      };
+    };
+  };
+  systemd.services.caddy = {
+    serviceConfig = {
+      EnvironmentFile = config.sops.templates."LLAMA_API_KEY.env".path;
+    };
+  };
+}
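
The import auth line pulls in a Caddyfile snippet defined elsewhere in this config. Judging by the commented-out forward_auth block removed from ollama.nix above, it presumably sends each request to Authelia before proxying, which is what makes the one_factor rule on chat.darksailor.dev take effect. An assumed sketch of such a snippet, reconstructed from that removed block (the real definition lives elsewhere in this repo):

# Assumed shape of the shared (auth) snippet; the tako:5555 endpoint and
# forwarded headers are taken from the forward_auth block removed above.
services.caddy.extraConfig = ''
  (auth) {
    forward_auth tako:5555 {
      uri /api/authz/forward-auth
      copy_headers Remote-User Remote-Groups Remote-Email Remote-Name
    }
  }
'';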