feat: add wireshark and refactor ollama modules
Some checks failed
Flake checker / Build Nix targets (push) Has been cancelled
@@ -16,42 +16,21 @@
     enableNushellIntegration = false;
     settings = {
       save_session = true;
       # model = "openai:gpt-4o";
-      model = "ryu:qwen3-coder-30b";
+      model = "ryu:qwen3:30b-a3b";
       rag_embedding_model = "ollama:RobinBially/nomic-embed-text-8k";
       clients = [
         {
           type = "openai-compatible";
-          name = "mirai";
+          name = "ryu";
           api_base = "https://ollama.darksailor.dev/v1";
           api_key_cmd = "cat ${config.sops.secrets."llama/api_key".path}";
           models = [
             # {
             #   name = "RobinBially/nomic-embed-text-8k";
             #   type = "embedding";
             #   default_chunk_size = 8000;
             # }
             {
-              name = "gpt-oss-20b";
+              name = "gpt-oss:20b";
               type = "chat";
             }
             # {
             #   name = "deepseek-r1:14b";
             #   type = "chat";
             # }
             # {
             #   name = "qwen3:8b";
             #   type = "chat";
             # }
           ];
         }
         {
           type = "openai-compatible";
           name = "ryu";
           api_base = "https://llama.ryu.darksailor.dev/v1";
           models = [
             {
-              name = "qwen3-coder-30b";
+              name = "qwen3:30b-a3b";
               type = "chat";
             }
             # {
@@ -60,14 +39,6 @@
             #   default_chunk_size = 8000;
             # }
             # {
             #   name = "deepseek-r1:7b";
             #   type = "chat";
             # }
             # {
             #   name = "qwen3:30b-a3b";
             #   type = "chat";
             # }
             # {
             #   name = "deepseek-r1:14b";
             #   type = "chat";
             # }
@@ -75,10 +46,6 @@
             #   name = "qwen3:8b";
             #   type = "chat";
             # }
             # {
             #   name = "qwen3:14b";
             #   type = "chat";
             # }
           ];
         }
         {
@@ -174,7 +141,7 @@
     */
     ''
       ---
-      model: ryu:qwen3-coder-30b
+      model: ryu:gpt-oss:20b
      ---
      Your task is to generate a concise and informative commit message based on the provided diff. Use the conventional commit format, which includes a type (feat, fix, chore, docs, style, refactor, perf, test) and an optional scope. The message should be in the imperative mood and should not exceed 72 characters in the subject line. Do not under any circumstance include any additional text or explanations, just add the commit message.
     '';
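For the `api_key_cmd` above to work, the same `llama/api_key` secret has to be declared wherever this aichat module is evaluated. A minimal sketch of that declaration, assuming sops-nix's home-manager module is imported and the key already exists in the encrypted secrets file:

    {
      # Assumed declaration matching the path interpolated into api_key_cmd.
      # sops-nix decrypts the key to a runtime-only file outside the Nix store,
      # and config.sops.secrets."llama/api_key".path resolves to that file, so
      # aichat picks up the bearer token via `cat` at startup instead of having
      # the key baked into the world-readable store.
      sops.secrets."llama/api_key" = {};
    }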
justfile (5 lines changed)
@@ -1,12 +1,13 @@
 set dotenv-load

 [macos]
 install:
     sudo nix run nix-darwin -- switch --flake .

 [linux]
-install:
-    sudo nixos-rebuild switch --flake . --builders '' --max-jobs 1
+install cores='32':
+    sudo nixos-rebuild switch --flake . --builders '' --max-jobs 1 --cores {{cores}}

 [macos]
 build:
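The Linux `install` recipe now takes a `cores` parameter defaulting to `'32'`: `just install` behaves as before, while e.g. `just install 8` throttles the rebuild's parallelism on smaller machines.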
@@ -10,5 +10,6 @@
     ./nix-ld.nix
     ./gamemode.nix
     ./droidcam.nix
+    ./wireshark.nix
   ];
 }
nixos/ryu/programs/wireshark.nix (new file, 14 lines)
@@ -0,0 +1,14 @@
+{
+  pkgs,
+  device,
+  ...
+}: {
+  programs.wireshark = {
+    enable = true;
+    dumpcap.enable = true;
+  };
+  environment.systemPackages = with pkgs; [
+    wireshark-qt
+  ];
+  users.users.${device.user}.extraGroups = ["wireshark"];
+}
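The NixOS wireshark module pairs `enable` with a capability-wrapped `dumpcap` (here via `dumpcap.enable`) that only members of the `wireshark` group may execute, so adding `${device.user}` to `extraGroups` is what lets packet captures run without launching the Qt GUI as root.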
@@ -4,7 +4,6 @@
   # ./sunshine.nix
   # ./zerotier.nix
   # ./dnscrypt.nix
-  ./llama.nix
   ./ollama.nix
   ./tailscale.nix
   ./samba.nix
@@ -1,35 +0,0 @@
-{
-  pkgs,
-  config,
-  inputs,
-  ...
-}: {
-  # llama-cpp = {
-  #   enable = false;
-  #   port = 11345;
-  #   # model = "/nix/store/ch6z9di3l0k54ad29pzv8k3zv47q30d1-Qwen3-Coder-30B-A3B-Instruct-Q4_K_M.gguf";
-  #   model = pkgs.fetchurl {
-  #     # url = "https://huggingface.co/lmstudio-community/gpt-oss-20b-GGUF/resolve/main/gpt-oss-20b-MXFP4.gguf";
-  #     # sha256 = "65d06d31a3977d553cb3af137b5c26b5f1e9297a6aaa29ae7caa98788cde53ab";
-  #     url = "https://huggingface.co/lmstudio-community/Qwen3-Coder-30B-A3B-Instruct-GGUF/resolve/main/Qwen3-Coder-30B-A3B-Instruct-Q4_K_M.gguf";
-  #     sha256 = "79ad15a5ee3caddc3f4ff0db33a14454a5a3eb503d7fa1c1e35feafc579de486";
-  #   };
-  #   extraFlags = [
-  #     "-c"
-  #     "98304"
-  #     "--jinja"
-  #     "--chat-template-file"
-  #     "${../../../assets/chat.hbs}"
-  #     # "/nix/store/4zk1p50hrzghp3jzzysz96pa64i2kmjl-promp.hbs"
-  #   ];
-  #   # package = inputs.llama-cpp.packages.${pkgs.system}.cuda;
-  # };
-  services = {
-    caddy = {
-      virtualHosts."llama.ryu.darksailor.dev".extraConfig = ''
-        import cloudflare
-        reverse_proxy localhost:11345
-      '';
-    };
-  };
-}
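With ryu's `ollama.nix` (next hunk) serving `ollama.darksailor.dev` directly, this module had little left to do: its llama-cpp server was already commented out, so the `llama.ryu.darksailor.dev` virtual host proxying port 11345 is retired along with the file.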
@@ -4,43 +4,53 @@
   config,
   ...
 }: {
   sops = {
     secrets."openai/api_key" = {};
     secrets."llama/api_key".owner = config.services.caddy.user;
     templates = {
       "LLAMA_API_KEY.env".content = ''
         LLAMA_API_KEY=${config.sops.placeholder."llama/api_key"}
       '';
     };
   };
   services = {
     ollama = {
       enable = true;
       host = "0.0.0.0";
       # loadModels = ["deepseek-r1:7b" "deepseek-r1:14b" "RobinBially/nomic-embed-text-8k" "qwen3:8b" "qwen3:14b"];
       port = 11434;
       # acceleration = "cuda";
       environmentVariables = {
         OLLAMA_ORIGINS = "*";
         OLLAMA_LLM_LIBRARY = "cuda";
         LD_LIBRARY_PATH = "run/opengl-driver/lib";
-        HTTP_PROXY = "https://ollama.ryu.darksailor.dev";
+        HTTP_PROXY = "https://ollama.darksailor.dev";
       };
       package = pkgs.ollama-cuda;
     };
     # open-webui = {
     #   enable = false;
     #   environment = {
     #     OLLAMA_BASE_URL = "http://127.0.0.1:11434";
     #     WEBUI_AUTH = "False";
     #     ENABLE_LOGIN_FORM = "False";
     #   };
     # };
     caddy = {
       # virtualHosts."llama.ryu.darksailor.dev".extraConfig = ''
       #   import cloudflare
       #   forward_auth tako:5555 {
       #     uri /api/authz/forward-auth
       #     copy_headers Remote-User Remote-Groups Remote-Email Remote-Name
       #   }
       #   reverse_proxy localhost:${builtins.toString config.services.open-webui.port}
       # '';
-      virtualHosts."ollama.ryu.darksailor.dev".extraConfig = ''
+      virtualHosts."ollama.darksailor.dev".extraConfig = ''
         import cloudflare
         reverse_proxy localhost:${builtins.toString config.services.ollama.port}
         @apikey {
           header Authorization "Bearer {env.LLAMA_API_KEY}"
         }

         handle @apikey {
           header {
             # Set response headers or proxy to a different service if API key is valid
             Access-Control-Allow-Origin *
             -Authorization "Bearer {env.LLAMA_API_KEY}" # Remove the header after validation
           }
           reverse_proxy localhost:${builtins.toString config.services.ollama.port}
         }

         respond "Unauthorized" 403
       '';
     };
   };
   systemd.services.caddy = {
     serviceConfig = {
       EnvironmentFile = config.sops.templates."LLAMA_API_KEY.env".path;
     };
   };
 }
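With `llama.nix` gone, the bearer-token gate now lives on this vhost: requests whose `Authorization` header matches `Bearer {env.LLAMA_API_KEY}` hit the `@apikey` matcher and are proxied to ollama with CORS opened up and the header stripped, while anything else falls through to the `respond "Unauthorized" 403`. A quick smoke test against the new endpoint, assuming the decrypted key is exported locally as `LLAMA_API_KEY`, would be `curl -H "Authorization: Bearer $LLAMA_API_KEY" https://ollama.darksailor.dev/api/tags`, which lists the models ollama has pulled.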
@@ -2,7 +2,7 @@
   imports = [
     ./games
     # ./headscale.nix
-    # ./llama.nix
+    ./llms.nix
    # ./monitoring.nix
    # ./paperless.nix
    ./navidrome.nix
@@ -1,103 +0,0 @@
-{
-  config,
-  pkgs,
-  inputs,
-  ...
-}: {
-  sops = {
-    secrets."llama/api_key".owner = config.services.caddy.user;
-    secrets."llama/user".owner = config.services.caddy.user;
-    secrets."openai/api_key" = {};
-    templates = {
-      "LLAMA_API_KEY.env".content = ''
-        LLAMA_API_KEY=${config.sops.placeholder."llama/api_key"}
-      '';
-      api_key_env.owner = config.services.caddy.user;
-      "OPENAI_API_KEY.env".content = ''
-        OPENAI_API_KEY="${config.sops.placeholder."openai/api_key"}"
-      '';
-    };
-  };
-  services = {
-    llama-cpp = {
-      enable = false;
-      port = 11435;
-      model = pkgs.fetchurl {
-        url = "https://huggingface.co/lmstudio-community/gpt-oss-20b-GGUF/resolve/main/gpt-oss-20b-MXFP4.gguf";
-        sha256 = "65d06d31a3977d553cb3af137b5c26b5f1e9297a6aaa29ae7caa98788cde53ab";
-      };
-      # package = pkgs.ik_llama;
-    };
-    ollama = {
-      enable = false;
-      loadModels = [
-        "deepseek-r1:7b"
-        "deepseek-r1:14b"
-        "RobinBially/nomic-embed-text-8k"
-        "qwen3:8b"
-      ];
-      port = 11434;
-      host = "0.0.0.0";
-      environmentVariables = {
-        OLLAMA_ORIGINS = "*";
-      };
-    };
-    open-webui = {
-      enable = false;
-      port = 7070;
-      environment = {
-        SCARF_NO_ANALYTICS = "True";
-        DO_NOT_TRACK = "True";
-        ANONYMIZED_TELEMETRY = "False";
-        WEBUI_AUTH = "False";
-        ENABLE_LOGIN_FORM = "False";
-        WEBUI_URL = "https://llama.darksailor.dev";
-        OPENAI_BASE_URL = "https://ollama.darksailor.dev/v1";
-        OLLAMA_API_BASE_URL = "https://ollama.ryu.darksailor.dev";
-      };
-      environmentFile = "${config.sops.templates."LLAMA_API_KEY.env".path}";
-    };
-
-    caddy = {
-      virtualHosts."llama.darksailor.dev".extraConfig = ''
-        import auth
-        reverse_proxy localhost:${builtins.toString config.services.open-webui.port}
-      '';
-      virtualHosts."ollama.darksailor.dev".extraConfig = ''
-        @apikey {
-          header Authorization "Bearer {env.LLAMA_API_KEY}"
-        }
-
-        handle @apikey {
-          header {
-            # Set response headers or proxy to a different service if API key is valid
-            Access-Control-Allow-Origin *
-            -Authorization "Bearer {env.LLAMA_API_KEY}" # Remove the header after validation
-          }
-          reverse_proxy localhost:${builtins.toString config.services.llama-cpp.port}
-        }
-
-        respond "Unauthorized" 403
-      '';
-    };
-    authelia = {
-      instances.darksailor = {
-        settings = {
-          access_control = {
-            rules = [
-              {
-                domain = "llama.darksailor.dev";
-                policy = "one_factor";
-              }
-            ];
-          };
-        };
-      };
-    };
-  };
-  systemd.services.caddy = {
-    serviceConfig = {
-      EnvironmentFile = config.sops.templates."LLAMA_API_KEY.env".path;
-    };
-  };
-}
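Of the 103 lines removed here, only the open-webui front end survives — it reappears in the new `llms.nix` below, with the web UI moving from `llama.darksailor.dev` to `chat.darksailor.dev`. The disabled llama-cpp and ollama instances, the key-checking `ollama.darksailor.dev` vhost (that duty now sits on ryu), and the extra sops templates are dropped outright.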
nixos/tako/services/llms.nix (new file, 53 lines)
@@ -0,0 +1,53 @@
+{config, ...}: {
+  sops = {
+    secrets."llama/api_key".owner = config.services.caddy.user;
+    secrets."openai/api_key" = {};
+    templates = {
+      "LLAMA_API_KEY.env".content = ''
+        LLAMA_API_KEY=${config.sops.placeholder."llama/api_key"}
+      '';
+    };
+  };
+  services = {
+    open-webui = {
+      enable = true;
+      port = 7070;
+      environment = {
+        SCARF_NO_ANALYTICS = "True";
+        DO_NOT_TRACK = "True";
+        ANONYMIZED_TELEMETRY = "False";
+        WEBUI_AUTH = "False";
+        ENABLE_LOGIN_FORM = "False";
+        WEBUI_URL = "https://llama.darksailor.dev";
+        OLLAMA_API_BASE_URL = "https://ollama.darksailor.dev";
+      };
+      environmentFile = "${config.sops.templates."LLAMA_API_KEY.env".path}";
+    };
+
+    caddy = {
+      virtualHosts."chat.darksailor.dev".extraConfig = ''
+        import auth
+        reverse_proxy localhost:${builtins.toString config.services.open-webui.port}
+      '';
+    };
+    authelia = {
+      instances.darksailor = {
+        settings = {
+          access_control = {
+            rules = [
+              {
+                domain = "chat.darksailor.dev";
+                policy = "one_factor";
+              }
+            ];
+          };
+        };
+      };
+    };
+  };
+  systemd.services.caddy = {
+    serviceConfig = {
+      EnvironmentFile = config.sops.templates."LLAMA_API_KEY.env".path;
+    };
+  };
+}
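The replacement keeps just the chat front end: open-webui on port 7070, published as `chat.darksailor.dev` behind Authelia one-factor and pointed at ryu's `ollama.darksailor.dev`. The decrypted key reaches both open-webui and caddy through the `LLAMA_API_KEY.env` sops template's environment files rather than the Nix store; the caddy `EnvironmentFile` only comes into play if a vhost here references `{env.LLAMA_API_KEY}` again.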