diff --git a/nixos/mirai/services/default.nix b/nixos/mirai/services/default.nix index 28614605..30dd7731 100644 --- a/nixos/mirai/services/default.nix +++ b/nixos/mirai/services/default.nix @@ -19,7 +19,7 @@ ./grafana.nix ./excalidraw.nix - ./desmos.nix + # ./desmos.nix # ./ldap.nix # ./llama.nix # ./navidrome.nix diff --git a/nixos/mirai/services/llama.nix b/nixos/mirai/services/llama.nix index e38ae71a..6fcf4dce 100644 --- a/nixos/mirai/services/llama.nix +++ b/nixos/mirai/services/llama.nix @@ -1,6 +1,7 @@ { config, pkgs, + inputs, ... }: { sops = { @@ -18,6 +19,15 @@ }; }; services = { + llama-cpp = { + enable = true; + port = 11435; + model = pkgs.fetchurl { + url = "https://huggingface.co/unsloth/gpt-oss-20b-GGUF/resolve/main/gpt-oss-20b-F16.gguf"; + sha256 = "sha256-vE1SpG4diQiP88u0viGnyZ8LtotTUU19UGecnwfjOkE="; + }; + package = pkgs.ik_llama; + }; ollama = { enable = true; loadModels = ["deepseek-r1:7b" "deepseek-r1:14b" "RobinBially/nomic-embed-text-8k" "qwen3:8b"]; diff --git a/nixos/ryu/services/caddy.nix b/nixos/ryu/services/caddy.nix index 71203e63..0e58aa2a 100644 --- a/nixos/ryu/services/caddy.nix +++ b/nixos/ryu/services/caddy.nix @@ -26,7 +26,9 @@ ''; package = pkgs.caddy.withPlugins { plugins = ["github.com/caddy-dns/hetzner@v1.0.0"]; - hash = "sha256-9ea0CfOHG7JhejB73HjfXQpnonn+ZRBqLNz1fFRkcDQ="; + # hash = "sha256-9ea0CfOHG7JhejB73HjfXQpnonn+ZRBqLNz1fFRkcDQ="; + # hash = "sha256-9ea0CfOHG7JhejB73HjfXQpnonn+ZRBqLNz1fFRkcDQ="; + hash = "sha256-YUrprDZQL+cX3P8fVLKHouXTMG4rw3sCaQdGqiq37uA="; }; }; }; diff --git a/nixos/ryu/services/default.nix b/nixos/ryu/services/default.nix index 292beec4..11261a80 100644 --- a/nixos/ryu/services/default.nix +++ b/nixos/ryu/services/default.nix @@ -5,7 +5,7 @@ # ./zerotier.nix # ./dnscrypt.nix # ./ollama.nix - # ./llama.nix + ./llama.nix ./tailscale.nix ./samba.nix ./mullvad.nix diff --git a/nixos/ryu/services/llama.nix b/nixos/ryu/services/llama.nix index ea1777aa..be9d9bcd 100--- 
a/nixos/ryu/services/llama.nix +++ b/nixos/ryu/services/llama.nix @@ -8,14 +8,12 @@ services = { llama-cpp = { enable = true; + port = 11435; model = pkgs.fetchurl { url = "https://huggingface.co/unsloth/gpt-oss-20b-GGUF/resolve/main/gpt-oss-20b-F16.gguf"; - sha256 = "b93a63c42fc2432396b56031bb1a4aa5f598af1de369de397a900888032cad64"; + sha256 = "sha256-vE1SpG4diQiP88u0viGnyZ8LtotTUU19UGecnwfjOkE="; }; - # package = pkgs.llama-cpp.overrideAttrs (old: { - # src = inputs.ik_llama; - # version = "5995"; - # }); + package = pkgs.ik_llama; }; # caddy = { # virtualHosts."llama.ryu.darksailor.dev".extraConfig = '' diff --git a/overlays.nix b/overlays.nix index 49c26c9c..a5dd5146 100644 --- a/overlays.nix +++ b/overlays.nix @@ -121,7 +121,10 @@ }; }; ddcbacklight = inputs.ddcbacklight.packages.${prev.system}.ddcbacklight; - # ghostty = inputs.ghostty.packages.${prev.system}.default; + ik_llama = prev.llama-cpp.overrideAttrs (oldAttrs: { + src = inputs.ik_llama; + version = "5995"; + }); python312 = prev.python312.override { packageOverrides = final: prev: { pysaml2 = prev.pysaml2.overridePythonAttrs (orig: {