Files
dotfiles/nixos/ryu/services/llama.nix
uttarayan21 5f951eb9c7
All checks were successful
Flake checker / Build Nix targets (push) Successful in 9m27s
feat: Downgrade to 575 driver for nvidia and disable llama-cpp
2025-08-16 15:24:34 +05:30

26 lines
607 B
Nix

{
  inputs,
  pkgs,
  lib,
  config,
  ...
}: let
  # Port the local llama.cpp server listens on; the Caddy vhost below
  # reads it back out of `config` so the two always agree.
  llamaPort = 11435;
in {
  # llama.cpp inference server. Currently switched off (enable = false) —
  # per the commit message this accompanied an NVIDIA driver downgrade.
  services.llama-cpp = {
    enable = false;
    port = llamaPort;
    # GGUF model pulled from Hugging Face into the Nix store at build time.
    # NOTE(review): bare-hex sha256; could be migrated to an SRI `hash`.
    model = pkgs.fetchurl {
      url = "https://huggingface.co/lmstudio-community/gpt-oss-20b-GGUF/resolve/main/gpt-oss-20b-MXFP4.gguf";
      sha256 = "65d06d31a3977d553cb3af137b5c26b5f1e9297a6aaa29ae7caa98788cde53ab";
    };
    # package = pkgs.ik_llama;
  };

  # Public HTTPS endpoint reverse-proxied to the local llama.cpp port.
  # `import hetzner` presumably pulls in a shared Caddy snippet defined
  # elsewhere in this config — TODO confirm.
  services.caddy.virtualHosts."llama.ryu.darksailor.dev".extraConfig = ''
    import hetzner
    reverse_proxy localhost:${toString config.services.llama-cpp.port}
  '';
}