diff --git a/home/apps/ghostty.nix b/home/apps/ghostty.nix
index db9fbd40..ba517db3 100644
--- a/home/apps/ghostty.nix
+++ b/home/apps/ghostty.nix
@@ -12,10 +12,25 @@
       "Hasklug Nerd Font Mono"
     ];
     window-decoration = false;
-    title = "";
+    title = "ghostty";
     command = "fish";
     background-opacity = 0.8;
     theme = "catppuccin-mocha";
+    custom-shader = "~/.config/ghostty/shader.glsl";
+    # custom-shader = toString (pkgs.writeText "shader.glsl"
+    #   /*
+    #   glsl
+    #   */
+    #   ''
+    #     void mainImage(out vec4 fragColor, in vec2 fragCoord) {
+    #       vec2 uv = fragCoord / iResolution.xy;
+    #       vec3 col = vec3(0.0);
+    #       col.r = 0.1 + 0.9 * uv.x;
+    #       col.g = 0.1 + 0.9 * uv.y;
+    #       col.b = 0.2;
+    #       fragColor = vec4(col, 1.0);
+    #     }
+    #   '');
   };
   systemd.enable = true;
   themes = {
diff --git a/nixos/ryu/containers/default.nix b/nixos/ryu/containers/default.nix
index 93872c9c..d01fe771 100644
--- a/nixos/ryu/containers/default.nix
+++ b/nixos/ryu/containers/default.nix
@@ -1,6 +1,6 @@
 {device, ...}: {
   imports = [
-    # ./immich-machine-learning.nix
+    ./immich-machine-learning.nix
   ];
   virtualisation = {
     docker.enable = true;
diff --git a/nixos/ryu/containers/immich-machine-learning.nix b/nixos/ryu/containers/immich-machine-learning.nix
index 43e66266..0b2338bb 100644
--- a/nixos/ryu/containers/immich-machine-learning.nix
+++ b/nixos/ryu/containers/immich-machine-learning.nix
@@ -2,12 +2,11 @@
   port = 3003;
 in {
   virtualisation.oci-containers = {
-    backend = "docker";
     containers = {
       immich-machine-learning = {
         image = "ghcr.io/immich-app/immich-machine-learning:v${pkgs.immich.version}-cuda";
         ports = [
-          "0.0.0.0:${toString port}:3003"
+          "127.0.0.1:${toString port}:3003"
         ];
         volumes = [
           "model-cache:/cache"
@@ -20,7 +19,4 @@ in {
     nvidia-docker
     nvidia-container-toolkit
   ];
-  # services.caddy.virtualHosts."ml.ryu.darksailor.dev".extraConfig = ''
-  #   reverse_proxy localhost:${toString port}
-  # '';
 }
diff --git a/nixos/ryu/services/ollama.nix b/nixos/ryu/services/ollama.nix
index e21844d0..25d710b9 100644
--- a/nixos/ryu/services/ollama.nix
+++ b/nixos/ryu/services/ollama.nix
@@ -25,6 +25,8 @@
       OLLAMA_LLM_LIBRARY = "cuda";
       LD_LIBRARY_PATH = "run/opengl-driver/lib";
       HTTP_PROXY = "https://ollama.darksailor.dev";
+      OLLAMA_CONTEXT_LENGTH = "32000";
+      OLLAMA_KEEP_ALIVE = "30m";
     };
     package = pkgs.ollama-cuda;
   };
diff --git a/nixos/tako/services/llms.nix b/nixos/tako/services/llms.nix
index efc200f6..2f6fb2f5 100644
--- a/nixos/tako/services/llms.nix
+++ b/nixos/tako/services/llms.nix
@@ -3,8 +3,9 @@
   secrets."llama/api_key".owner = config.services.caddy.user;
   secrets."openai/api_key" = {};
   templates = {
-    "LLAMA_API_KEY.env".content = ''
+    "ollama.env".content = ''
       LLAMA_API_KEY=${config.sops.placeholder."llama/api_key"}
+      OPENAI_API_KEYS=${config.sops.placeholder."openai/api_key"}
     '';
   };
 };
@@ -21,7 +22,7 @@
       WEBUI_URL = "https://chat.darksailor.dev";
       OLLAMA_BASE_URL = "https://ollama.darksailor.dev";
     };
-    environmentFile = "${config.sops.templates."LLAMA_API_KEY.env".path}";
+    environmentFile = "${config.sops.templates."ollama.env".path}";
   };

   caddy = {
@@ -47,7 +48,7 @@
   };
   systemd.services.caddy = {
     serviceConfig = {
-      EnvironmentFile = config.sops.templates."LLAMA_API_KEY.env".path;
+      EnvironmentFile = config.sops.templates."ollama.env".path;
     };
   };
 }