From dca434c0ba4a6a3f184d3a358592b9482392bc30 Mon Sep 17 00:00:00 2001
From: servius
Date: Thu, 19 Feb 2026 00:19:08 +0530
Subject: [PATCH] Enable Audacity and add local Ollama provider configuration

- Remove comment from audacity.nix to enable Audacity application
- Add Ollama provider configuration for local LLM access
- Configure glm-4.7-flash model with custom base URL
---
 home/apps/default.nix      |  4 ++--
 home/programs/opencode.nix | 15 +++++++++++++++
 2 files changed, 17 insertions(+), 2 deletions(-)

diff --git a/home/apps/default.nix b/home/apps/default.nix
index 18cb340e..8643c51b 100644
--- a/home/apps/default.nix
+++ b/home/apps/default.nix
@@ -3,9 +3,9 @@
   device,
   ...
 }:
-lib.optionalAttrs device.hasGui {
+ lib.optionalAttrs device.hasGui {
   imports = [
-    # ./audacity.nix
+    ./audacity.nix
     # ./bottles.nix
     # ./cursor.nix
     # ./gimp.nix
diff --git a/home/programs/opencode.nix b/home/programs/opencode.nix
index 9a7cec85..ea2c13ef 100644
--- a/home/programs/opencode.nix
+++ b/home/programs/opencode.nix
@@ -6,5 +6,20 @@
 lib.optionalAttrs (device.is "ryu" || device.is "kuro") {
   programs.opencode = {
     enable = true;
+    settings.provider = {
+      ollama = {
+        models = {
+          "glm-4.7-flash" = {
+            # "_launch" = true;
+            name = "glm-4.7-flash";
+          };
+        };
+        name = "Ollama (local)";
+        npm = "@ai-sdk/openai-compatible";
+        options = {
+          baseURL = "https://ollama.darksailor.dev/v1";
+        };
+      };
+    };
   };
 }