Enable Audacity and add local Ollama provider configuration

- Uncomment the ./audacity.nix import to enable the Audacity application
- Add Ollama provider configuration for local LLM access
- Configure glm-4.7-flash model with custom base URL
This commit is contained in:
2026-02-19 00:19:08 +05:30
parent c22ff38874
commit dca434c0ba
2 changed files with 17 additions and 2 deletions

View File

@@ -5,7 +5,7 @@
}:
lib.optionalAttrs device.hasGui {
imports = [
# ./audacity.nix
./audacity.nix
# ./bottles.nix
# ./cursor.nix
# ./gimp.nix

View File

@@ -6,5 +6,20 @@
# Only contribute these attributes on the "ryu" or "kuro" devices;
# on any other device this evaluates to an empty attribute set.
lib.optionalAttrs (device.is "ryu" || device.is "kuro") {
  programs.opencode = {
    enable = true;
    # Register a local Ollama instance as an opencode model provider.
    settings.provider = {
      ollama = {
        models = {
          # Single model exposed through this provider.
          "glm-4.7-flash" = {
            # "_launch" = true;
            name = "glm-4.7-flash";
          };
        };
        # Human-readable provider label shown in opencode's UI.
        name = "Ollama (local)";
        # Ollama serves an OpenAI-compatible API, so the generic
        # openai-compatible adapter package is used.
        npm = "@ai-sdk/openai-compatible";
        options = {
          # NOTE(review): labeled "local" but points at a remote hostname —
          # presumably a reverse proxy in front of a local Ollama; confirm.
          baseURL = "https://ollama.darksailor.dev/v1";
        };
      };
    };
  };
}