Enable Audacity and add local Ollama provider configuration
- Remove comment from audacity.nix to enable the Audacity application
- Add Ollama provider configuration for local LLM access
- Configure the glm-4.7-flash model with a custom base URL
This commit is contained in:
@@ -3,9 +3,9 @@
   device,
   ...
 }:
 lib.optionalAttrs device.hasGui {
   imports = [
-    # ./audacity.nix
+    ./audacity.nix
     # ./bottles.nix
     # ./cursor.nix
     # ./gimp.nix
@@ -6,5 +6,20 @@
 lib.optionalAttrs (device.is "ryu" || device.is "kuro") {
+  programs.opencode = {
+    enable = true;
+    settings.provider = {
+      ollama = {
+        models = {
+          "glm-4.7-flash" = {
+            # "_launch" = true;
+            name = "glm-4.7-flash";
+          };
+        };
+        name = "Ollama (local)";
+        npm = "@ai-sdk/openai-compatible";
+        options = {
+          baseURL = "https://ollama.darksailor.dev/v1";
+        };
+      };
+    };
+  };
 }
Reference in New Issue
Block a user