feat: disable unused services (ollama, minecraft) and switch the ryu provider to llama-cpp
flake.lock (generated, 6 lines changed)
@@ -1701,11 +1701,11 @@
     "ik_llama": {
       "flake": false,
       "locked": {
-        "lastModified": 1754576781,
-        "narHash": "sha256-DeXIbHb5RtPn70Tg+s90oxq7EDxHRlB1wi8UuJ1v00E=",
+        "lastModified": 1754653098,
+        "narHash": "sha256-GyIIE9uSUv7Ipljk6lPJobtiHqEZNsl8bcO1MDq42DI=",
         "owner": "ikawrakow",
         "repo": "ik_llama.cpp",
-        "rev": "58f3bda0ae66e2ddf9e09ed7fe2b6242b0f8bb73",
+        "rev": "293f4aa433d1d38811d0f23328fa05289dbc30d6",
         "type": "github"
       },
       "original": {
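For context, the lock node above belongs to a non-flake GitHub input. A minimal sketch of the matching flake.nix declaration, assuming the usual pattern for non-flake sources (the declaration itself is not part of this diff):

inputs.ik_llama = {
  # Assumed declaration behind the "ik_llama" lock node above; the lock update
  # moves this source to rev 293f4aa4 of ikawrakow/ik_llama.cpp.
  url = "github:ikawrakow/ik_llama.cpp";
  flake = false;
};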

@@ -45,33 +45,37 @@
     {
       type = "openai-compatible";
       name = "ryu";
-      api_base = "https://ollama.ryu.darksailor.dev/v1";
+      api_base = "https://llama.ryu.darksailor.dev/v1";
       models = [
-        {
-          name = "RobinBially/nomic-embed-text-8k";
-          type = "embedding";
-          default_chunk_size = 8000;
-        }
-        {
-          name = "deepseek-r1:7b";
-          type = "chat";
-        }
-        {
-          name = "qwen3:30b-a3b";
-          type = "chat";
-        }
-        {
-          name = "deepseek-r1:14b";
-          type = "chat";
-        }
-        {
-          name = "qwen3:8b";
-          type = "chat";
-        }
         {
-          name = "qwen3:14b";
+          name = "gpt-oss-20b";
           type = "chat";
         }
+        # {
+        #   name = "RobinBially/nomic-embed-text-8k";
+        #   type = "embedding";
+        #   default_chunk_size = 8000;
+        # }
+        # {
+        #   name = "deepseek-r1:7b";
+        #   type = "chat";
+        # }
+        # {
+        #   name = "qwen3:30b-a3b";
+        #   type = "chat";
+        # }
+        # {
+        #   name = "deepseek-r1:14b";
+        #   type = "chat";
+        # }
+        # {
+        #   name = "qwen3:8b";
+        #   type = "chat";
+        # }
+        # {
+        #   name = "qwen3:14b";
+        #   type = "chat";
+        # }
       ];
     }
     {
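The ryu provider now targets the OpenAI-compatible API of a llama.cpp server rather than Ollama, and gpt-oss-20b replaces the old model list (kept commented out). The llama.ryu.darksailor.dev vhost that serves it is enabled further down in this commit. A minimal sketch of the server side this endpoint assumes (model path and port are placeholders, not values from this repo):

services.llama-cpp = {
  # Hypothetical sketch: llama-server exposes an OpenAI-compatible /v1 API
  # locally; Caddy publishes it as https://llama.ryu.darksailor.dev below.
  enable = true;
  model = "/models/gpt-oss-20b.gguf"; # placeholder path
  port = 8080;                        # referenced below as config.services.llama-cpp.port
};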

@@ -17,7 +17,7 @@ in {
       type = attrsOf str;
       default = {};
       description = ''
-        Urls that will be fetched ~/.config/tuifeed/urls.yml
+        Sources that will be fetched
       '';
       example = {};
     };
@@ -29,7 +29,7 @@ in {
         "show-timestamp" = true;
       };
       description = ''
-        Urls that will be fetched ~/.config/tuifeed/urls.yml
+        Options for article titles, such as showing the author and timestamp.
       '';
       example = {};
     };
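Both descriptions now match what the options hold, though each still ships an empty example = {};. A sketch of populated values, assuming the two options are the feed list and the article-title settings shown above (attribute names and feed URLs are illustrative only):

{
  # Hypothetical example values for the two tuifeed options documented above.
  sources = {
    lwn = "https://lwn.net/headlines/rss"; # feed name -> URL
    nixos-discourse = "https://discourse.nixos.org/latest.rss";
  };
  article-title = {
    "show-author" = true; # illustrative; only "show-timestamp" appears in this diff
    "show-timestamp" = true;
  };
}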

@@ -15,11 +15,11 @@
       };
       # package = pkgs.ik_llama;
     };
-    # caddy = {
-    #   virtualHosts."llama.ryu.darksailor.dev".extraConfig = ''
-    #     import hetzner
-    #     reverse_proxy localhost:${builtins.toString config.services.llama-cpp.port}
-    #   '';
-    # };
+    caddy = {
+      virtualHosts."llama.ryu.darksailor.dev".extraConfig = ''
+        import hetzner
+        reverse_proxy localhost:${builtins.toString config.services.llama-cpp.port}
+      '';
+    };
   };
 }
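The vhost imports a hetzner Caddy snippet that is defined elsewhere in this configuration and is not part of the diff. A sketch of where such a snippet is typically declared (the directives inside the snippet are placeholders, not the repo's actual contents):

services.caddy.extraConfig = ''
  # Hypothetical definition site for the snippet used by `import hetzner` above;
  # extraConfig lands at the top level of the Caddyfile, where snippets live.
  (hetzner) {
    encode zstd gzip
    header -Server
  }
'';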

@@ -5,7 +5,7 @@
   ...
 }: {
   sops = {
-    secrets."minecraft/craftmine".owner = "minecraft";
+    secrets."minecraft/craftmine" = {};
     templates = {
       "craftmine.env".content = ''
         CRAFTMINE_RCON_PASSWORD=${config.sops.placeholder."minecraft/craftmine"}
@@ -22,7 +22,7 @@
     };
   in {
     minecraft-servers = {
-      enable = true;
+      enable = false;
       eula = true;
       openFirewall = true;
       environmentFile = config.sops.templates."craftmine.env".path;
@@ -66,7 +66,7 @@
       # };
       craftmine-v2 = {
         inherit whitelist;
-        enable = true;
+        enable = false;
         jvmOpts = "-Xmx16G -Xms8G";
         package = let
           getJavaVersion = v: (builtins.getAttr "openjdk${toString v}" pkgs.javaPackages.compiler).headless;

@@ -6,7 +6,7 @@
 }: {
   services = {
     ollama = {
-      enable = true;
+      enable = false;
       host = "0.0.0.0";
       # loadModels = ["deepseek-r1:7b" "deepseek-r1:14b" "RobinBially/nomic-embed-text-8k" "qwen3:8b" "qwen3:14b"];
       port = 11434;

@@ -128,6 +128,12 @@
       llama-cpp = prev.llama-cpp.overrideAttrs (oldAttrs: {
         src = inputs.llama-cpp;
         version = "b6116";
+        cmakeFlags =
+          oldAttrs.cmakeFlags
+          ++ [
+            "-DLLAMA_CUDA=ON"
+            "-DGGML_CUDA_FORCE_CUBLAS=ON"
+          ];
       });
       python312 = prev.python312.override {
         packageOverrides = final: prev: {
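The added flags request the CUDA backend (LLAMA_CUDA) and force matrix multiplications through cuBLAS rather than the custom MMQ kernels (GGML_CUDA_FORCE_CUBLAS). The overridden src comes from a flake input named llama-cpp; a minimal sketch of the declaration this overlay assumes (URL and tag are an assumption based on version = "b6116"):

inputs.llama-cpp = {
  # Assumed non-flake input feeding `src = inputs.llama-cpp;` above,
  # pinned to the upstream release tag matching version = "b6116".
  url = "github:ggerganov/llama.cpp/b6116";
  flake = false;
};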