feat: Use proprietary nvidia driver and change refresh rates to max
@@ -168,11 +168,11 @@
     isNix = true;
     monitors = {
       # Gigabyte fo27q3
-      primary = "HDMI-A-3";
+      primary = "HDMI-A-1";
       # Acer XV272U
-      secondary = "DP-4";
+      secondary = "DP-3";
       # Gigabyte M27Q
-      tertiary = "DP-2";
+      tertiary = "DP-1";
     };
   }
   {
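Note: the connector names change along with the driver (HDMI-A-3 → HDMI-A-1, DP-4 → DP-3, DP-2 → DP-1). DRM connector numbering comes from the kernel driver, so moving to the proprietary NVIDIA module can renumber the same physical ports; if these ever need re-checking, `hyprctl monitors` lists the names Hyprland currently sees.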
@@ -33,9 +33,9 @@
   monitor = [
     # ",preferred,auto,auto"
     # ",highrr,auto,1"
-    "${device.monitors.primary}, 2560x1440@120, 0x0, 1, transform, 0"
-    "${device.monitors.secondary}, 2560x1440@60, -1440x-1120,1, transform, 1"
-    "${device.monitors.tertiary}, 2560x1440@60, 2560x-1120,1, transform, 3"
+    "${device.monitors.primary}, 2560x1440@360, 0x0, 1, transform, 0"
+    "${device.monitors.secondary}, 2560x1440@170, -1440x-1120,1, transform, 1"
+    "${device.monitors.tertiary}, 2560x1440@170, 2560x-1120,1, transform, 3"
     "Unknown-1,disable"
   ];
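Note: Hyprland monitor rules read as "name, resolution@refresh (Hz), position, scale" followed by optional keyword pairs; transform rotates the output (0 = none, 1 = 90°, 3 = 270°), which is why the two portrait side monitors keep transform 1 and 3. A sketch of one of the new rules with the fields spelled out, as a reading aid only (spacing adjusted for alignment):

# name                           mode           position     scale  extra args
"${device.monitors.secondary}, 2560x1440@170, -1440x-1120, 1,     transform, 1"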
@@ -130,7 +130,7 @@
     "float, class:org.kde.kdeconnect.app"
   ];

-  "misc:vfr" = true;
+  # "misc:vfr" = true;

   env = [
     "XCURSOR_SIZE,24"
@@ -13,12 +13,13 @@
   hardware.graphics = {
     enable = true;
     enable32Bit = true;
-    extraPackages = [pkgs.intel-compute-runtime];
+    extraPackages = [pkgs.intel-compute-runtime pkgs.nvidia-vaapi-driver];
   };

   virtualisation.libvirtd.enable = true;
   users.extraUsers.servius.extraGroups = ["libvirtd" "adbusers" "kvm"];

+  # options nvidia_drm modeset=1 fbdev=1
   boot.extraModprobeConfig = ''
     options kvm_intel nested=1
     options kvm_intel emulate_invalid_guest_state=0
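Note: pkgs.nvidia-vaapi-driver supplies an NVDEC-backed VA-API driver, but per its README it is only picked up when LIBVA_DRIVER_NAME=nvidia (and, on current drivers, NVD_BACKEND=direct) is exported; the environment.sessionVariables hunk further down in this commit does exactly that. `vainfo` from pkgs.libva-utils can confirm which VA-API driver libva ends up loading.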
@@ -37,7 +38,7 @@
   # Enable this if you have graphical corruption issues or application crashes after waking
   # up from sleep. This fixes it by saving the entire VRAM memory to /tmp/ instead
   # of just the bare essentials.
-  powerManagement.enable = false;
+  powerManagement.enable = true;

   # Fine-grained power management. Turns off GPU when not in use.
   # Experimental and only works on modern Nvidia GPUs (Turing or newer).
@@ -50,7 +51,7 @@
   # https://github.com/NVIDIA/open-gpu-kernel-modules#compatible-gpus
   # Only available from driver 515.43.04+
   # Currently alpha-quality/buggy, so false is currently the recommended setting.
-  open = true;
+  open = false;

   # Enable the Nvidia settings menu,
   # accessible via `nvidia-settings`.
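The options in these nvidia hunks all live inside one hardware.nvidia attribute set. As a reading aid, a sketch of how that block roughly looks after this commit; the option names are the standard NixOS ones, and every value not visible in the diff (modesetting, finegrained, nvidiaSettings) is an assumption rather than something this commit shows:

hardware.nvidia = {
  # Assumed: kernel modesetting, needed for Wayland compositors like Hyprland.
  modesetting.enable = true;
  # Changed here: save/restore VRAM across suspend (see the comment in the earlier hunk).
  powerManagement.enable = true;
  # Assumed unchanged: fine-grained runtime power management (Turing or newer only).
  powerManagement.finegrained = false;
  # Changed here: proprietary kernel module instead of the open one.
  open = false;
  # Assumed: keep the `nvidia-settings` menu available.
  nvidiaSettings = true;
  # From the next hunk: track the beta driver series.
  package = config.boot.kernelPackages.nvidiaPackages.beta;
};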
@@ -60,10 +61,16 @@
   package = config.boot.kernelPackages.nvidiaPackages.beta;
 };

+environment.sessionVariables = {
+  LIBVA_DRIVER_NAME = "nvidia";
+  NVD_BACKEND = "direct";
+  __GLX_VENDOR_LIBRARY_NAME = "nvidia";
+};
+
 # hardware.bluetooth.settings = {

 boot.initrd.availableKernelModules = ["vmd" "xhci_pci" "ahci" "nvme" "usbhid" "usb_storage" "sd_mod"];
-boot.initrd.kernelModules = [];
+boot.initrd.kernelModules = ["nvidia" "nvidia_modeset" "nvidia_drm"];
 boot.kernelModules = ["kvm-intel" "i2c-dev"];
 boot.extraModulePackages = [];
 # services.udev.packages = [pkgs.yubikey-personalization pkgs.yubikey-personalization-gui pkgs.via];
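Note: adding nvidia, nvidia_modeset and nvidia_drm to boot.initrd.kernelModules loads the proprietary driver in the initrd (early KMS), so the display is initialised by the NVIDIA driver early in boot rather than only after stage 2. The commented `options nvidia_drm modeset=1 fbdev=1` line earlier in the diff is the modprobe spelling of the related switches; a purely illustrative alternative using kernel parameters, assumed here for comparison and not part of this commit:

boot.kernelParams = ["nvidia-drm.modeset=1" "nvidia-drm.fbdev=1"];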