{ config, pkgs, lib, ... }:

{
  services = {
    ollama = {
      enable = true;
      # Models to pull automatically once ollama.service is up.
      loadModels = [
        "qwen3:14b"
        "qwen3-coder:30b"
        "gemma3:12b"
        "gpt-oss:20b"
        "deepseek-r1:14b"
        "phi4-reasoning:14b"
      ];
      acceleration = "rocm";
      environmentVariables = {
        HCC_AMDGPU_TARGET = "gfx1031";
      };
      # gfx1031 (Navi 22) is not an officially supported ROCm target; report
      # the closest supported ISA (gfx1030 -> 10.3.0) so HIP kernels load.
      rocmOverrideGfx = "10.3.0";
    };

    open-webui = {
      # Currently disabled; set to true to serve the chat UI behind nginx.
      enable = false;
      port = 3030;
      environment = {
        ANONYMIZED_TELEMETRY = "False";
        DO_NOT_TRACK = "True";
        SCARF_NO_ANALYTICS = "True";
        WEBUI_AUTH = "False";
        OLLAMA_API_BASE_URL = "http://localhost:11434";
        # Keep all mutable state under the service's state directory.
        STATIC_DIR = "${config.services.open-webui.stateDir}/static";
        DATA_DIR = "${config.services.open-webui.stateDir}/data";
        HF_HOME = "${config.services.open-webui.stateDir}/hf_home";
        SENTENCE_TRANSFORMERS_HOME = "${config.services.open-webui.stateDir}/transformers_home";
      };
    };

    xserver.videoDrivers = [ "amdgpu" ];

    # Reverse proxies for the web UI and the Ollama API.
    nginx.virtualHosts = {
      "chat.internal" = {
        locations."/" = {
          proxyPass = "http://127.0.0.1:3030";
          proxyWebsockets = true;
        };
      };
      "ollama.internal" = {
        locations."/" = {
          proxyPass = "http://127.0.0.1:11434";
          proxyWebsockets = true;
        };
      };
    };
  };

  nixpkgs.config = {
    allowUnfree = lib.mkForce true;
    rocmSupport = lib.mkForce true;
    rocmTargets = [ "gfx1031" ];
  };

  # ROCm diagnostics and tooling.
  environment.systemPackages = with pkgs; [
    rocmPackages.rocminfo
    rocmPackages.rocm-smi
    rocmPackages.hipcc
  ];
}
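
# A minimal usage sketch, kept as comments so the module stays valid Nix. The
# file name and hostnames below are assumptions, not defined by this module:
# save it as e.g. ./ollama.nix and include it from configuration.nix with
#
#   imports = [ ./ollama.nix ];
#
# After a rebuild, `rocminfo | grep gfx` (rocminfo is installed above) should
# list a gfx1031 agent, and `curl http://ollama.internal/api/tags` should
# return the preloaded models once nginx is enabled and DNS (or /etc/hosts
# entries) for the *.internal names is in place.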