Commit 1fdc48e

add nextjs-ollama-llm-ui to ai module, with fix for ollamaUrl
1 parent 4a694ca commit 1fdc48e

File tree

1 file changed (+13, -6)

modules/nixos/desktop/ai.nix

Lines changed: 13 additions & 6 deletions
@@ -33,19 +33,26 @@ in {
     ollama = {
       enable = true;
 
-      environmentVariables = { OLLAMA_ORIGINS = "app://obsidian.md*"; };
+      environmentVariables = {
+        OLLAMA_ORIGINS = let origins = [ "app://obsidian.md*" ];
+        in (lib.concatStringsSep "," origins);
+      };
 
       acceleration =
         if cfg.amd then "rocm" else if cfg.nvidia then "cuda" else null;
     };
 
-    # disabled until i update nixpkgs
-    # nextjs-ollama-llm-ui = mkIf cfg.web-ui {
-    #   enable = true;
-    #   port = 6000;
-    # };
+    nextjs-ollama-llm-ui = mkIf cfg.web-ui {
+      enable = true;
+      port = 6060;
+      hostname = "0.0.0.0";
+    };
   };
 
+  # override the service to use the correct binary, until https://github.com/NixOS/nixpkgs/pull/319456 is merged
+  systemd.services.nextjs-ollama-llm-ui.serviceConfig.ExecStart =
+    lib.mkForce "${lib.getExe config.services.nextjs-ollama-llm-ui.package}";
+
   environment.systemPackages = with pkgs;
     lib.optionals cfg.lmstudio [ lmstudio ];
 };
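
The OLLAMA_ORIGINS change replaces a hard-coded string with a list joined by lib.concatStringsSep, so further allowed origins can later be appended to the list without reshaping the value. A minimal sketch of what this evaluates to; the second origin below is a hypothetical stand-in, not part of the commit:

# sketch.nix — illustrative only; "http://localhost:*" is an assumed extra origin
let
  lib = (import <nixpkgs> { }).lib;
in
  # joins the list elements with "," into the single string ollama expects
  lib.concatStringsSep "," [ "app://obsidian.md*" "http://localhost:*" ]
  # => "app://obsidian.md*,http://localhost:*"

With the one-element list actually committed, the result is byte-for-byte the old value, "app://obsidian.md*".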
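
The systemd override at the bottom leans on two lib helpers: lib.getExe resolves a package's main binary via meta.mainProgram, and lib.mkForce is mkOverride 50, which outranks the priority-100 ExecStart definition the upstream nextjs-ollama-llm-ui module provides. A rough sketch of the mechanics, with pkgs.hello standing in for the web UI package:

# sketch, not from the commit; pkgs.hello is a stand-in package
let
  pkgs = import <nixpkgs> { };
  lib = pkgs.lib;
in {
  # absolute store path to the package's main binary, e.g.
  # "/nix/store/<hash>-hello-<version>/bin/hello"
  exe = lib.getExe pkgs.hello;

  # wraps the value at override priority 50 so the module system
  # prefers it over a plain (priority 100) definition
  execStart = lib.mkForce "${lib.getExe pkgs.hello}";
}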
