author     Malte Neuss <malteneuss@users.noreply.github.com>  2024-05-20 14:54:08 +0200
committer  Malte Neuss <malteneuss@users.noreply.github.com>  2024-05-23 23:48:55 +0200
commit     8a05b4f8d4bec7d1ae32c836c3cda8265689270b (patch)
tree       8cc7fcd664dd8c0a6ba99e223dc40a70a2a4c483 /nixos/tests/web-apps
parent     1df1f8d3be8c916aa50cc5dd0c2d828a3472a70b (diff)
nixos/nextjs-ollama-llm-ui: init module
NixOS already has good support for the Ollama
backend service. Now we can benefit from
having a convenient web frontend for it as well.
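
Not part of this commit, just an illustrative sketch: a NixOS configuration that pairs
the new frontend module with the existing Ollama backend. The `enable` and `port`
options are taken from the test below; the `services.ollama.enable` line is an
assumption based on the backend module the commit message refers to.

    { config, pkgs, ... }:
    {
      # Ollama backend (module already present in NixOS; option assumed here).
      services.ollama.enable = true;

      # Web frontend module introduced by this commit.
      services.nextjs-ollama-llm-ui = {
        enable = true;
        port = 8080;
      };
    }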
Diffstat (limited to 'nixos/tests/web-apps')
-rw-r--r--  nixos/tests/web-apps/nextjs-ollama-llm-ui.nix  |  22
1 file changed, 22 insertions(+), 0 deletions(-)
diff --git a/nixos/tests/web-apps/nextjs-ollama-llm-ui.nix b/nixos/tests/web-apps/nextjs-ollama-llm-ui.nix
new file mode 100644
index 0000000000000..3bb9d1e62aefe
--- /dev/null
+++ b/nixos/tests/web-apps/nextjs-ollama-llm-ui.nix
@@ -0,0 +1,22 @@
+{ lib, ... }:
+
+{
+  name = "nextjs-ollama-llm-ui";
+  meta.maintainers = with lib.maintainers; [ malteneuss ];
+
+  nodes.machine =
+    { pkgs, ... }:
+    {
+      services.nextjs-ollama-llm-ui = {
+        enable = true;
+        port = 8080;
+      };
+    };
+
+  testScript = ''
+    # Ensure the service is started and reachable
+    machine.wait_for_unit("nextjs-ollama-llm-ui.service")
+    machine.wait_for_open_port(8080)
+    machine.succeed("curl --fail http://127.0.0.1:8080")
+  '';
+}
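
The diffstat above is limited to nixos/tests/web-apps, so the wiring of the test is not
shown. A typical registration for a test written in this format would look roughly like
the sketch below; the `runTest` helper and the attribute name follow the usual
convention in nixos/tests/all-tests.nix and are assumptions, not part of this diff.

    # nixos/tests/all-tests.nix (sketch, not part of this diff)
    nextjs-ollama-llm-ui = runTest ./web-apps/nextjs-ollama-llm-ui.nix;

Once registered that way, the test can typically be built from the nixpkgs root with
nix-build -A nixosTests.nextjs-ollama-llm-ui.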