nixos/ollama: add test for the ollama service

abysssol 2024-03-11 20:53:45 -04:00
parent 52544c4a0a
commit efed30f903
2 changed files with 57 additions and 0 deletions

nixos/tests/all-tests.nix

@@ -640,6 +640,7 @@ in {
nzbget = handleTest ./nzbget.nix {};
nzbhydra2 = handleTest ./nzbhydra2.nix {};
oh-my-zsh = handleTest ./oh-my-zsh.nix {};
ollama = handleTest ./ollama.nix {};
ombi = handleTest ./ombi.nix {};
openarena = handleTest ./openarena.nix {};
openldap = handleTest ./openldap.nix {};
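
With this registration the test is exposed through the nixosTests attribute set, so from a regular nixpkgs checkout it can be run locally with something like:

    nix-build -A nixosTests.ollama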

nixos/tests/ollama.nix Normal file

@@ -0,0 +1,56 @@
import ./make-test-python.nix ({ pkgs, lib, ... }:
let
  mainPort = "11434";
  altPort = "11435";

  # Build a curl command that posts a generate request as JSON to the given port.
  curlRequest = port: request:
    "curl http://127.0.0.1:${port}/api/generate -d '${builtins.toJSON request}'";

  prompt = {
    model = "tinydolphin";
    prompt = "lorem ipsum";
    options = {
      seed = 69;
      temperature = 0;
    };
  };
in
{
  name = "ollama";
  meta = with lib.maintainers; {
    maintainers = [ abysssol ];
  };

  nodes = {
    cpu = { ... }: {
      services.ollama.enable = true;
    };

    rocm = { ... }: {
      services.ollama.enable = true;
      services.ollama.acceleration = "rocm";
    };

    cuda = { ... }: {
      services.ollama.enable = true;
      services.ollama.acceleration = "cuda";
    };

    altAddress = { ... }: {
      services.ollama.enable = true;
      services.ollama.listenAddress = "127.0.0.1:${altPort}";
    };
  };

  testScript = ''
    vms = [ cpu, rocm, cuda, altAddress ]

    start_all()
    for vm in vms:
      vm.wait_for_unit("multi-user.target")

    # Only the cpu and altAddress nodes are queried; the rocm and cuda nodes
    # only need to reach multi-user.target.
    stdout = cpu.succeed("""${curlRequest mainPort prompt}""", timeout=100)
    stdout = altAddress.succeed("""${curlRequest altPort prompt}""", timeout=100)
  '';
})
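
For reference, a sketch of what the interpolated request for the cpu node evaluates to (builtins.toJSON serializes attribute names in sorted order, so the exact string may differ slightly from the driver's output):

    curl http://127.0.0.1:11434/api/generate -d '{"model":"tinydolphin","options":{"seed":69,"temperature":0},"prompt":"lorem ipsum"}'

The fixed seed and zero temperature are presumably there to keep generation deterministic; the assertion itself only checks that the ollama API responds on the expected port.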