Merge pull request #326939 from abysssol/ollama-split-test

nixos/ollama: split cuda and rocm from service test
Pol Dellaiera, 2024-07-16 21:02:07 +02:00 (committed by GitHub)
commit dfef8af6e9
5 changed files with 77 additions and 42 deletions

nixos/tests/all-tests.nix

@@ -687,7 +687,9 @@ in {
   ocis = handleTest ./ocis.nix {};
   oddjobd = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./oddjobd.nix {};
   oh-my-zsh = handleTest ./oh-my-zsh.nix {};
-  ollama = handleTest ./ollama.nix {};
+  ollama = runTest ./ollama.nix;
+  ollama-cuda = runTestOn ["x86_64-linux" "aarch64-linux"] ./ollama-cuda.nix;
+  ollama-rocm = runTestOn ["x86_64-linux" "aarch64-linux"] ./ollama-rocm.nix;
   ombi = handleTest ./ombi.nix {};
   openarena = handleTest ./openarena.nix {};
   openldap = handleTest ./openldap.nix {};
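
Each attribute registered here becomes buildable on its own, which is the point of the split: the GPU variants no longer gate the plain service test. A minimal sketch, assuming a nixpkgs checkout as the working directory, of building all three from Python (the script is illustrative and not part of this change):

import subprocess

tests = [
    "nixosTests.ollama",        # CPU-only service test
    "nixosTests.ollama-cuda",   # GPU variants, built on demand; the CUDA one
    "nixosTests.ollama-rocm",   # typically needs unfree packages allowed
]
for attr in tests:
    # Equivalent to running `nix-build -A <attr>` by hand; check=True
    # raises CalledProcessError if the build (or the test) fails.
    subprocess.run(["nix-build", "-A", attr], check=True)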

nixos/tests/ollama-cuda.nix

@@ -0,0 +1,17 @@
+{ lib, ... }:
+{
+  name = "ollama-cuda";
+  meta.maintainers = with lib.maintainers; [ abysssol ];
+
+  nodes.cuda =
+    { ... }:
+    {
+      services.ollama.enable = true;
+      services.ollama.acceleration = "cuda";
+    };
+
+  testScript = ''
+    cuda.wait_for_unit("multi-user.target")
+    cuda.wait_for_open_port(11434)
+  '';
+}

nixos/tests/ollama-rocm.nix

@@ -0,0 +1,17 @@
+{ lib, ... }:
+{
+  name = "ollama-rocm";
+  meta.maintainers = with lib.maintainers; [ abysssol ];
+
+  nodes.rocm =
+    { ... }:
+    {
+      services.ollama.enable = true;
+      services.ollama.acceleration = "rocm";
+    };
+
+  testScript = ''
+    rocm.wait_for_unit("multi-user.target")
+    rocm.wait_for_open_port(11434)
+  '';
+}

nixos/tests/ollama.nix

@@ -1,56 +1,53 @@
-import ./make-test-python.nix ({ pkgs, lib, ... }:
+{ lib, ... }:
 let
   mainPort = 11434;
   altPort = 11435;
-
-  curlRequest = port: request:
-    "curl http://127.0.0.1:${toString port}/api/generate -d '${builtins.toJSON request}'";
-
-  prompt = {
-    model = "tinydolphin";
-    prompt = "lorem ipsum";
-    options = {
-      seed = 69;
-      temperature = 0;
-    };
-  };
 in
 {
   name = "ollama";
-  meta = with lib.maintainers; {
-    maintainers = [ abysssol ];
-  };
+  meta.maintainers = with lib.maintainers; [ abysssol ];
 
   nodes = {
-    cpu = { ... }: {
-      services.ollama.enable = true;
-    };
+    cpu =
+      { ... }:
+      {
+        services.ollama.enable = true;
+      };
 
-    rocm = { ... }: {
-      services.ollama.enable = true;
-      services.ollama.acceleration = "rocm";
-    };
-
-    cuda = { ... }: {
-      services.ollama.enable = true;
-      services.ollama.acceleration = "cuda";
-    };
-
-    altAddress = { ... }: {
-      services.ollama.enable = true;
-      services.ollama.port = altPort;
-    };
+    altAddress =
+      { ... }:
+      {
+        services.ollama.enable = true;
+        services.ollama.port = altPort;
+      };
   };
 
   testScript = ''
-    vms = [ cpu, rocm, cuda, altAddress ];
+    import json
+
+    def curl_request_ollama(prompt, port):
+        json_prompt = json.dumps(prompt)
+        return f"""curl http://127.0.0.1:{port}/api/generate -d '{json_prompt}'"""
+
+    prompt = {
+        "model": "tinydolphin",
+        "prompt": "lorem ipsum",
+        "options": {
+            "seed": 69,
+            "temperature": 0,
+        },
+    }
+
+
+    vms = [
+        (cpu, ${toString mainPort}),
+        (altAddress, ${toString altPort}),
+    ]
 
     start_all()
-    for vm in vms:
-      vm.wait_for_unit("multi-user.target")
-
-    stdout = cpu.succeed("""${curlRequest mainPort prompt}""", timeout=100)
-
-    stdout = altAddress.succeed("""${curlRequest altPort prompt}""", timeout=100)
+    for (vm, port) in vms:
+        vm.wait_for_unit("multi-user.target")
+        vm.wait_for_open_port(port)
+        stdout = vm.succeed(curl_request_ollama(prompt, port), timeout = 100)
   '';
-})
+}
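
The rewritten testScript moves request construction out of Nix string interpolation (the old curlRequest helper) and into the Python driver. A standalone sketch of what the new helper emits, runnable outside the VM test; the final print is added here for illustration, and 11434 is the test's mainPort:

import json

def curl_request_ollama(prompt, port):
    # Same body as in the testScript: serialize the prompt and wrap it
    # in a curl call against Ollama's /api/generate endpoint.
    json_prompt = json.dumps(prompt)
    return f"""curl http://127.0.0.1:{port}/api/generate -d '{json_prompt}'"""

prompt = {
    "model": "tinydolphin",
    "prompt": "lorem ipsum",
    "options": {
        "seed": 69,        # fixed seed and zero temperature keep the
        "temperature": 0,  # model's output deterministic across runs
    },
}

print(curl_request_ollama(prompt, 11434))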

ollama package expression

@@ -214,6 +214,8 @@ goBuild ((lib.optionalAttrs enableRocm {
     };
   } // lib.optionalAttrs stdenv.isLinux {
     inherit ollama-rocm ollama-cuda;
+    service-cuda = nixosTests.ollama-cuda;
+    service-rocm = nixosTests.ollama-rocm;
   };

   meta = {
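
Because passthru attributes surface on the derivation itself, the new service-cuda and service-rocm links make the NixOS tests reachable from the package as well. A one-line sketch under that assumption, again from a nixpkgs checkout:

import subprocess

# `ollama.tests.service-cuda` reaches nixosTests.ollama-cuda through the
# passthru link added above.
subprocess.run(["nix-build", "-A", "ollama.tests.service-cuda"], check=True)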