feat(self/ollama): GPU doesn't work with AI

This commit is contained in:
Raphael 2025-07-03 23:34:26 +02:00
parent 210816195b
commit c212d0c671
2 changed files with 2 additions and 2 deletions

View file

@ -22,7 +22,7 @@
service = {
selfhost = {
htop = true;
ollama = true;
ollama = false;
monitor = true;
nextcloud = true;
};

View file

@ -8,7 +8,7 @@ in
ollama = {
enable = true;
loadModels = [
"mistral:7b"
"qwen2.5:3b"
];
acceleration = "cuda";
};