feat(self/ollama): GPU doesn't work with AI

Raphael 2025-07-03 23:34:26 +02:00
parent 210816195b
commit c212d0c671
2 changed files with 2 additions and 2 deletions

@@ -22,7 +22,7 @@
   service = {
     selfhost = {
       htop = true;
-      ollama = true;
+      ollama = false;
       monitor = true;
       nextcloud = true;
     };
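For context, a minimal sketch of how a boolean flag like service.selfhost.ollama is typically wired in a NixOS module so that flipping it to false disables the service. The mkEnableOption/mkIf layout below is an assumption for illustration, not code taken from this repository.

# Hypothetical module sketch; option wiring is assumed, only the option path
# service.selfhost.ollama comes from the diff above.
{ config, lib, ... }:
{
  options.service.selfhost.ollama = lib.mkEnableOption "the Ollama service";

  config = lib.mkIf config.service.selfhost.ollama {
    # Enabling the flag turns on the underlying service; with ollama = false
    # (as set in this commit) the block below is not applied.
    services.ollama.enable = true;
  };
}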

@@ -8,7 +8,7 @@ in
   ollama = {
     enable = true;
     loadModels = [
-      "mistral:7b"
+      "qwen2.5:3b"
     ];
     acceleration = "cuda";
   };
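The second hunk swaps the preloaded model from mistral:7b to the smaller qwen2.5:3b while keeping CUDA acceleration. A minimal sketch of the resulting configuration, assuming the standard nixpkgs services.ollama module; the services.* prefix is an assumption, since this repository reaches these options through its own selfhost wrapper.

# Sketch of the Ollama configuration after this commit, assuming the
# nixpkgs services.ollama module (enable, loadModels, acceleration).
{
  services.ollama = {
    enable = true;
    # Preload the smaller 3B model instead of mistral:7b.
    loadModels = [ "qwen2.5:3b" ];
    # CUDA acceleration is still requested here; the commit message notes the
    # GPU doesn't work, so the service is switched off via the selfhost flag
    # in the other file.
    acceleration = "cuda";
  };
}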