feat(self/ollama): Mistral 7b setup with ollama

Raphael 2025-07-03 23:22:54 +02:00
parent 8634895181
commit 210816195b
5 changed files with 41 additions and 1 deletion


@@ -7,6 +7,9 @@ let
  monitor = import ./self_host/monitor.nix {
    inherit inputs config pkgs lib;
  };
  ollama = import ./self_host/ollama.nix {
    inherit inputs config pkgs lib;
  };
  nextcloud = import ./self_host/nextcloud.nix {
    inherit inputs config pkgs lib;
  };
@@ -16,6 +19,7 @@ in
  imports = [
    nextcloud
    htop
    ollama
    monitor
  ];
@@ -30,6 +34,11 @@ in
      default = false;
      description = "Enable the htop";
    };
    ollama = lib.mkOption {
      type = lib.types.bool;
      default = false;
      description = "Enable the ollama";
    };
    monitor = lib.mkOption {
      type = lib.types.bool;
      default = false;


@@ -95,6 +95,7 @@ in
      "https://nextcloud.enium.eu"
      "https://htop.enium.eu"
      "https://monitor.enium.eu"
      "https://ollama.enium.eu"
      "http://relance-pas-stp.me:4242"
    ];
  }];

self_host/ollama.nix

@@ -0,0 +1,29 @@
{ config, pkgs, lib, ... }:
let
  cfg = config.service.selfhost.ollama;
in
{
  services = lib.mkIf cfg {
    ollama = {
      enable = true;
      loadModels = [
        "mistral:7b"
      ];
      acceleration = "cuda";
    };
    open-webui = {
      enable = true;
      port = 13007;
    };
    nginx.virtualHosts."ollama.enium.eu" = {
      enableACME = true;
      forceSSL = true;
      locations."/" = {
        proxyPass = "http://127.0.0.1:13007";
        proxyWebsockets = true;
      };
    };
  };
}
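
For context, a minimal sketch of how this toggle would be flipped from a host configuration. The option name service.selfhost.ollama is taken from the new module above; the import path of the aggregating module is hypothetical and depends on how the repository wires its files together.

# Illustrative host configuration, not part of this commit.
{ ... }:
{
  # Hypothetical path to the module that imports ./self_host/ollama.nix.
  imports = [ ./self_host.nix ];

  # Declared in this commit with default = false, so ollama,
  # open-webui and the nginx vhost stay off until enabled here.
  service.selfhost.ollama = true;
}

With the option enabled, nginx serves Open WebUI at https://ollama.enium.eu (proxying to 127.0.0.1:13007), the mistral:7b model is pulled once ollama.service has started via loadModels, and the Ollama API itself stays on its default local port (11434) unless configured otherwise.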