Add Ollama service with GPU support
This commit is contained in:
parent
fa16e5232f
commit
f47f761bb8
4 changed files with 41 additions and 0 deletions
|
|
@@ -2,5 +2,6 @@
|
|||
imports = [
|
||||
./lnxlink.nix
|
||||
./beszel.nix
|
||||
./ollama.nix
|
||||
];
|
||||
}
|
||||
|
|
|
|||
36
modules/nixos/services/ollama.nix
Normal file
36
modules/nixos/services/ollama.nix
Normal file
|
|
@@ -0,0 +1,36 @@
|
|||
# NixOS module for a local LLM stack behind one switch:
# Ollama (model server), Qdrant (vector database) and Open WebUI (chat UI).
#
# Interface: options.services.elements.ollama.enable (unchanged).
# Note: the unused `pkgs` argument was dropped; the trailing `...`
# still absorbs every argument the module system passes in.
{
  config,
  lib,
  ...
}: let
  cfg = config.services.elements.ollama;
in {
  options.services.elements = {
    # mkEnableOption prepends "Whether to enable …", so pass only the
    # subject — "Enable Ollama" would render as
    # "Whether to enable Enable Ollama.".
    ollama.enable = lib.mkEnableOption "Ollama";
  };

  config = lib.mkIf cfg.enable {
    services = {
      ollama = {
        enable = true;
        user = "ollama";
        # NOTE(review): 0.0.0.0 + openFirewall exposes the API to the
        # whole network without authentication — confirm this is intended.
        host = "0.0.0.0";
        openFirewall = true;
        acceleration = "rocm"; # AMD GPU acceleration
      };

      # Vector store (e.g. for RAG pipelines using Ollama embeddings).
      qdrant = {
        enable = true;
        # settings = {};
      };

      # Web chat frontend for Ollama.
      open-webui = {
        enable = true;
        host = "0.0.0.0";
        openFirewall = true;
      };
    };
  };
}
|
||||
|
|
@@ -9,6 +9,9 @@
|
|||
orca-slicer
|
||||
claude-code
|
||||
lutris
|
||||
ollama
|
||||
# currently doesn't build on unstable
|
||||
# open-webui
|
||||
;
|
||||
|
||||
bambu-studio = channels.unstable.bambu-studio.overrideAttrs (old: let
|
||||
|
|
|
|||
|
|
@@ -102,6 +102,7 @@ with lib._elements; {
|
|||
lnxlink.enable = true;
|
||||
beszel-agent.enable = true;
|
||||
beszel-agent.key = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIMkUPOw28Cu2LMuzfmvjT/L2ToNHcADwGyGvSpJ4wH2T";
|
||||
elements.ollama.enable = true;
|
||||
|
||||
pipewire = {
|
||||
enable = lib.mkForce true;
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue