Merge pull request #295837 from abysssol/ollama-env-vars

nixos/ollama: add option to set environment variables
This commit is contained in:
Pol Dellaiera 2024-03-16 08:02:55 +01:00 committed by GitHub
commit 4285a30496
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -13,48 +13,60 @@ in
 {
   options = {
     services.ollama = {
-      enable = lib.mkEnableOption (
-        lib.mdDoc "Server for local large language models"
-      );
+      enable = lib.mkEnableOption "ollama server for local large language models";
+      package = lib.mkPackageOption pkgs "ollama" { };
       listenAddress = lib.mkOption {
         type = types.str;
         default = "127.0.0.1:11434";
-        description = lib.mdDoc ''
-          Specifies the bind address on which the ollama server HTTP interface listens.
+        example = "0.0.0.0:11111";
+        description = ''
+          The address which the ollama server HTTP interface binds and listens to.
         '';
       };
       acceleration = lib.mkOption {
         type = types.nullOr (types.enum [ "rocm" "cuda" ]);
         default = null;
         example = "rocm";
-        description = lib.mdDoc ''
-          Specifies the interface to use for hardware acceleration.
+        description = ''
+          What interface to use for hardware acceleration.
           - `rocm`: supported by modern AMD GPUs
           - `cuda`: supported by modern NVIDIA GPUs
         '';
       };
-      package = lib.mkPackageOption pkgs "ollama" { };
+      environmentVariables = lib.mkOption {
+        type = types.attrsOf types.str;
+        default = { };
+        example = {
+          HOME = "/tmp";
+          OLLAMA_LLM_LIBRARY = "cpu";
+        };
+        description = ''
+          Set arbitrary environment variables for the ollama service.
+          Be aware that these are only seen by the ollama server (systemd service),
+          not normal invocations like `ollama run`.
+          Since `ollama run` is mostly a shell around the ollama server, this is usually sufficient.
+        '';
+      };
     };
   };
   config = lib.mkIf cfg.enable {
-    systemd = {
-      services.ollama = {
-        wantedBy = [ "multi-user.target" ];
-        description = "Server for local large language models";
-        after = [ "network.target" ];
-        environment = {
-          HOME = "%S/ollama";
-          OLLAMA_MODELS = "%S/ollama/models";
-          OLLAMA_HOST = cfg.listenAddress;
-        };
-        serviceConfig = {
-          ExecStart = "${lib.getExe ollamaPackage} serve";
-          WorkingDirectory = "/var/lib/ollama";
-          StateDirectory = [ "ollama" ];
-          DynamicUser = true;
-        };
-      };
-    };
+    systemd.services.ollama = {
+      description = "Server for local large language models";
+      wantedBy = [ "multi-user.target" ];
+      after = [ "network.target" ];
+      environment = cfg.environmentVariables // {
+        HOME = "%S/ollama";
+        OLLAMA_MODELS = "%S/ollama/models";
+        OLLAMA_HOST = cfg.listenAddress;
+      };
+      serviceConfig = {
+        ExecStart = "${lib.getExe ollamaPackage} serve";
+        WorkingDirectory = "%S/ollama";
+        StateDirectory = [ "ollama" ];
+        DynamicUser = true;
+      };
+    };