# nix-home-manager/home/llm.nix

{ config, pkgs, lib, ... }:

let
  # Change this to your external disk or large partition.
  ollamaModelDir = "/mnt/models/ollama";

  # Models pulled automatically during activation (see below).
  modelsToInstall = [
    "llama3"
    "mistral"
    "codellama"
    "gemma"
  ];

  cfg = config.homeModules.llm;
in
{
  # Declared here so the module is self-contained; drop this if
  # homeModules.llm.enable is already declared elsewhere.
  options.homeModules.llm.enable =
    lib.mkEnableOption "local LLM tooling (Ollama daemon and opencode)";

  # mkIf cannot wrap a module's top level; it belongs under `config`.
  config = lib.mkIf cfg.enable {
    home.packages = with pkgs; [
      ollama
      opencode
    ];

    # Set up environment variables
    home.sessionVariables = {
      OLLAMA_MODELS = ollamaModelDir;
      OLLAMA_HOST = "127.0.0.1:11434";
    };
    # Systemd user service for ollama daemon
    systemd.user.services.ollama = {
      Unit = {
        Description = "Ollama LLM Inference Daemon";
        After = [ "network.target" ];
      };
      Service = {
        ExecStart = "${pkgs.ollama}/bin/ollama serve";
        Environment = [
          "OLLAMA_MODELS=${ollamaModelDir}"
          "OLLAMA_HOST=127.0.0.1:11434"
        ];
        Restart = "on-failure";
      };
      Install = {
        WantedBy = [ "default.target" ];
      };
    };
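
    # Sanity check after switching (assumes a systemd-based login session):
    #   systemctl --user status ollama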
    # Pull any missing models on activation. Note: `ollama pull` talks to the
    # daemon, so the user service above must already be running.
    home.activation.installOllamaModels = lib.hm.dag.entryAfter [ "writeBoundary" ] ''
      export OLLAMA_MODELS=${ollamaModelDir}
      export OLLAMA_HOST=127.0.0.1:11434
      $DRY_RUN_CMD mkdir -p ${ollamaModelDir}
      echo "Checking and pulling ollama models..."
      for model in ${builtins.toString modelsToInstall}; do
        # Anchor at line start so one model name cannot match inside another.
        if ! ${pkgs.ollama}/bin/ollama list | grep -q "^$model"; then
          $DRY_RUN_CMD ${pkgs.ollama}/bin/ollama pull "$model"
        fi
      done
    '';
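
    # Activation re-runs on every `home-manager switch`, so a model added to
    # modelsToInstall is pulled on the next switch.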
    # Optional aliases for quick usage. The second one just opens the API
    # root in a browser, which only confirms the daemon is up.
    programs.zsh.shellAliases = {
      llm = "opencode";
      ollama-ui = "xdg-open http://localhost:11434";
    };
  };
}
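
# Usage sketch, assuming this file sits at ./llm.nix next to your home.nix:
#
#   {
#     imports = [ ./llm.nix ];
#     homeModules.llm.enable = true;
#   }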