dev/modules/optionnals/ai.nix
{
  pkgs,
  ...
}:
{
  # Enable Ollama as a service
  services.ollama = {
    enable = true;
    acceleration = "cuda"; # NVIDIA GPU acceleration
  };
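
  # Ollama serves its HTTP API on 127.0.0.1:11434 by default; Open WebUI
  # is pointed at it via OLLAMA_BASE_URL below.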
  services.open-webui = {
    enable = true;
    port = 8080; # Default port
    host = "127.0.0.1"; # Localhost only
    openFirewall = true; # Only effective once host is non-loopback
    # For network access: host = "0.0.0.0";
    environment = {
      # Disable telemetry and analytics
      ANONYMIZED_TELEMETRY = "False";
      DO_NOT_TRACK = "True";
      SCARF_NO_ANALYTICS = "True";
      # Ollama URL (local)
      OLLAMA_BASE_URL = "http://127.0.0.1:11434";
      # Other optional settings (https://docs.openwebui.com/getting-started/env-configuration/#web-search)
      WEBUI_AUTH = "False"; # Disables authentication
      # ENABLE_WEB_SEARCH = "True";
      # ENABLE_SEARCH_QUERY_GENERATION = "True";
      # WEB_SEARCH_ENGINE = "duckduckgo";
      # WEB_LOADER_ENGINE = "safe_web";
    };
  };
  environment.systemPackages = [ pkgs.lmstudio ]; # LM Studio as a standalone desktop client
}
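
A possible extension, sketched below: the "For network access" comment can become a
small variant of this module that binds Open WebUI to all interfaces and preloads a
model into Ollama. This is a minimal sketch, assuming a nixpkgs recent enough to
provide services.ollama.loadModels; the model tag "llama3.2:3b" is a hypothetical
choice, not something taken from this repository.

  { ... }:
  {
    services.ollama = {
      enable = true;
      acceleration = "cuda";
      loadModels = [ "llama3.2:3b" ]; # hypothetical tag; pulled once the service starts
    };

    services.open-webui = {
      enable = true;
      port = 8080;
      host = "0.0.0.0"; # listen on all interfaces
      openFirewall = true; # now meaningful: opens TCP port 8080
    };
  }

Note that with WEBUI_AUTH = "False", anyone who can reach the port gets full access,
so authentication should be re-enabled before exposing the interface to the network.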