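# NixOS module for a local LLM stack: Ollama with CUDA acceleration, an
# optional (commented-out) Open WebUI front end, and client tooling
# (LM Studio, aider).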
{
  pkgs,
  ...
}:
{
  # Enable Ollama as a service
  services.ollama = {
    enable = true;
    acceleration = "cuda";
    environmentVariables = {
      OLLAMA_FLASH_ATTENTION = "1"; # ← Flash Attention
      OLLAMA_NUM_PARALLEL = "2"; # ← parallel requests
      OLLAMA_MAX_LOADED_MODELS = "1"; # ← keep a single model loaded in VRAM
      OLLAMA_KEEP_ALIVE = "5m"; # ← unload a model 5 minutes after its last use
    };
  };

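  # Quick sanity check after `nixos-rebuild switch` (a sketch; Ollama's
  # default listen address is 127.0.0.1:11434):
  #   curl http://127.0.0.1:11434/api/tags   # lists locally pulled models
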
  # services.open-webui = {
  #   enable = true;
  #   port = 8080; # Default port
  #   host = "127.0.0.1"; # Localhost only
  #   openFirewall = true;
  #   # For network-wide access: host = "0.0.0.0";
  #   environment = {
  #     ANONYMIZED_TELEMETRY = "False"; # "False" opts out of telemetry
  #     DO_NOT_TRACK = "True";
  #     SCARF_NO_ANALYTICS = "True";
  #     # Ollama URL (local)
  #     OLLAMA_BASE_URL = "http://127.0.0.1:11434";
  #     # Other optional settings (https://docs.openwebui.com/getting-started/env-configuration/#web-search)
  #     WEBUI_AUTH = "False"; # Disables authentication
  #     # ENABLE_WEB_SEARCH = "True";
  #     # ENABLE_SEARCH_QUERY_GENERATION = "True";
  #     # WEB_SEARCH_ENGINE = "duckduckgo";
  #     # WEB_LOADER_ENGINE = "safe_web";
  #   };
  # };

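  # Once the block above is enabled, the UI is reachable at
  # http://127.0.0.1:8080 (per the host/port settings above).
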
  environment = {
    systemPackages = [
      pkgs.lmstudio
      pkgs.aider-chat-full
    ];
    variables = {
      OLLAMA_API_BASE = "http://localhost:11434";
    };
  };
}
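
# Example client usage (a sketch; `llama3.2` is a placeholder for any model
# pulled with `ollama pull`). OLLAMA_API_BASE is exported system-wide above,
# so aider can talk to the local Ollama instance:
#   aider --model ollama_chat/llama3.2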