# .env for llm-lab (mirror of https://github.com/tecno-consultores/llm-lab)
# To generate keys on Linux: dd if=/dev/urandom bs=1 count=64 2>/dev/null | base64 -w 0 | tr -dc _A-Z-a-z-0-9 | cut -c 1-32

# ollama
OLLAMA_DEBUG=false
OLLAMA_KEEP_ALIVE=1m
OLLAMA_MAX_LOADED_MODELS=1
OLLAMA_MAX_QUEUE=512
OLLAMA_NUM_PARALLEL=1
OLLAMA_NOPRUNE=false
OLLAMA_SCHED_SPREAD=false
OLLAMA_FLASH_ATTENTION=true
OLLAMA_KV_CACHE_TYPE=q4_0
OLLAMA_LOAD_TIMEOUT=1m
OPORT=11434
ohostname=ollama

# openwebui
WEBUI_SECRET_KEY=zyORE5fZONBV3y0j5pji9D2xoNCsQN4i #change me
WEBUI_URL=https://oui.example.com #change me
PORT=8383
ENABLE_OPENAI_API=false
OPENAI_API_BASE_URL=https://api.openai.com/v1
CUSTOM_NAME=lab
openhostname=openwebui

# n8n
N8N_ENCRYPTION_KEY=5hPh0By12GSRjeL9MA0QQhGTaJMeQDxz #change me
N8N_USER_MANAGEMENT_JWT_SECRET=pUXz0lQstdrk3LzF4H1Y6amX6Jt7QNLU #change me
N8N_RUNNERS_AUTH_TOKEN=vdqx9u7LGhT9taaOWwe2wXrQPwtyNfVP #change me
N8N_PORT=5678
N8N_PORT2=5679
DB_TYPE=postgresdb
nhostname=n8n
WEBHOOK_URL=https://n8n.example.com/ #change me
N8N_PAYLOAD_SIZE_MAX=32
N8N_FORMDATA_FILE_SIZE_MAX=400
N8N_SMTP_HOST=smtp.example.com
N8N_SMTP_PORT=587
N8N_SMTP_USER=user@example.com
N8N_SMTP_PASS=123456789
TIMEZONE="America/Caracas"
LANGCHAIN_API_KEY="ssafgasfgasfgasfgasfg" #change me
workername=n8n-worker
runnername=n8n-runner

# postgres
POSTGRES_USER=user
POSTGRES_PASSWORD=KrFgLvKJRvinkEG5 #change me
POSTGRES_DB=lab
POSTGRESDB_PORT=5432
DATABASE_HOST=postgres-ai

# redis
REDIS_PORT=6379
INSIGHT_PORT=8001
rhostname=redis-ai

# qdrant
qdrant=6333
qhostname=qdrant

# kafka
kafka1=9092
kafka2=9093
khostname=kafka

# whisper
wport=7860
ASR_MODEL=base #Model selection (tiny, base, small, medium, large-v3, etc.)
ASR_ENGINE=openai_whisper #Engine selection (openai_whisper, faster_whisper, whisperx)
whostname=whisper

# crawl4ai
CRAWL4AI_API_TOKEN=example #change me
CLAUDE_API_KEY=example #change me
CRAW1=11235
CRAW2=8001
CRAW3=9222
CRAW4=8081
crawhostname=crawl4ai

# nginx proxy manager
proxyhostname=proxy
pproxy=81
INITIAL_ADMIN_EMAIL=my@example.com #change me
INITIAL_ADMIN_PASSWORD=mypassword1 #change me
# change to true if IPv6 is not enabled on your host
DISABLE_IPV6=false

# searxng
searxnghostname=searxng
psearch=8181
SEARXNG_HOSTNAME=searxng.example.com #change me
SEARXNG_UWSGI_WORKERS=4
SEARXNG_UWSGI_THREADS=4

# Mongodb
mhostname=mongo
mongoport=9081
MONGO_INITDB_ROOT_USERNAME=root
MONGO_INITDB_ROOT_PASSWORD=KrFgLvKJRvinkEG5 #change me

# evolution api
EVOLUTION_SERVER_URL=https://evo.example.com #change me
evoapikey=GSAQM73l6zcFYAECYfPUIaB9iCfjTCOp #change me
evohostname=evolutionapi
evoport=9191
evodb=evolution

# openai-bridge
obridge=n8n-openai-bridge
obridgep=3333
obearer=TH76Ki41mS2LUlvj64nHieTTsl45K1hM #change me
wtoken=your-secret-token-here #change me
napi=n8n-secret-token-here #change me

# flowise
FPORT=3001
FLOWISE_USERNAME=flowuser
FLOWISE_PASSWORD=UGO3IeC8pEqHVOz7c8OKlkjtuP6bKVpA #change me
DATABASE_TYPE=postgres
FLOWISE_FILE_SIZE_LIMIT=100mb
CORS_ORIGINS=*
IFRAME_ORIGINS=*
LOG_LEVEL=info
ENABLE_METRICS=false
METRICS_OPEN_TELEMETRY_DEBUG=false
DISABLE_FLOWISE_TELEMETRY=true
fhostname=flowise

# opencode
codehostname=opencode
OPENCODE_SERVER_PASSWORD=MtsNFjb3541wObuWVYA48DPi21BSjRdA #change me
OCPORT=4096

# openclaw
clawhostname=openclaw
CLAWPORT=18789
CLAWBRIDGE=18790
OPENCLAW_GATEWAY_TOKEN=7e4b8a14a946e6a2cc660dc8694c8d673722424d3eb784eec153eab0459eae6d # Example generator: openssl rand -hex 32
AUTH_PASSWORD=svhq1x6qODSjnucpq8maGI9bKxaCNg6V #change me
OPENCLAW_PRIMARY_MODEL=example #change me
OPENCLAW_GATEWAY_BIND=lan
OPENCLAW_INIT_GATEWAY_MODE=local
# Set DOCKER_GID to the host's docker group GID (run: stat -c '%g' /var/run/docker.sock).
DOCKER_GID=999 #change me
OPENCLAW_SANDBOX=1
DOCKER_SOCKET_PATH=/var/run/docker.sock

# global
restart=always
pull_policy=always
UOLLAMA='http://ollama:11434'
OPENAI_API_KEY=example #change me
ANTHROPIC_API_KEY=example #change me
GEMINI_API_KEY=example #change me
OPENROUTER_API_KEY=example #change me
XAI_API_KEY=example #change me
GROQ_API_KEY=example #change me
MISTRAL_API_KEY=example #change me
CEREBRAS_API_KEY=example #change me
VENICE_API_KEY=example #change me
MOONSHOT_API_KEY=example #change me
KIMI_API_KEY=example #change me
MINIMAX_API_KEY=example #change me
ZAI_API_KEY=example #change me
AI_GATEWAY_API_KEY=example #change me
OPENCODE_API_KEY=example #change me
SYNTHETIC_API_KEY=example #change me
COPILOT_GITHUB_TOKEN=example #change me
XIAOMI_API_KEY=example #change me
DEEPGRAM_API_KEY=example #change me
AWS_ACCESS_KEY_ID=example #change me
AWS_SECRET_ACCESS_KEY=example #change me
AWS_REGION=example #change me
AWS_SESSION_TOKEN=example #change me
BEDROCK_PROVIDER_FILTER=example #change me
OLLAMA_API_KEY=example #change me
TELEGRAM_BOT_TOKEN=example #change me
DISCORD_BOT_TOKEN=example #change me
SLACK_BOT_TOKEN=example #change me
SLACK_APP_TOKEN=example #change me
ZALO_BOT_TOKEN=example #change me
OPENCLAW_TWITCH_ACCESS_TOKEN=example #change me
BRAVE_API_KEY=example #change me
PERPLEXITY_API_KEY=example #change me
FIRECRAWL_API_KEY=example #change me
ELEVENLABS_API_KEY=example #change me
XI_API_KEY=example #change me
# NOTE(review): a second, identical DEEPGRAM_API_KEY entry was defined here;
# removed because DEEPGRAM_API_KEY is already set above in this section and
# duplicate keys are resolved inconsistently across env-file parsers.