mirror of
https://github.com/tecno-consultores/llm-lab
synced 2026-04-21 17:17:17 +00:00
WIP
This commit is contained in:
parent
f53187fc3a
commit
8e5b247cc4
7 changed files with 141 additions and 1 deletions
54
.env
Normal file
54
.env
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
# flowise
|
||||
FLOWISE_USERNAME=user
|
||||
FLOWISE_PASSWORD=1234
|
||||
DATABASE_TYPE=postgres
|
||||
DATABASE_HOST=postgres
|
||||
DATABASE_PORT=5432
|
||||
DATABASE_USER=user
|
||||
DATABASE_PASSWORD=KrFgLvKJRvinkEG5
|
||||
DATABASE_NAME=lab
|
||||
|
||||
# openwebui
|
||||
ENABLE_OPENAI_API=false
|
||||
OPENAI_API_KEY=sk-124781258123
|
||||
RAG_WEB_SEARCH_ENGINE=brave
|
||||
BRAVE_SEARCH_API_KEY=
|
||||
|
||||
# n8n
|
||||
DB_TYPE=postgresdb
|
||||
DB_POSTGRESDB_HOST=postgres
|
||||
DB_POSTGRESDB_PORT=5432
|
||||
DB_POSTGRESDB_DATABASE=lab
|
||||
DB_POSTGRESDB_USER=user
|
||||
DB_POSTGRESDB_PASSWORD=KrFgLvKJRvinkEG5
|
||||
|
||||
# langflow
|
||||
LANGFLOW_SUPERUSER=user
|
||||
LANGFLOW_SUPERUSER_PASSWORD=123456
|
||||
|
||||
# anythingllm
|
||||
SERVER_PORT=3001
|
||||
UID='1000'
|
||||
GID='1000'
|
||||
LLM_PROVIDER='ollama'
|
||||
OLLAMA_BASE_PATH='http://ollama:11434'
|
||||
OLLAMA_MODEL_PREF='llama3.2'
|
||||
OLLAMA_MODEL_TOKEN_LIMIT=4096
|
||||
OPEN_AI_KEY=
|
||||
OPEN_MODEL_PREF='gpt-4o'
|
||||
EMBEDDING_ENGINE='ollama'
|
||||
EMBEDDING_BASE_PATH='http://ollama:11434'
|
||||
EMBEDDING_MODEL_PREF='nomic-embed-text:latest'
|
||||
EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
|
||||
PASSWORDMINCHAR=8
|
||||
PASSWORDMAXCHAR=250
|
||||
PASSWORDLOWERCASE=1
|
||||
PASSWORDUPPERCASE=1
|
||||
PASSWORDNUMERIC=1
|
||||
PASSWORDSYMBOL=1
|
||||
PASSWORDREQUIREMENTS=4
|
||||
|
||||
# postgres
|
||||
POSTGRES_USER=user
|
||||
POSTGRES_PASSWORD=KrFgLvKJRvinkEG5
|
||||
POSTGRES_DB=lab
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
|
|
@ -6,3 +6,4 @@ openwebui
|
|||
flowise
|
||||
ollama
|
||||
.idea/
|
||||
.idea/
|
||||
|
|
|
|||
BIN
anythingllm/anythingllm.db
Normal file
BIN
anythingllm/anythingllm.db
Normal file
Binary file not shown.
27
anythingllm/comkey/ipc-priv.pem
Normal file
27
anythingllm/comkey/ipc-priv.pem
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEogIBAAKCAQEAy4oAaL2Lj3xwc94vaB5JjQFGfaT+KLpRv+ZXd+VGK/5abZN+
|
||||
+ETBS5y89p0yHlGSWq66xbnTp1AlvjiFLNeKkSehmnALEsKD+KyClv/WWazr29nX
|
||||
bBXaq8Li9aTfEztgjImz5wf1vqEB1oK9g9PdhvT3BsB+0C6JlHSOZGVWbiZ8BdyP
|
||||
l1T97Nu7Vdlpz5jBLSUcivDoTy1Du1ax9dGju4P/auDlJ76Vx8/bBS+x9AipGIaw
|
||||
WWOBf6tEmNHIk6AV8pT/ZJQoEy1KwzYao0YMkgXjiR/vn0iggnQbKIpATCDeUCwj
|
||||
FW3ad5/SctOB0lCHIvXLQWLJQdGYeGJQzIuupQIDAQABAoH/IIgDXg+YKWwk4mAL
|
||||
XljOPfs2M7QV60HcTw+XFf1PJI5CuxYX9RZdeU2pO2wPK/QIWvYSZAfktu6DGc7E
|
||||
JPv46bpWhJiOgdu/vBSBICFD4TwCITbFd5Zb8kKeg88WLby6cbvl78cV6qH76TfR
|
||||
koBfp8eqCSo7GosAR6SuyDJqM1sqRxdOJPpmkssLHjTkFJlCK2IvISt1Sadvm3YU
|
||||
H1mf9TBKpelfLvzABjZiqMKjMGJFGTEb1UuGMzIrhLIq66fwDaeE3eU6tDg7Zg2T
|
||||
xU5tq+4QJn9ChETJiZuqJPjhcRbvd1cGLNZ/bBkAjq+93x4fau0aKAdcafx7uwDx
|
||||
MoYBAoGBAO02L5n29emFWaR4EAovEU4XwnLhqtQyY5n16RoFCpdrTKYkR8jH320T
|
||||
aYPcO2ilnEkJ7rwm/ETL2+xYigQENFDv+Dwn+35WzWFyVjiEaJiY2UahYd25mThU
|
||||
BN+wyUROc7iXjnFPY5rLH86xi5qRDVEpbh7wBWzVlgFFFPPYIJClAoGBANupDfCA
|
||||
4STEJRK62oX6RbhQdMjcwy74HDTpAQZ4yKERQLzWywoNVOLERpRxjth7poGJW73Q
|
||||
/DPev/7Lzq9/UYtkDn5IFKw9/1WLklkOYz7uEZjAyIm6wMvvewCJJXRypyNJZwJ+
|
||||
blDQ6+SLcMksPo3eHKo8u5+Epphzg4y1BkYBAoGBAOZXgru4Uyzyv4x0xeaNs/ko
|
||||
GzrhJbguQgekAjmxAYrtt51xh39RjwTlH8XaWpoQjwVhrsBn/vlzF9iWVVb3GjIq
|
||||
A9uuW4lgEsl4dH8nHEZpzjrGxTjAXCeVdrIcgiRhJf/us3QWSjtq2TUltNFf3xWb
|
||||
7IIAQtRfRu1uvpzWlzC9AoGATOULSivzpMk9KSdcJBaOnY0ctMwDT8fvJ96BS2Cw
|
||||
6yF9Bh4zCZvNIW0QQBrYpJP0n9gT04j7ILVThMCX4/uv8hnesPpOslUpsyqe2qoc
|
||||
uATjCBREn2JO55U3++IGyJLyS3oAo3Da8VORoHnny6S/aS5d3oJQM/Fd2HRnkrQ0
|
||||
NAECgYEAixlW1u3KwhHqk4WTt0kZCaFSJ3l7lf7s6nEwN/Ax7oTD0sOk67TZCoGK
|
||||
36x5CFi7wYjpzz0slJm8rDKBH+smDO/pvMkp/raZ6HCs29bBuNai1lMMg8SzKvwk
|
||||
lx9MX1Ud5GAUg0cC6I4HnNZKimhrBK8P+Fj/MVBWZTuvP5QptWw=
|
||||
-----END RSA PRIVATE KEY-----
|
||||
8
anythingllm/comkey/ipc-pub.pem
Normal file
8
anythingllm/comkey/ipc-pub.pem
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
-----BEGIN RSA PUBLIC KEY-----
|
||||
MIIBCgKCAQEAy4oAaL2Lj3xwc94vaB5JjQFGfaT+KLpRv+ZXd+VGK/5abZN++ETB
|
||||
S5y89p0yHlGSWq66xbnTp1AlvjiFLNeKkSehmnALEsKD+KyClv/WWazr29nXbBXa
|
||||
q8Li9aTfEztgjImz5wf1vqEB1oK9g9PdhvT3BsB+0C6JlHSOZGVWbiZ8BdyPl1T9
|
||||
7Nu7Vdlpz5jBLSUcivDoTy1Du1ax9dGju4P/auDlJ76Vx8/bBS+x9AipGIawWWOB
|
||||
f6tEmNHIk6AV8pT/ZJQoEy1KwzYao0YMkgXjiR/vn0iggnQbKIpATCDeUCwjFW3a
|
||||
d5/SctOB0lCHIvXLQWLJQdGYeGJQzIuupQIDAQAB
|
||||
-----END RSA PUBLIC KEY-----
|
||||
|
|
@ -1,3 +1,13 @@
|
|||
volumes:
|
||||
# n8n_storage:
|
||||
# postgres_storage:
|
||||
# ollama_storage:
|
||||
qdrant_storage:
|
||||
|
||||
networks:
|
||||
demo:
|
||||
|
||||
|
||||
x-ollama-base: &ollama-base
|
||||
image: ollama/ollama:latest
|
||||
hostname: ollama
|
||||
|
|
@ -6,7 +16,7 @@ x-ollama-base: &ollama-base
|
|||
restart: ${restart}
|
||||
tty: true
|
||||
ports:
|
||||
- 11434:11434
|
||||
- 11435:11434
|
||||
ulimits:
|
||||
memlock: -1
|
||||
volumes:
|
||||
|
|
@ -142,6 +152,10 @@ x-n8n-base: &n8n-base
|
|||
- N8N_TEMPLATES_ENABLED=true
|
||||
- N8N_GRACEFUL_SHUTDOWN_TIMEOUT=10
|
||||
- QUEUE_HEALTH_CHECK_ACTIVE=${QUEUE_HEALTH_CHECK_ACTIVE}
|
||||
- N8N_HOST=n8n.seraph13.dev
|
||||
- WEBHOOK_URL=https://n8n.seraph13.dev
|
||||
- GENERIC_TIMEZONE=America/Caracas
|
||||
- TZ="America/Caracas"
|
||||
ports:
|
||||
- ${N8N_PORT}:${N8N_PORT}
|
||||
volumes:
|
||||
|
|
@ -275,6 +289,18 @@ x-chroma-base: &chroma-base
|
|||
timeout: 10s
|
||||
retries: 3
|
||||
|
||||
x-qdrant:
|
||||
image: qdrant/qdrant
|
||||
hostname: qdrant
|
||||
container_name: qdrant
|
||||
networks: ['demo']
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- 6333:6333
|
||||
volumes:
|
||||
- qdrant_storage:/qdrant/storage
|
||||
|
||||
|
||||
################################################
|
||||
|
||||
services:
|
||||
|
|
|
|||
24
finetuning/load_basemodel.py
Normal file
24
finetuning/load_basemodel.py
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
"""Load the Llama-2 chat base model and its tokenizer onto the GPU.

WIP scaffolding for a LoRA/SFT fine-tuning run: the dataset, LoRA config
and trainer imports are staged here for the follow-up steps.
"""
import torch
from datasets import load_dataset
from transformers import AutoModelForCausalLM, AutoTokenizer, TrainingArguments
from peft import LoraConfig
from trl import SFTTrainer


# Hugging Face Hub identifier of the base checkpoint to fine-tune.
base_model_name = "meta-llama/Llama-2-7b-chat-hf"

# Device that receives the full-precision model weights.
device = "cuda:0"

# NOTE(review): trust_remote_code executes code shipped with the checkpoint
# repository — acceptable only while the model source is trusted.
base_model = AutoModelForCausalLM.from_pretrained(
    base_model_name, trust_remote_code=True
).to(device)

# Tokenizer setup: causal-LM batches need an explicit pad token, so reuse
# EOS and pad on the right (the convention SFTTrainer expects).
tokenizer = AutoTokenizer.from_pretrained(
    base_model_name, trust_remote_code=True
)
tokenizer.pad_token = tokenizer.eos_token
tokenizer.padding_side = "right"
|
||||
Loading…
Reference in a new issue