################################
# General Settings
################################
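# Log verbosity; standard levels (DEBUG, INFO, WARNING, ERROR) are assumed here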
LOGGING_LEVEL=DEBUG

################################
# Object Storage Settings
################################

# NOTE: MinIO requires the secret key to be at least 8 characters long
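# MinIO / S3-compatible endpoint; port 9000 is the MinIO default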
S3_ENDPOINT=http://localhost:9000
S3_ACCESS_KEY=admin
S3_SECRET_KEY=secret-admin-key
BUCKET_NAME=rag-bucket
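# Path prefix inside the bucket under which this project's files are stored (assumed from the variable name)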
BUCKET_FILE_PATH=my-project-name

################################
# Vector DB Settings
################################
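# OpenSearch host and port (9200 is the default OpenSearch REST port)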
VECTOR_STORE_ENDPOINT=localhost
VECTOR_STORE_PORT=9200
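# Whether to use SSL/TLS for the OpenSearch connection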
OPENSEARCH_USE_SSL=True

################################
# LLM Settings
################################

# Which LLM to use
LLM_OPTION=ollamallm

# LLM_API_ENDPOINT=http://<ip-of-ollama-instance>:11434
# LLM_API_ENDPOINT=http://127.0.0.1:11434
LLM_API_ENDPOINT=http://18.192.122.66:11434

# Only needed if your LLM setup requires it (not needed for self-hosted Ollama)
LLM_API_KEY=placeholder

# Valid language options: "en", "de", "multi"
LLM_LANGUAGE=de

# LLM model to be used - "mistral", "phi3", "llama3", etc. (check available models at <ip_of_ollama_instance>:11434/api/tags)
LLM_MODEL_NAME=mistral

# CHUNKING
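# Size of each text chunk and overlap between consecutive chunks (units depend on the splitter, typically characters or tokens)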
CHUNKING_CHUNK_SIZE=100
CHUNKING_CHUNK_OVERLAP=10