# Model configuration
MODEL_NAME = "llama-4-scout"      # User specified model
EMBEDDING_MODEL = "BAAI/bge-m3"   # Best multilingual embedding model

# RAG Configuration
SIMILARITY_TOP_K = 5
CHUNK_SIZE = 1024
CHUNK_OVERLAP = 200

# Llama.cpp configuration
LLM_TEMPERATURE = 0.1
LLM_TOP_P = 0.9

# ChromaDB configuration
PERSIST_DIR = "./chroma_db"
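
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the config): one way these settings
# could be wired together, assuming `chromadb`, `sentence-transformers`, and
# `llama-cpp-python` are installed. The GGUF path, collection name, and the
# simple character-based chunker below are assumptions for this example, not
# values or methods defined by the config itself.
# ---------------------------------------------------------------------------
import chromadb
from sentence_transformers import SentenceTransformer
from llama_cpp import Llama

# Embedding model and persistent vector store driven by the config values above.
embedder = SentenceTransformer(EMBEDDING_MODEL)
client = chromadb.PersistentClient(path=PERSIST_DIR)
collection = client.get_or_create_collection(name="documents")  # name is illustrative


def chunk_text(text: str) -> list[str]:
    """Split text into overlapping character chunks using CHUNK_SIZE / CHUNK_OVERLAP."""
    step = CHUNK_SIZE - CHUNK_OVERLAP
    return [text[i : i + CHUNK_SIZE] for i in range(0, len(text), step)]


def index_document(doc_id: str, text: str) -> None:
    """Embed each chunk and store it in the ChromaDB collection."""
    chunks = chunk_text(text)
    embeddings = embedder.encode(chunks).tolist()
    collection.add(
        ids=[f"{doc_id}-{i}" for i in range(len(chunks))],
        documents=chunks,
        embeddings=embeddings,
    )


def retrieve(query: str) -> list[str]:
    """Return the SIMILARITY_TOP_K most similar chunks for a query."""
    query_embedding = embedder.encode([query]).tolist()
    results = collection.query(
        query_embeddings=query_embedding,
        n_results=SIMILARITY_TOP_K,
    )
    return results["documents"][0]


def answer(query: str) -> str:
    """Generate an answer with llama.cpp, grounding on retrieved context."""
    context = "\n\n".join(retrieve(query))
    # Assumption: MODEL_NAME maps to a local GGUF file under ./models/.
    llm = Llama(model_path=f"./models/{MODEL_NAME}.gguf", n_ctx=4096)
    prompt = f"Context:\n{context}\n\nQuestion: {query}\nAnswer:"
    output = llm(
        prompt,
        temperature=LLM_TEMPERATURE,
        top_p=LLM_TOP_P,
        max_tokens=512,
    )
    return output["choices"][0]["text"]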