Examples
Fully local setup
pipeline: naive
plugins:
llm:
provider: llama
model_path: ./models/llama-3.1-8b.gguf
embedding:
provider: bge
model: BAAI/bge-large-en-v1.5
vectorstore:
provider: faiss
index_path: .ragway/index
Hybrid cloud setup
pipeline: hybrid
plugins:
llm:
provider: groq
model: llama-3.3-70b-versatile
api_key: ${GROQ_API_KEY}
embedding:
provider: openai
model: text-embedding-3-small
api_key: ${OPENAI_API_KEY}
vectorstore:
provider: qdrant
retrieval:
strategy: hybrid
hybrid_alpha: 0.5
Long context summarization
pipeline: long_context
plugins:
llm:
provider: anthropic
model: claude-opus-4-6