# ollama.yaml

---
# LLM: served locally by Ollama (default REST endpoint on port 11434).
llm:
  provider: ollama
  config:
    model: 'llama2'
    temperature: 0.5
    top_p: 1
    stream: true
    # Quoted defensively: URLs with embedded colons are safest as strings.
    base_url: 'http://localhost:11434'

# Embeddings: computed locally via a HuggingFace sentence-embedding model.
embedder:
  provider: huggingface
  config:
    model: 'BAAI/bge-small-en-v1.5'