App.py

from typing import Optional

from embedchain.config import (AppConfig, BaseEmbedderConfig, BaseLlmConfig,
                               ChromaDbConfig)
from embedchain.embedchain import EmbedChain
from embedchain.embedder.openai_embedder import OpenAiEmbedder
from embedchain.helper_classes.json_serializable import register_deserializable
from embedchain.llm.openai_llm import OpenAiLlm
from embedchain.vectordb.chroma_db import ChromaDB


@register_deserializable
class App(EmbedChain):
    """
    The EmbedChain app.
    Has three main methods: add, query, and dry_run.

    add(data_type, url): adds the data from the given URL to the vector db.
    query(query): finds an answer to the given query using the vector database and the LLM.
    dry_run(query): tests your prompt without consuming tokens.
    """

    def __init__(
        self,
        config: AppConfig = None,
        llm_config: BaseLlmConfig = None,
        chromadb_config: Optional[ChromaDbConfig] = None,
        system_prompt: Optional[str] = None,
    ):
        """
        :param config: AppConfig instance to load as configuration. Optional.
        :param llm_config: BaseLlmConfig instance for the OpenAI LLM. Optional.
        :param chromadb_config: ChromaDbConfig instance for the Chroma vector database. Optional.
        :param system_prompt: System prompt string. Optional.
        """
        if config is None:
            config = AppConfig()

        llm = OpenAiLlm(config=llm_config)
        embedder = OpenAiEmbedder(config=BaseEmbedderConfig(model="text-embedding-ada-002"))
        database = ChromaDB(config=chromadb_config)
        super().__init__(config, llm, db=database, embedder=embedder, system_prompt=system_prompt)
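

# A minimal usage sketch, kept behind a __main__ guard so it never runs on
# import. It only exercises the add/query/dry_run interface described in the
# class docstring; the data_type, URL, and question below are illustrative
# placeholders, and a valid OPENAI_API_KEY is assumed to be set in the environment.
if __name__ == "__main__":
    app = App()
    app.add("web_page", "https://www.example.com/article")  # index a source into the vector db
    print(app.query("What is the article about?"))  # answer using the vector db and the LLM
    print(app.dry_run("What is the article about?"))  # inspect the final prompt without spending tokens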