# factory.py — map provider names to concrete LLM, embedder, and vector-DB classes.
  1. import importlib
  2. def load_class(class_type):
  3. module_path, class_name = class_type.rsplit(".", 1)
  4. module = importlib.import_module(module_path)
  5. return getattr(module, class_name)
  6. class LlmFactory:
  7. provider_to_class = {
  8. "anthropic": "embedchain.llm.anthropic.AnthropicLlm",
  9. "azure_openai": "embedchain.llm.azure_openai.AzureOpenAILlm",
  10. "cohere": "embedchain.llm.cohere.CohereLlm",
  11. "gpt4all": "embedchain.llm.gpt4all.GPT4ALLLlm",
  12. "huggingface": "embedchain.llm.huggingface.HuggingFaceLlm",
  13. "jina": "embedchain.llm.jina.JinaLlm",
  14. "llama2": "embedchain.llm.llama2.Llama2Llm",
  15. "openai": "embedchain.llm.openai.OpenAILlm",
  16. "vertexai": "embedchain.llm.vertex_ai.VertexAILlm",
  17. }
  18. provider_to_config_class = {
  19. "embedchain": "embedchain.config.llm.base.BaseLlmConfig",
  20. "openai": "embedchain.config.llm.base.BaseLlmConfig",
  21. "anthropic": "embedchain.config.llm.base.BaseLlmConfig",
  22. }
  23. @classmethod
  24. def create(cls, provider_name, config_data):
  25. class_type = cls.provider_to_class.get(provider_name)
  26. # Default to embedchain base config if the provider is not in the config map
  27. config_name = "embedchain" if provider_name not in cls.provider_to_config_class else provider_name
  28. config_class_type = cls.provider_to_config_class.get(config_name)
  29. if class_type:
  30. llm_class = load_class(class_type)
  31. llm_config_class = load_class(config_class_type)
  32. return llm_class(config=llm_config_class(**config_data))
  33. else:
  34. raise ValueError(f"Unsupported Llm provider: {provider_name}")
  35. class EmbedderFactory:
  36. provider_to_class = {
  37. "gpt4all": "embedchain.embedder.gpt4all.GPT4AllEmbedder",
  38. "huggingface": "embedchain.embedder.huggingface.HuggingFaceEmbedder",
  39. "vertexai": "embedchain.embedder.vertexai.VertexAIEmbedder",
  40. "azure_openai": "embedchain.embedder.openai.OpenAIEmbedder",
  41. "openai": "embedchain.embedder.openai.OpenAIEmbedder",
  42. }
  43. provider_to_config_class = {
  44. "openai": "embedchain.config.embedder.base.BaseEmbedderConfig",
  45. "azure_openai": "embedchain.config.embedder.base.BaseEmbedderConfig",
  46. }
  47. @classmethod
  48. def create(cls, provider_name, config_data):
  49. class_type = cls.provider_to_class.get(provider_name)
  50. # Default to openai config if the provider is not in the config map
  51. config_name = "openai" if provider_name not in cls.provider_to_config_class else provider_name
  52. config_class_type = cls.provider_to_config_class.get(config_name)
  53. if class_type:
  54. embedder_class = load_class(class_type)
  55. embedder_config_class = load_class(config_class_type)
  56. return embedder_class(config=embedder_config_class(**config_data))
  57. else:
  58. raise ValueError(f"Unsupported Embedder provider: {provider_name}")
  59. class VectorDBFactory:
  60. provider_to_class = {
  61. "chroma": "embedchain.vectordb.chroma.ChromaDB",
  62. "elasticsearch": "embedchain.vectordb.elasticsearch.ElasticsearchDB",
  63. "opensearch": "embedchain.vectordb.opensearch.OpenSearchDB",
  64. "pinecone": "embedchain.vectordb.pinecone.PineconeDB",
  65. }
  66. provider_to_config_class = {
  67. "chroma": "embedchain.config.vectordb.chroma.ChromaDbConfig",
  68. "elasticsearch": "embedchain.config.vectordb.elasticsearch.ElasticsearchDBConfig",
  69. "opensearch": "embedchain.config.vectordb.opensearch.OpenSearchDBConfig",
  70. "pinecone": "embedchain.config.vectordb.pinecone.PineconeDBConfig",
  71. }
  72. @classmethod
  73. def create(cls, provider_name, config_data):
  74. class_type = cls.provider_to_class.get(provider_name)
  75. config_class_type = cls.provider_to_config_class.get(provider_name)
  76. if class_type:
  77. embedder_class = load_class(class_type)
  78. embedder_config_class = load_class(config_class_type)
  79. return embedder_class(config=embedder_config_class(**config_data))
  80. else:
  81. raise ValueError(f"Unsupported Embedder provider: {provider_name}")