# factory.py — provider factories for LLMs, embedders, and vector databases.
  1. import importlib
  2. def load_class(class_type):
  3. module_path, class_name = class_type.rsplit(".", 1)
  4. module = importlib.import_module(module_path)
  5. return getattr(module, class_name)
  6. class LlmFactory:
  7. provider_to_class = {
  8. "anthropic": "embedchain.llm.anthropic.AnthropicLlm",
  9. "azure_openai": "embedchain.llm.azure_openai.AzureOpenAILlm",
  10. "cohere": "embedchain.llm.cohere.CohereLlm",
  11. "together": "embedchain.llm.together.TogetherLlm",
  12. "gpt4all": "embedchain.llm.gpt4all.GPT4ALLLlm",
  13. "ollama": "embedchain.llm.ollama.OllamaLlm",
  14. "huggingface": "embedchain.llm.huggingface.HuggingFaceLlm",
  15. "jina": "embedchain.llm.jina.JinaLlm",
  16. "llama2": "embedchain.llm.llama2.Llama2Llm",
  17. "openai": "embedchain.llm.openai.OpenAILlm",
  18. "vertexai": "embedchain.llm.vertex_ai.VertexAILlm",
  19. "google": "embedchain.llm.google.GoogleLlm",
  20. "aws_bedrock": "embedchain.llm.aws_bedrock.AWSBedrockLlm",
  21. "mistralai": "embedchain.llm.mistralai.MistralAILlm",
  22. "groq": "embedchain.llm.groq.GroqLlm",
  23. "nvidia": "embedchain.llm.nvidia.NvidiaLlm",
  24. "vllm": "embedchain.llm.vllm.VLLM",
  25. }
  26. provider_to_config_class = {
  27. "embedchain": "embedchain.config.llm.base.BaseLlmConfig",
  28. "openai": "embedchain.config.llm.base.BaseLlmConfig",
  29. "anthropic": "embedchain.config.llm.base.BaseLlmConfig",
  30. }
  31. @classmethod
  32. def create(cls, provider_name, config_data):
  33. class_type = cls.provider_to_class.get(provider_name)
  34. # Default to embedchain base config if the provider is not in the config map
  35. config_name = "embedchain" if provider_name not in cls.provider_to_config_class else provider_name
  36. config_class_type = cls.provider_to_config_class.get(config_name)
  37. if class_type:
  38. llm_class = load_class(class_type)
  39. llm_config_class = load_class(config_class_type)
  40. return llm_class(config=llm_config_class(**config_data))
  41. else:
  42. raise ValueError(f"Unsupported Llm provider: {provider_name}")
  43. class EmbedderFactory:
  44. provider_to_class = {
  45. "azure_openai": "embedchain.embedder.openai.OpenAIEmbedder",
  46. "gpt4all": "embedchain.embedder.gpt4all.GPT4AllEmbedder",
  47. "huggingface": "embedchain.embedder.huggingface.HuggingFaceEmbedder",
  48. "openai": "embedchain.embedder.openai.OpenAIEmbedder",
  49. "vertexai": "embedchain.embedder.vertexai.VertexAIEmbedder",
  50. "google": "embedchain.embedder.google.GoogleAIEmbedder",
  51. "mistralai": "embedchain.embedder.mistralai.MistralAIEmbedder",
  52. "nvidia": "embedchain.embedder.nvidia.NvidiaEmbedder",
  53. "cohere": "embedchain.embedder.cohere.CohereEmbedder",
  54. "ollama": "embedchain.embedder.ollama.OllamaEmbedder",
  55. }
  56. provider_to_config_class = {
  57. "azure_openai": "embedchain.config.embedder.base.BaseEmbedderConfig",
  58. "google": "embedchain.config.embedder.google.GoogleAIEmbedderConfig",
  59. "gpt4all": "embedchain.config.embedder.base.BaseEmbedderConfig",
  60. "huggingface": "embedchain.config.embedder.base.BaseEmbedderConfig",
  61. "openai": "embedchain.config.embedder.base.BaseEmbedderConfig",
  62. "ollama": "embedchain.config.embedder.ollama.OllamaEmbedderConfig",
  63. }
  64. @classmethod
  65. def create(cls, provider_name, config_data):
  66. class_type = cls.provider_to_class.get(provider_name)
  67. # Default to openai config if the provider is not in the config map
  68. config_name = "openai" if provider_name not in cls.provider_to_config_class else provider_name
  69. config_class_type = cls.provider_to_config_class.get(config_name)
  70. if class_type:
  71. embedder_class = load_class(class_type)
  72. embedder_config_class = load_class(config_class_type)
  73. return embedder_class(config=embedder_config_class(**config_data))
  74. else:
  75. raise ValueError(f"Unsupported Embedder provider: {provider_name}")
  76. class VectorDBFactory:
  77. provider_to_class = {
  78. "chroma": "embedchain.vectordb.chroma.ChromaDB",
  79. "elasticsearch": "embedchain.vectordb.elasticsearch.ElasticsearchDB",
  80. "opensearch": "embedchain.vectordb.opensearch.OpenSearchDB",
  81. "pinecone": "embedchain.vectordb.pinecone.PineconeDB",
  82. "qdrant": "embedchain.vectordb.qdrant.QdrantDB",
  83. "weaviate": "embedchain.vectordb.weaviate.WeaviateDB",
  84. "zilliz": "embedchain.vectordb.zilliz.ZillizVectorDB",
  85. }
  86. provider_to_config_class = {
  87. "chroma": "embedchain.config.vectordb.chroma.ChromaDbConfig",
  88. "elasticsearch": "embedchain.config.vectordb.elasticsearch.ElasticsearchDBConfig",
  89. "opensearch": "embedchain.config.vectordb.opensearch.OpenSearchDBConfig",
  90. "pinecone": "embedchain.config.vectordb.pinecone.PineconeDBConfig",
  91. "qdrant": "embedchain.config.vectordb.qdrant.QdrantDBConfig",
  92. "weaviate": "embedchain.config.vectordb.weaviate.WeaviateDBConfig",
  93. "zilliz": "embedchain.config.vectordb.zilliz.ZillizDBConfig",
  94. }
  95. @classmethod
  96. def create(cls, provider_name, config_data):
  97. class_type = cls.provider_to_class.get(provider_name)
  98. config_class_type = cls.provider_to_config_class.get(provider_name)
  99. if class_type:
  100. embedder_class = load_class(class_type)
  101. embedder_config_class = load_class(config_class_type)
  102. return embedder_class(config=embedder_config_class(**config_data))
  103. else:
  104. raise ValueError(f"Unsupported Embedder provider: {provider_name}")