# vertex_ai.py

import importlib
import logging
from typing import Optional

from embedchain.config import BaseLlmConfig
from embedchain.helper.json_serializable import register_deserializable
from embedchain.llm.base import BaseLlm

# Fail fast with an actionable message if the optional VertexAI extra is missing.
try:
    importlib.import_module("vertexai")
except ModuleNotFoundError:
    raise ModuleNotFoundError(
        "The required dependencies for VertexAI are not installed. "
        'Please install with `pip install --upgrade "embedchain[vertexai]"`'
    ) from None


@register_deserializable
class VertexAILlm(BaseLlm):
    def __init__(self, config: Optional[BaseLlmConfig] = None):
        super().__init__(config=config)

    def get_llm_model_answer(self, prompt):
        return VertexAILlm._get_answer(prompt=prompt, config=self.config)

    @staticmethod
    def _get_answer(prompt: str, config: BaseLlmConfig) -> str:
        # Imported lazily so the langchain VertexAI wrapper is only required
        # when this LLM is actually used.
        from langchain.chat_models import ChatVertexAI

        chat = ChatVertexAI(temperature=config.temperature, model=config.model)

        # VertexAI chat models here do not honor `top_p`; warn instead of
        # silently ignoring a non-default value.
        if config.top_p and config.top_p != 1:
            logging.warning("Config option `top_p` is not supported by this model.")

        messages = BaseLlm._get_messages(prompt, system_prompt=config.system_prompt)
        return chat(messages).content
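
# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal example of how this class is typically driven. It assumes Google
# Cloud credentials are already configured in the environment (e.g. via
# `gcloud auth application-default login`) and that the `embedchain[vertexai]`
# extra is installed. The model name "chat-bison" is an assumption for
# illustration, not something pinned by this file.
if __name__ == "__main__":
    config = BaseLlmConfig(model="chat-bison", temperature=0.2)
    llm = VertexAILlm(config=config)
    print(llm.get_llm_model_answer("What is retrieval-augmented generation?"))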