# test_vertex_ai.py

from unittest.mock import MagicMock, patch

import pytest
from langchain.schema import HumanMessage, SystemMessage

from embedchain.config import BaseLlmConfig
from embedchain.core.db.database import database_manager
from embedchain.llm.vertex_ai import VertexAILlm


# Initialise the database engine before every test (autouse fixture).
@pytest.fixture(autouse=True)
def setup_database():
    database_manager.setup_engine()


@pytest.fixture
def vertexai_llm():
    config = BaseLlmConfig(temperature=0.6, model="chat-bison")
    return VertexAILlm(config)


def test_get_llm_model_answer(vertexai_llm):
    # Patch _get_answer so no real Vertex AI call is made.
    with patch.object(VertexAILlm, "_get_answer", return_value="Test Response") as mock_method:
        prompt = "Test Prompt"
        response = vertexai_llm.get_llm_model_answer(prompt)
        assert response == "Test Response"
        mock_method.assert_called_once_with(prompt=prompt, config=vertexai_llm.config)


@patch("embedchain.llm.vertex_ai.ChatVertexAI")
def test_get_answer(mock_chat_vertexai, vertexai_llm, caplog):
    # Mock the ChatVertexAI client so _get_answer never reaches the Vertex AI API.
    mock_chat_vertexai.return_value.invoke.return_value = MagicMock(content="Test Response")
    config = vertexai_llm.config
    prompt = "Test Prompt"
    messages = vertexai_llm._get_messages(prompt)
    response = vertexai_llm._get_answer(prompt, config)
    mock_chat_vertexai.return_value.invoke.assert_called_once_with(messages)
    assert response == "Test Response"
    assert "Config option `top_p` is not supported by this model." not in caplog.text


def test_get_messages(vertexai_llm):
    # _get_messages should wrap the system and user prompts in LangChain message objects.
    prompt = "Test Prompt"
    system_prompt = "Test System Prompt"
    messages = vertexai_llm._get_messages(prompt, system_prompt)
    assert messages == [
        SystemMessage(content="Test System Prompt", additional_kwargs={}),
        HumanMessage(content="Test Prompt", additional_kwargs={}, example=False),
    ]