# groq.py
  1. import os
  2. from typing import Optional
  3. from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
  4. from langchain.schema import HumanMessage, SystemMessage
  5. try:
  6. from langchain_groq import ChatGroq
  7. except ImportError:
  8. raise ImportError("Groq requires extra dependencies. Install with `pip install langchain-groq`") from None
  9. from embedchain.config import BaseLlmConfig
  10. from embedchain.helpers.json_serializable import register_deserializable
  11. from embedchain.llm.base import BaseLlm
  12. @register_deserializable
  13. class GroqLlm(BaseLlm):
  14. def __init__(self, config: Optional[BaseLlmConfig] = None):
  15. super().__init__(config=config)
  16. def get_llm_model_answer(self, prompt) -> str:
  17. response = self._get_answer(prompt, self.config)
  18. return response
  19. def _get_answer(self, prompt: str, config: BaseLlmConfig) -> str:
  20. messages = []
  21. if config.system_prompt:
  22. messages.append(SystemMessage(content=config.system_prompt))
  23. messages.append(HumanMessage(content=prompt))
  24. api_key = config.api_key or os.environ["GROQ_API_KEY"]
  25. kwargs = {
  26. "model_name": config.model or "mixtral-8x7b-32768",
  27. "temperature": config.temperature,
  28. "groq_api_key": api_key,
  29. }
  30. if config.stream:
  31. callbacks = config.callbacks if config.callbacks else [StreamingStdOutCallbackHandler()]
  32. chat = ChatGroq(**kwargs, streaming=config.stream, callbacks=callbacks, api_key=api_key)
  33. else:
  34. chat = ChatGroq(**kwargs)
  35. return chat.invoke(messages).content