anthropic.py

import logging
import os
from typing import Optional

from embedchain.config import BaseLlmConfig
from embedchain.helpers.json_serializable import register_deserializable
from embedchain.llm.base import BaseLlm


@register_deserializable
class AnthropicLlm(BaseLlm):
    def __init__(self, config: Optional[BaseLlmConfig] = None):
        # The Anthropic API key must be set before this LLM can be used.
        if "ANTHROPIC_API_KEY" not in os.environ:
            raise ValueError("Please set the ANTHROPIC_API_KEY environment variable.")
        super().__init__(config=config)

    def get_llm_model_answer(self, prompt):
        return AnthropicLlm._get_answer(prompt=prompt, config=self.config)

    @staticmethod
    def _get_answer(prompt: str, config: BaseLlmConfig) -> str:
        # Imported lazily so langchain is only required when Anthropic is actually used.
        from langchain.chat_models import ChatAnthropic

        chat = ChatAnthropic(
            anthropic_api_key=os.environ["ANTHROPIC_API_KEY"], temperature=config.temperature, model=config.model
        )

        if config.max_tokens and config.max_tokens != 1000:
            logging.warning("Config option `max_tokens` is not supported by this model.")

        messages = BaseLlm._get_messages(prompt, system_prompt=config.system_prompt)
        return chat(messages).content
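
A minimal usage sketch, not part of the file above. It assumes the file lives at embedchain/llm/anthropic.py (so the class is importable as embedchain.llm.anthropic.AnthropicLlm) and that BaseLlmConfig accepts model and temperature keyword arguments, which the code above reads from the config object; the model name is only an example.

import os
from embedchain.config import BaseLlmConfig
from embedchain.llm.anthropic import AnthropicLlm  # assumed module path for the file above

# The constructor raises ValueError unless ANTHROPIC_API_KEY is set in the environment.
assert "ANTHROPIC_API_KEY" in os.environ

config = BaseLlmConfig(model="claude-instant-1", temperature=0.5)  # example values
llm = AnthropicLlm(config=config)
print(llm.get_llm_model_answer("Summarize what Embedchain does in one sentence."))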