ChatConfig.py

from string import Template
from typing import Optional

from embedchain.config.QueryConfig import QueryConfig
from embedchain.helper_classes.json_serializable import register_deserializable

DEFAULT_PROMPT = """
You are a chatbot having a conversation with a human. You are given chat
history and context.
You need to answer the query considering context, chat history and your knowledge base. If you don't know the answer or the answer is neither contained in the context nor in history, then simply say "I don't know".
$context
History: $history
Query: $query
Helpful Answer:
"""  # noqa:E501

DEFAULT_PROMPT_TEMPLATE = Template(DEFAULT_PROMPT)
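
# For reference, a minimal sketch (illustrative values, not part of this file)
# of how the placeholders above are filled via standard string.Template
# substitution:
#
#   DEFAULT_PROMPT_TEMPLATE.substitute(
#       context="Context: ...",
#       history="Human: Hi\nAI: Hello!",
#       query="What is embedchain?",
#   )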

@register_deserializable
class ChatConfig(QueryConfig):
    """
    Config for the `chat` method, inherits from `QueryConfig`.
    """

    def __init__(
        self,
        number_documents=None,
        template: Template = None,
        model=None,
        temperature=None,
        max_tokens=None,
        top_p=None,
        stream: bool = False,
        deployment_name=None,
        system_prompt: Optional[str] = None,
        where=None,
    ):
        """
        Initializes the ChatConfig instance.

        :param number_documents: Number of documents to pull from the database as
        context.
        :param template: Optional. The `Template` instance to use as a template for
        the prompt.
        :param model: Optional. Controls the OpenAI model used.
        :param temperature: Optional. Controls the randomness of the model's output.
        Higher values (closer to 1) make the output more random, lower values make it
        more deterministic.
        :param max_tokens: Optional. Controls how many tokens are generated.
        :param top_p: Optional. Controls the diversity of words. Higher values
        (closer to 1) make word selection more diverse, lower values make it less
        diverse.
        :param stream: Optional. Controls whether the response is streamed back to the user.
        :param deployment_name: t.b.a.
        :param system_prompt: Optional. System prompt string.
        :param where: Optional. A dictionary of key-value pairs to filter the database results.
        :raises ValueError: If the template is not valid; the template must contain
        $context, $query and $history.
        """
        if template is None:
            template = DEFAULT_PROMPT_TEMPLATE

        # History is set as [0] to ensure that there is always a history, so that
        # a second template isn't needed. Having two templates would complicate
        # things because the history is not user controlled.
        super().__init__(
            number_documents=number_documents,
            template=template,
            model=model,
            temperature=temperature,
            max_tokens=max_tokens,
            top_p=top_p,
            history=[0],
            stream=stream,
            deployment_name=deployment_name,
            system_prompt=system_prompt,
            where=where,
        )

    def set_history(self, history):
        """
        Chat history is not user provided and not set at initialization time.

        :param history: (string) history to set
        """
        self.history = history
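

# --- Usage sketch (illustrative, not part of the library's examples) ---
# Shows how a ChatConfig might be built with a custom prompt template.
# The template must contain $context, $query and $history; otherwise the
# parent QueryConfig is expected to raise a ValueError.
if __name__ == "__main__":
    custom_template = Template(
        """Use the following context and chat history to answer the question.
$context
History: $history
Query: $query
Helpful Answer:"""
    )
    config = ChatConfig(
        number_documents=3,
        template=custom_template,
        temperature=0.2,
        stream=True,
    )
    # The chat flow injects history later; it is not set at initialization time.
    config.set_history("Human: Hi\nAI: Hello!")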