@@ -1,5 +1,6 @@
 import openai
 import os
+import logging
 from string import Template
 
 from chromadb.utils import embedding_functions
@@ -181,7 +182,9 @@ class EmbedChain:
             config = QueryConfig()
         context = self.retrieve_from_database(input_query)
         prompt = self.generate_prompt(input_query, context, config.template)
+        logging.info(f"Prompt: {prompt}")
         answer = self.get_answer_from_llm(prompt, config)
+        logging.info(f"Answer: {answer}")
         return answer
 
     def generate_chat_prompt(self, input_query, context, chat_history=''):
@@ -224,13 +227,16 @@ class EmbedChain:
             context,
             chat_history=chat_history,
         )
+        logging.info(f"Prompt: {prompt}")
         answer = self.get_answer_from_llm(prompt, config)
         memory.chat_memory.add_user_message(input_query)
+
         if isinstance(answer, str):
             memory.chat_memory.add_ai_message(answer)
+            logging.info(f"Answer: {answer}")
             return answer
         else:
-            #this is a streamed response and needs to be handled differently
+            #this is a streamed response and needs to be handled differently.
             return self._stream_chat_response(answer)
 
     def _stream_chat_response(self, answer):
@@ -239,6 +245,7 @@ class EmbedChain:
             streamed_answer.join(chunk)
             yield chunk
         memory.chat_memory.add_ai_message(streamed_answer)
+        logging.info(f"Answer: {streamed_answer}")
 
 
     def dry_run(self, input_query, config: QueryConfig = None):
@@ -258,6 +265,7 @@ class EmbedChain:
             config = QueryConfig()
         context = self.retrieve_from_database(input_query)
         prompt = self.generate_prompt(input_query, context, config.template)
+        logging.info(f"Prompt: {prompt}")
         return prompt
 
     def count(self):
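
Note: the logging.info() calls added above only produce visible output if the host application configures logging at INFO level; Python's default last-resort handler emits WARNING and above only. A minimal sketch of enabling the new output (this setup is an assumption about the calling code, not part of the patch):

    import logging

    # Assumed caller-side setup: route INFO-level records (including the new
    # "Prompt: ..." and "Answer: ..." messages) to stderr.
    logging.basicConfig(level=logging.INFO)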