@@ -14,7 +14,7 @@ class GPT4ALLLlm(BaseLlm):
         self.instance = GPT4ALLLlm._get_instance(self.config.model)
 
     def get_llm_model_answer(self, prompt):
-        return self._get_gpt4all_answer(prompt=prompt, config=self.config)
+        return self._get_answer(prompt=prompt, config=self.config)
 
     @staticmethod
     def _get_instance(model):
@@ -22,12 +22,12 @@ class GPT4ALLLlm(BaseLlm):
             from gpt4all import GPT4All
         except ModuleNotFoundError:
             raise ModuleNotFoundError(
-                "The GPT4All python package is not installed. Please install it with `pip install --upgrade embedchain[opensource]`"  # noqa E501
+                "The GPT4All python package is not installed. Please install it with `pip install --upgrade embedchain[opensource]`"  # noqa E501
             ) from None
         return GPT4All(model_name=model)
 
-    def _get_gpt4all_answer(self, prompt: str, config: BaseLlmConfig) -> Union[str, Iterable]:
+    def _get_answer(self, prompt: str, config: BaseLlmConfig) -> Union[str, Iterable]:
         if config.model and config.model != self.config.model:
             raise RuntimeError(
                 "OpenSourceApp does not support switching models at runtime. Please create a new app instance."