@@ -17,9 +17,6 @@ logger = logging.getLogger(__name__)
 @register_deserializable
 class HuggingFaceLlm(BaseLlm):
     def __init__(self, config: Optional[BaseLlmConfig] = None):
-        if "HUGGINGFACE_ACCESS_TOKEN" not in os.environ:
-            raise ValueError("Please set the HUGGINGFACE_ACCESS_TOKEN environment variable.")
-
         try:
             importlib.import_module("huggingface_hub")
         except ModuleNotFoundError:
@@ -29,6 +26,8 @@ class HuggingFaceLlm(BaseLlm):
             ) from None
 
         super().__init__(config=config)
+        if not self.config.api_key and "HUGGINGFACE_ACCESS_TOKEN" not in os.environ:
+            raise ValueError("Please set the HUGGINGFACE_ACCESS_TOKEN environment variable or pass it in the config.")
 
     def get_llm_model_answer(self, prompt):
         if self.config.system_prompt:
@@ -60,9 +59,10 @@ class HuggingFaceLlm(BaseLlm):
             raise ValueError("`top_p` must be > 0.0 and < 1.0")
 
         model = config.model
+        api_key = config.api_key or os.getenv("HUGGINGFACE_ACCESS_TOKEN")
         logger.info(f"Using HuggingFaceHub with model {model}")
         llm = HuggingFaceHub(
-            huggingfacehub_api_token=os.environ["HUGGINGFACE_ACCESS_TOKEN"],
+            huggingfacehub_api_token=api_key,
             repo_id=model,
             model_kwargs=model_kwargs,
         )
@@ -70,8 +70,9 @@ class HuggingFaceLlm(BaseLlm):
 
     @staticmethod
     def _from_endpoint(prompt: str, config: BaseLlmConfig) -> str:
+        api_key = config.api_key or os.getenv("HUGGINGFACE_ACCESS_TOKEN")
         llm = HuggingFaceEndpoint(
-            huggingfacehub_api_token=os.environ["HUGGINGFACE_ACCESS_TOKEN"],
+            huggingfacehub_api_token=api_key,
             endpoint_url=config.endpoint,
             task="text-generation",
             model_kwargs=config.model_kwargs,
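
With this change the token can be supplied either through `config.api_key` or, as before, through the `HUGGINGFACE_ACCESS_TOKEN` environment variable. A minimal usage sketch follows; the import paths and the extra `model` field on `BaseLlmConfig` are assumptions based on this diff, not part of it:

```python
import os

# Assumed module paths for illustration; adjust to the actual package layout.
from embedchain.config import BaseLlmConfig
from embedchain.llm.huggingface import HuggingFaceLlm

# Option 1: pass the token explicitly via the config (new behaviour).
config = BaseLlmConfig(
    model="google/flan-t5-xxl",  # hypothetical Hugging Face Hub repo id
    api_key="hf_xxx",            # placeholder token
)
llm = HuggingFaceLlm(config=config)

# Option 2: rely on the environment variable, as before this change.
os.environ["HUGGINGFACE_ACCESS_TOKEN"] = "hf_xxx"
llm_from_env = HuggingFaceLlm(config=BaseLlmConfig(model="google/flan-t5-xxl"))
```

If neither `config.api_key` nor the environment variable is set, the constructor now raises the `ValueError` added in the second hunk instead of failing later with a `KeyError` on `os.environ`.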