Browse source

Add support for http clients in config (#1355)

Abdur Rahman Nawaz, 1 year ago
Parent
Commit
ebc5e25f98
2 changed files with 6 additions and 0 deletions
  1. 4 0
      embedchain/config/llm/base.py
  2. 2 0
      embedchain/llm/openai.py

+ 4 - 0
embedchain/config/llm/base.py

@@ -98,6 +98,8 @@ class BaseLlmConfig(BaseConfig):
         base_url: Optional[str] = None,
         endpoint: Optional[str] = None,
         model_kwargs: Optional[dict[str, Any]] = None,
+        http_client: Optional[Any] = None,
+        http_async_client: Optional[Any] = None,
         local: Optional[bool] = False,
         default_headers: Optional[Mapping[str, str]] = None,
     ):
@@ -175,6 +177,8 @@ class BaseLlmConfig(BaseConfig):
         self.base_url = base_url
         self.endpoint = endpoint
         self.model_kwargs = model_kwargs
+        self.http_client = http_client
+        self.http_async_client = http_async_client
         self.local = local
         self.default_headers = default_headers
 

+ 2 - 0
embedchain/llm/openai.py

@@ -52,6 +52,8 @@ class OpenAILlm(BaseLlm):
                 callbacks=callbacks,
                 api_key=api_key,
                 base_url=base_url,
+                http_client=config.http_client,
+                http_async_client=config.http_async_client,
             )
         else:
             chat = ChatOpenAI(**kwargs, api_key=api_key, base_url=base_url)