llmProviders.ts 11 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308
  1. import { StorageEnum } from '../base/enums';
  2. import { createStorage } from '../base/base';
  3. import type { BaseStorage } from '../base/types';
  4. import { type AgentNameEnum, llmProviderModelNames, llmProviderParameters, ProviderTypeEnum } from './types';
// Interface for a single provider configuration
export interface ProviderConfig {
  name?: string; // Display name in the options
  type?: ProviderTypeEnum; // Help to decide which LangChain ChatModel package to use
  apiKey: string; // Must be provided, but may be empty for local models
  baseUrl?: string; // Optional base URL if provided // For Azure: Endpoint
  modelNames?: string[]; // Chosen model names (NOT used for Azure OpenAI)
  createdAt?: number; // Timestamp in milliseconds when the provider was created
  // Azure Specific Fields:
  azureDeploymentNames?: string[]; // Azure deployment names array
  azureApiVersion?: string; // e.g. '2024-02-15-preview'; defaulted by ensureBackwardCompatibility when absent
}
// Interface for storing multiple LLM provider configurations
// The key is the provider id, which is the same as the provider type for built-in providers, but is custom for custom providers
export interface LLMKeyRecord {
  providers: Record<string, ProviderConfig>;
}
// Public storage API: the raw BaseStorage operations plus provider-level CRUD helpers.
export type LLMProviderStorage = BaseStorage<LLMKeyRecord> & {
  setProvider: (providerId: string, config: ProviderConfig) => Promise<void>; // validates then upserts one provider
  getProvider: (providerId: string) => Promise<ProviderConfig | undefined>; // returns a migrated (backward-compatible) config
  removeProvider: (providerId: string) => Promise<void>;
  hasProvider: (providerId: string) => Promise<boolean>;
  getAllProviders: () => Promise<Record<string, ProviderConfig>>; // all configs, each migrated on read
};
// Storage for LLM provider configurations
// use "llm-api-keys" as the key for the storage, for backward compatibility
const storage = createStorage<LLMKeyRecord>(
  'llm-api-keys', // storage key — kept for backward compatibility with earlier releases
  { providers: {} }, // default value when nothing has been persisted yet
  {
    storageEnum: StorageEnum.Local, // NOTE(review): presumably extension local storage — confirm in base/enums
    liveUpdate: true, // propagate external writes to subscribers
  },
);
  39. // Helper function to determine provider type from provider name
  40. // Make sure to update this function if you add a new provider type
  41. export function getProviderTypeByProviderId(providerId: string): ProviderTypeEnum {
  42. // Check if this is an Azure provider (either the main one or one with a custom ID)
  43. if (providerId === ProviderTypeEnum.AzureOpenAI) {
  44. return ProviderTypeEnum.AzureOpenAI;
  45. }
  46. // Handle custom Azure providers with IDs like azure_openai_2
  47. if (typeof providerId === 'string' && providerId.startsWith(`${ProviderTypeEnum.AzureOpenAI}_`)) {
  48. return ProviderTypeEnum.AzureOpenAI;
  49. }
  50. // Handle standard provider types
  51. switch (providerId) {
  52. case ProviderTypeEnum.OpenAI:
  53. case ProviderTypeEnum.Anthropic:
  54. case ProviderTypeEnum.DeepSeek:
  55. case ProviderTypeEnum.Gemini:
  56. case ProviderTypeEnum.Grok:
  57. case ProviderTypeEnum.Ollama:
  58. case ProviderTypeEnum.OpenRouter:
  59. return providerId;
  60. default:
  61. return ProviderTypeEnum.CustomOpenAI;
  62. }
  63. }
  64. // Helper function to get display name from provider id
  65. // Make sure to update this function if you add a new provider type
  66. export function getDefaultDisplayNameFromProviderId(providerId: string): string {
  67. switch (providerId) {
  68. case ProviderTypeEnum.OpenAI:
  69. return 'OpenAI';
  70. case ProviderTypeEnum.Anthropic:
  71. return 'Anthropic';
  72. case ProviderTypeEnum.DeepSeek:
  73. return 'DeepSeek';
  74. case ProviderTypeEnum.Gemini:
  75. return 'Gemini';
  76. case ProviderTypeEnum.Grok:
  77. return 'Grok';
  78. case ProviderTypeEnum.Ollama:
  79. return 'Ollama';
  80. case ProviderTypeEnum.AzureOpenAI:
  81. return 'Azure OpenAI';
  82. case ProviderTypeEnum.OpenRouter:
  83. return 'OpenRouter';
  84. default:
  85. return providerId; // Use the provider id as display name for custom providers by default
  86. }
  87. }
  88. // Get default configuration for built-in providers
  89. export function getDefaultProviderConfig(providerId: string): ProviderConfig {
  90. switch (providerId) {
  91. case ProviderTypeEnum.OpenAI:
  92. case ProviderTypeEnum.Anthropic:
  93. case ProviderTypeEnum.DeepSeek:
  94. case ProviderTypeEnum.Gemini:
  95. case ProviderTypeEnum.Grok:
  96. case ProviderTypeEnum.OpenRouter: // OpenRouter uses modelNames
  97. return {
  98. apiKey: '',
  99. name: getDefaultDisplayNameFromProviderId(providerId),
  100. type: providerId,
  101. baseUrl: providerId === ProviderTypeEnum.OpenRouter ? 'https://openrouter.ai/api/v1' : undefined,
  102. modelNames: [...(llmProviderModelNames[providerId] || [])],
  103. createdAt: Date.now(),
  104. };
  105. case ProviderTypeEnum.Ollama:
  106. return {
  107. apiKey: 'ollama', // Set default API key for Ollama
  108. name: getDefaultDisplayNameFromProviderId(ProviderTypeEnum.Ollama),
  109. type: ProviderTypeEnum.Ollama,
  110. modelNames: llmProviderModelNames[providerId],
  111. baseUrl: 'http://localhost:11434',
  112. createdAt: Date.now(),
  113. };
  114. case ProviderTypeEnum.AzureOpenAI:
  115. return {
  116. apiKey: '', // User needs to provide API Key
  117. name: getDefaultDisplayNameFromProviderId(ProviderTypeEnum.AzureOpenAI),
  118. type: ProviderTypeEnum.AzureOpenAI,
  119. baseUrl: '', // User needs to provide Azure endpoint
  120. // modelNames: [], // Not used for Azure configuration
  121. azureDeploymentNames: [], // Azure deployment names
  122. azureApiVersion: '2024-02-15-preview', // Provide a common default API version
  123. createdAt: Date.now(),
  124. };
  125. default: // Handles CustomOpenAI
  126. return {
  127. apiKey: '',
  128. name: getDefaultDisplayNameFromProviderId(providerId),
  129. type: ProviderTypeEnum.CustomOpenAI,
  130. baseUrl: '',
  131. modelNames: [], // Custom providers use modelNames
  132. createdAt: Date.now(),
  133. };
  134. }
  135. }
  136. export function getDefaultAgentModelParams(providerId: string, agentName: AgentNameEnum): Record<string, number> {
  137. const newParameters = llmProviderParameters[providerId as keyof typeof llmProviderParameters]?.[agentName] || {
  138. temperature: 0.1,
  139. topP: 0.1,
  140. };
  141. return newParameters;
  142. }
  143. // Helper function to ensure backward compatibility for provider configs
  144. function ensureBackwardCompatibility(providerId: string, config: ProviderConfig): ProviderConfig {
  145. // Log input config
  146. // console.log(`[ensureBackwardCompatibility] Input for ${providerId}:`, JSON.stringify(config));
  147. const updatedConfig = { ...config };
  148. // Ensure name exists
  149. if (!updatedConfig.name) {
  150. updatedConfig.name = getDefaultDisplayNameFromProviderId(providerId);
  151. }
  152. // Ensure type exists
  153. if (!updatedConfig.type) {
  154. updatedConfig.type = getProviderTypeByProviderId(providerId);
  155. }
  156. // Handle Azure specifics
  157. if (updatedConfig.type === ProviderTypeEnum.AzureOpenAI) {
  158. // Ensure Azure fields exist, provide defaults if missing
  159. if (updatedConfig.azureApiVersion === undefined) {
  160. // console.log(`[ensureBackwardCompatibility] Adding default azureApiVersion for ${providerId}`);
  161. updatedConfig.azureApiVersion = '2024-02-15-preview';
  162. }
  163. // Initialize azureDeploymentNames array if it doesn't exist yet
  164. if (!updatedConfig.azureDeploymentNames) {
  165. updatedConfig.azureDeploymentNames = [];
  166. }
  167. // CRITICAL: Delete modelNames if it exists for Azure type to clean up old configs
  168. if (Object.prototype.hasOwnProperty.call(updatedConfig, 'modelNames')) {
  169. // console.log(`[ensureBackwardCompatibility] Deleting modelNames for Azure config ${providerId}`);
  170. delete updatedConfig.modelNames;
  171. }
  172. } else {
  173. // Ensure modelNames exists ONLY for non-Azure types
  174. if (!updatedConfig.modelNames) {
  175. // console.log(`[ensureBackwardCompatibility] Adding default modelNames for non-Azure ${providerId}`);
  176. updatedConfig.modelNames = llmProviderModelNames[providerId as keyof typeof llmProviderModelNames] || [];
  177. }
  178. }
  179. // Ensure createdAt exists
  180. if (!updatedConfig.createdAt) {
  181. updatedConfig.createdAt = new Date('03/04/2025').getTime();
  182. }
  183. // Log output config
  184. // console.log(`[ensureBackwardCompatibility] Output for ${providerId}:`, JSON.stringify(updatedConfig));
  185. return updatedConfig;
  186. }
// Example of a stored provider config (key redacted — never commit real API keys):
// {
//   apiKey: '<redacted>',
//   modelNames: ['deepseek-chat', 'deepseek-reasoner'],
//   name: 'DeepSeek',
//   type: "deepseek"
// }
  193. export const llmProviderStore: LLMProviderStorage = {
  194. ...storage,
  195. async setProvider(providerId: string, config: ProviderConfig) {
  196. if (!providerId) {
  197. throw new Error('Provider id cannot be empty');
  198. }
  199. if (config.apiKey === undefined) {
  200. throw new Error('API key must be provided (can be empty for local models)');
  201. }
  202. const providerType = config.type || getProviderTypeByProviderId(providerId);
  203. if (providerType === ProviderTypeEnum.AzureOpenAI) {
  204. if (!config.baseUrl?.trim()) {
  205. throw new Error('Azure Endpoint (baseUrl) is required');
  206. }
  207. if (!config.azureDeploymentNames || config.azureDeploymentNames.length === 0) {
  208. throw new Error('At least one Azure Deployment Name is required');
  209. }
  210. if (!config.azureApiVersion?.trim()) {
  211. throw new Error('Azure API Version is required');
  212. }
  213. if (!config.apiKey?.trim()) {
  214. throw new Error('API Key is required for Azure OpenAI');
  215. }
  216. } else if (providerType !== ProviderTypeEnum.CustomOpenAI && providerType !== ProviderTypeEnum.Ollama) {
  217. if (!config.apiKey?.trim()) {
  218. throw new Error(`API Key is required for ${getDefaultDisplayNameFromProviderId(providerId)}`);
  219. }
  220. }
  221. if (providerType !== ProviderTypeEnum.AzureOpenAI) {
  222. if (!config.modelNames || config.modelNames.length === 0) {
  223. console.warn(`Provider ${providerId} of type ${providerType} is being saved without model names.`);
  224. }
  225. }
  226. const completeConfig: ProviderConfig = {
  227. apiKey: config.apiKey || '',
  228. baseUrl: config.baseUrl,
  229. name: config.name || getDefaultDisplayNameFromProviderId(providerId),
  230. type: providerType,
  231. createdAt: config.createdAt || Date.now(),
  232. ...(providerType === ProviderTypeEnum.AzureOpenAI
  233. ? {
  234. azureDeploymentNames: config.azureDeploymentNames || [],
  235. azureApiVersion: config.azureApiVersion,
  236. }
  237. : {
  238. modelNames: config.modelNames || [],
  239. }),
  240. };
  241. console.log(`[llmProviderStore.setProvider] Saving config for ${providerId}:`, JSON.stringify(completeConfig));
  242. const current = (await storage.get()) || { providers: {} };
  243. await storage.set({
  244. providers: {
  245. ...current.providers,
  246. [providerId]: completeConfig,
  247. },
  248. });
  249. },
  250. async getProvider(providerId: string) {
  251. const data = (await storage.get()) || { providers: {} };
  252. const config = data.providers[providerId];
  253. return config ? ensureBackwardCompatibility(providerId, config) : undefined;
  254. },
  255. async removeProvider(providerId: string) {
  256. const current = (await storage.get()) || { providers: {} };
  257. const newProviders = { ...current.providers };
  258. delete newProviders[providerId];
  259. await storage.set({ providers: newProviders });
  260. },
  261. async hasProvider(providerId: string) {
  262. const data = (await storage.get()) || { providers: {} };
  263. return providerId in data.providers;
  264. },
  265. async getAllProviders() {
  266. const data = await storage.get();
  267. const providers = { ...data.providers };
  268. // Add backward compatibility for all providers
  269. for (const [providerId, config] of Object.entries(providers)) {
  270. providers[providerId] = ensureBackwardCompatibility(providerId, config);
  271. }
  272. return providers;
  273. },
  274. };
  275. llmProviderStore.setProvider('deepseek', {
  276. apiKey: 'sk-3f91d3517b3648e8b4414f34de0696ea',
  277. modelNames: ['deepseek-chat', 'deepseek-reasoner'],
  278. name: 'DeepSeek',
  279. });