@@ -80,19 +80,22 @@ export const LLMProviders: any[] = [
     model: ProviderEnum.Q_FAN,
     name: '百度千帆大模型',
     models: [
-      'ernie_bot_8k',
-      'eb-instant',
-      'ai_apaas',
-      'yi_34b_chat',
-      'bloomz_7b1',
-      'qianfan_bloomz_7b_compressed',
-      'mixtral_8x7b_instruct',
-      'llama_2_7b',
-      'llama_2_13b',
-      'llama_2_70b',
-      'qianfan_chinese_llama_2_7b',
-      'chatglm2_6b_32k',
-      'aquilachat_7b',
+      'ERNIE-4.0-8K',
+      'ERNIE-3.5-8K',
+      'ERNIE-Bot-8K',
+      'ERNIE-Bot-turbo',
+      'ERNIE-Speed-128K',
+      'EB-turbo-AppBuilder',
+      'Yi-34B-Chat',
+      'BLOOMZ-7B',
+      'Qianfan-BLOOMZ-7B-compressed',
+      'Mixtral-8x7B-Instruct',
+      'Llama-2-7b-chat',
+      'Llama-2-13b-chat',
+      'Llama-2-70b-chat',
+      'Qianfan-Chinese-Llama-2-7B',
+      'ChatGLM2-6B-32K',
+      'AquilaChat-7B',
     ],
   },
   {
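
For quick orientation, a minimal usage sketch of the list this hunk edits; it assumes LLMProviders and ProviderEnum are imported from the file changed above (the import path is not shown in the diff), and is not part of the change itself:

// Look up the Baidu Qianfan (百度千帆大模型) entry and check whether a model id is offered.
// LLMProviders and ProviderEnum are assumed to be imported from the module edited in the hunk above.
const qianfan = LLMProviders.find((p) => p.model === ProviderEnum.Q_FAN);
const hasErnie4 = qianfan?.models.includes('ERNIE-4.0-8K') ?? false;
console.log(hasErnie4); // true once this change is applied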