V4.8.20 feature (#3686)
* Aiproxy (#3649) * model config * feat: model config ui * perf: rename variable * feat: custom request url * perf: model buffer * perf: init model * feat: json model config * auto login * fix: ts * update packages * package * fix: dockerfile * feat: usage filter & export & dashbord (#3538) * feat: usage filter & export & dashbord * adjust ui * fix tmb scroll * fix code & selecte all * merge * perf: usages list;perf: move components (#3654) * perf: usages list * team sub plan load * perf: usage dashboard code * perf: dashboard ui * perf: move components * add default model config (#3653) * 4.8.20 test (#3656) * provider * perf: model config * model perf (#3657) * fix: model * dataset quote * perf: model config * model tag * doubao model config * perf: config model * feat: model test * fix: POST 500 error on dingtalk bot (#3655) * feat: default model (#3662) * move model config * feat: default model * fix: false triggerd org selection (#3661) * export usage csv i18n (#3660) * export usage csv i18n * fix build * feat: markdown extension (#3663) * feat: markdown extension * media cros * rerank test * default price * perf: default model * fix: cannot custom provider * fix: default model select * update bg * perf: default model selector * fix: usage export * i18n * fix: rerank * update init extension * perf: ip limit check * doubao model order * web default modle * perf: tts selector * perf: tts error * qrcode package * reload buffer (#3665) * reload buffer * reload buffer * tts selector * fix: err tip (#3666) * fix: err tip * perf: training queue * doc * fix interactive edge (#3659) * fix interactive edge * fix * comment * add gemini model * fix: chat model select * perf: supplement assistant empty response (#3669) * perf: supplement assistant empty response * check array * perf: max_token count;feat: support resoner output;fix: member scroll (#3681) * perf: supplement assistant empty response * check array * perf: max_token count * feat: support resoner output * 
member scroll * update provider order * i18n * fix: stream response (#3682) * perf: supplement assistant empty response * check array * fix: stream response * fix: model config cannot set to null * fix: reasoning response (#3684) * perf: supplement assistant empty response * check array * fix: reasoning response * fix: reasoning response * doc (#3685) * perf: supplement assistant empty response * check array * doc * lock * animation * update doc * update compose * doc * doc --------- Co-authored-by: heheer <heheer@sealos.io> Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>
This commit is contained in:
@@ -1,5 +1,6 @@
|
||||
import type { NextApiResponse } from 'next';
|
||||
import { getAIApi } from '../config';
|
||||
import { getTTSModel } from '../model';
|
||||
|
||||
export async function text2Speech({
|
||||
res,
|
||||
@@ -18,15 +19,26 @@ export async function text2Speech({
|
||||
voice: string;
|
||||
speed?: number;
|
||||
}) {
|
||||
const modelData = getTTSModel(model)!;
|
||||
const ai = getAIApi();
|
||||
const response = await ai.audio.speech.create({
|
||||
model,
|
||||
// @ts-ignore
|
||||
voice,
|
||||
input,
|
||||
response_format: 'mp3',
|
||||
speed
|
||||
});
|
||||
const response = await ai.audio.speech.create(
|
||||
{
|
||||
model,
|
||||
// @ts-ignore
|
||||
voice,
|
||||
input,
|
||||
response_format: 'mp3',
|
||||
speed
|
||||
},
|
||||
modelData.requestUrl && modelData.requestAuth
|
||||
? {
|
||||
path: modelData.requestUrl,
|
||||
headers: {
|
||||
Authorization: `Bearer ${modelData.requestAuth}`
|
||||
}
|
||||
}
|
||||
: {}
|
||||
);
|
||||
|
||||
const readableStream = response.body as unknown as NodeJS.ReadableStream;
|
||||
readableStream.pipe(res);
|
||||
|
||||
@@ -2,6 +2,7 @@ import fs from 'fs';
|
||||
import { getAxiosConfig } from '../config';
|
||||
import axios from 'axios';
|
||||
import FormData from 'form-data';
|
||||
import { getSTTModel } from '../model';
|
||||
|
||||
export const aiTranscriptions = async ({
|
||||
model,
|
||||
@@ -14,13 +15,21 @@ export const aiTranscriptions = async ({
|
||||
data.append('model', model);
|
||||
data.append('file', fileStream);
|
||||
|
||||
const modelData = getSTTModel(model);
|
||||
const aiAxiosConfig = getAxiosConfig();
|
||||
|
||||
const { data: result } = await axios<{ text: string }>({
|
||||
method: 'post',
|
||||
baseURL: aiAxiosConfig.baseUrl,
|
||||
url: '/audio/transcriptions',
|
||||
...(modelData.requestUrl
|
||||
? { url: modelData.requestUrl }
|
||||
: {
|
||||
baseURL: aiAxiosConfig.baseUrl,
|
||||
url: modelData.requestUrl || '/audio/transcriptions'
|
||||
}),
|
||||
headers: {
|
||||
Authorization: aiAxiosConfig.authorization,
|
||||
Authorization: modelData.requestAuth
|
||||
? `Bearer ${modelData.requestAuth}`
|
||||
: aiAxiosConfig.authorization,
|
||||
...data.getHeaders()
|
||||
},
|
||||
data: data
|
||||
|
||||
@@ -7,14 +7,14 @@ import { getErrText } from '@fastgpt/global/common/error/utils';
|
||||
import { addLog } from '../../common/system/log';
|
||||
import { i18nT } from '../../../web/i18n/utils';
|
||||
import { OpenaiAccountType } from '@fastgpt/global/support/user/team/type';
|
||||
import { getLLMModel } from './model';
|
||||
|
||||
export const openaiBaseUrl = process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1';
|
||||
|
||||
export const getAIApi = (props?: { userKey?: OpenaiAccountType; timeout?: number }) => {
|
||||
const { userKey, timeout } = props || {};
|
||||
|
||||
const baseUrl =
|
||||
userKey?.baseUrl || global?.systemEnv?.oneapiUrl || process.env.ONEAPI_URL || openaiBaseUrl;
|
||||
const baseUrl = userKey?.baseUrl || global?.systemEnv?.oneapiUrl || openaiBaseUrl;
|
||||
const apiKey = userKey?.key || global?.systemEnv?.chatApiKey || process.env.CHAT_API_KEY || '';
|
||||
|
||||
return new OpenAI({
|
||||
@@ -29,8 +29,7 @@ export const getAIApi = (props?: { userKey?: OpenaiAccountType; timeout?: number
|
||||
export const getAxiosConfig = (props?: { userKey?: OpenaiAccountType }) => {
|
||||
const { userKey } = props || {};
|
||||
|
||||
const baseUrl =
|
||||
userKey?.baseUrl || global?.systemEnv?.oneapiUrl || process.env.ONEAPI_URL || openaiBaseUrl;
|
||||
const baseUrl = userKey?.baseUrl || global?.systemEnv?.oneapiUrl || openaiBaseUrl;
|
||||
const apiKey = userKey?.key || global?.systemEnv?.chatApiKey || process.env.CHAT_API_KEY || '';
|
||||
|
||||
return {
|
||||
@@ -63,12 +62,23 @@ export const createChatCompletion = async <T extends CompletionsBodyType>({
|
||||
getEmptyResponseTip: () => string;
|
||||
}> => {
|
||||
try {
|
||||
const modelConstantsData = getLLMModel(body.model);
|
||||
|
||||
const formatTimeout = timeout ? timeout : body.stream ? 60000 : 600000;
|
||||
const ai = getAIApi({
|
||||
userKey,
|
||||
timeout: formatTimeout
|
||||
});
|
||||
const response = await ai.chat.completions.create(body, options);
|
||||
const response = await ai.chat.completions.create(body, {
|
||||
...options,
|
||||
...(modelConstantsData.requestUrl ? { path: modelConstantsData.requestUrl } : {}),
|
||||
headers: {
|
||||
...options?.headers,
|
||||
...(modelConstantsData.requestAuth
|
||||
? { Authorization: `Bearer ${modelConstantsData.requestAuth}` }
|
||||
: {})
|
||||
}
|
||||
});
|
||||
|
||||
const isStreamResponse =
|
||||
typeof response === 'object' &&
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"provider": "OpenAI",
|
||||
"model": "text-embedding-ada-002",
|
||||
"name": "text-embedding-ada-002",
|
||||
|
||||
"defaultToken": 512, // 默认分块 token
|
||||
"maxToken": 3000, // 最大分块 token
|
||||
"weight": 0, // 权重
|
||||
|
||||
"charsPointsPrice": 0 // 积分/1k token
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
{
|
||||
"provider": "OpenAI",
|
||||
"model": "gpt-4o-mini",
|
||||
"name": "GPT-4o-mini", // alias
|
||||
|
||||
"maxContext": 125000, // 最大上下文
|
||||
"maxResponse": 16000, // 最大回复
|
||||
"quoteMaxToken": 60000, // 最大引用
|
||||
"maxTemperature": 1.2, // 最大温度
|
||||
"presencePenaltyRange": [-2, 2], // 惩罚系数范围
|
||||
"frequencyPenaltyRange": [-2, 2], // 频率惩罚系数范围
|
||||
"responseFormatList": ["text", "json_object", "json_schema"], // 响应格式
|
||||
"showStopSign": true, // 是否显示停止符号
|
||||
|
||||
"vision": true, // 是否支持图片识别
|
||||
"toolChoice": true, // 是否支持工具调用
|
||||
"functionCall": false, // 是否支持函数调用(一般都可以 false 了,基本不用了)
|
||||
"defaultSystemChatPrompt": "", // 默认系统提示
|
||||
|
||||
"datasetProcess": true, // 用于知识库文本处理
|
||||
"usedInClassify": true, // 用于问题分类
|
||||
"customCQPrompt": "", // 自定义问题分类提示
|
||||
"usedInExtractFields": true, // 用于提取字段
|
||||
"customExtractPrompt": "", // 自定义提取提示
|
||||
"usedInToolCall": true, // 用于工具调用
|
||||
"usedInQueryExtension": true, // 用于问题优化
|
||||
|
||||
"defaultConfig": {}, // 额外的自定义 body
|
||||
"fieldMap": {}, // body 字段映射
|
||||
|
||||
"censor": false, // 是否开启敏感词过滤
|
||||
"charsPointsPrice": 0 // n 积分/1k token
|
||||
}
|
||||
4
packages/service/core/ai/config/provider/AliCloud.json
Normal file
4
packages/service/core/ai/config/provider/AliCloud.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"provider": "AliCloud",
|
||||
"list": []
|
||||
}
|
||||
17
packages/service/core/ai/config/provider/BAAI.json
Normal file
17
packages/service/core/ai/config/provider/BAAI.json
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"provider": "BAAI",
|
||||
"list": [
|
||||
{
|
||||
"model": "bge-m3",
|
||||
"name": "bge-m3",
|
||||
"defaultToken": 512,
|
||||
"maxToken": 8000,
|
||||
"type": "embedding"
|
||||
},
|
||||
{
|
||||
"model": "bge-reranker-v2-m3",
|
||||
"name": "bge-reranker-v2-m3",
|
||||
"type": "rerank"
|
||||
}
|
||||
]
|
||||
}
|
||||
4
packages/service/core/ai/config/provider/Baichuan.json
Normal file
4
packages/service/core/ai/config/provider/Baichuan.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"provider": "Baichuan",
|
||||
"list": []
|
||||
}
|
||||
147
packages/service/core/ai/config/provider/ChatGLM.json
Normal file
147
packages/service/core/ai/config/provider/ChatGLM.json
Normal file
@@ -0,0 +1,147 @@
|
||||
{
|
||||
"provider": "ChatGLM",
|
||||
"list": [
|
||||
{
|
||||
"model": "glm-4-air",
|
||||
"name": "glm-4-air",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 120000,
|
||||
"maxTemperature": 0.99,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "glm-4-flash",
|
||||
"name": "glm-4-flash",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 120000,
|
||||
"maxTemperature": 0.99,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "glm-4-long",
|
||||
"name": "glm-4-long",
|
||||
"maxContext": 1000000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 900000,
|
||||
"maxTemperature": 0.99,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "glm-4-plus",
|
||||
"name": "GLM-4-plus",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 120000,
|
||||
"maxTemperature": 0.99,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "glm-4v-flash",
|
||||
"name": "glm-4v-flash",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 1000,
|
||||
"quoteMaxToken": 6000,
|
||||
"maxTemperature": 0.99,
|
||||
"vision": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "glm-4v-plus",
|
||||
"name": "GLM-4v-plus",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 1000,
|
||||
"quoteMaxToken": 6000,
|
||||
"maxTemperature": 0.99,
|
||||
"vision": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "embedding-3",
|
||||
"name": "embedding-3",
|
||||
"defaultToken": 512,
|
||||
"maxToken": 8000,
|
||||
"defaultConfig": {
|
||||
"dimensions": 1024
|
||||
},
|
||||
"type": "embedding"
|
||||
}
|
||||
]
|
||||
}
|
||||
93
packages/service/core/ai/config/provider/Claude.json
Normal file
93
packages/service/core/ai/config/provider/Claude.json
Normal file
@@ -0,0 +1,93 @@
|
||||
{
|
||||
"provider": "Claude",
|
||||
"list": [
|
||||
{
|
||||
"model": "claude-3-5-haiku-20241022",
|
||||
"name": "claude-3-5-haiku-20241022",
|
||||
"maxContext": 200000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 100000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "claude-3-5-sonnet-20240620",
|
||||
"name": "Claude-3-5-sonnet-20240620",
|
||||
"maxContext": 200000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 100000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "claude-3-5-sonnet-20241022",
|
||||
"name": "Claude-3-5-sonnet-20241022",
|
||||
"maxContext": 200000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 100000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "claude-3-opus-20240229",
|
||||
"name": "claude-3-opus-20240229",
|
||||
"maxContext": 200000,
|
||||
"maxResponse": 4096,
|
||||
"quoteMaxToken": 100000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
}
|
||||
]
|
||||
}
|
||||
48
packages/service/core/ai/config/provider/DeepSeek.json
Normal file
48
packages/service/core/ai/config/provider/DeepSeek.json
Normal file
@@ -0,0 +1,48 @@
|
||||
{
|
||||
"provider": "DeepSeek",
|
||||
"list": [
|
||||
{
|
||||
"model": "deepseek-chat",
|
||||
"name": "Deepseek-chat",
|
||||
"maxContext": 64000,
|
||||
"maxResponse": 4096,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1.5,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "deepseek-reasoner",
|
||||
"name": "Deepseek-reasoner",
|
||||
"maxContext": 64000,
|
||||
"maxResponse": 4096,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": null,
|
||||
"vision": false,
|
||||
"reasoning": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
}
|
||||
]
|
||||
}
|
||||
195
packages/service/core/ai/config/provider/Doubao.json
Normal file
195
packages/service/core/ai/config/provider/Doubao.json
Normal file
@@ -0,0 +1,195 @@
|
||||
{
|
||||
"provider": "Doubao",
|
||||
"list": [
|
||||
{
|
||||
"model": "Doubao-lite-4k",
|
||||
"name": "Doubao-lite-4k",
|
||||
"maxContext": 4000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 4000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "Doubao-lite-32k",
|
||||
"name": "Doubao-lite-32k",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "Doubao-lite-128k",
|
||||
"name": "Doubao-lite-128k",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 120000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "Doubao-vision-lite-32k",
|
||||
"name": "Doubao-vision-lite-32k",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "Doubao-pro-4k",
|
||||
"name": "Doubao-pro-4k",
|
||||
"maxContext": 4000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 4000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "Doubao-pro-32k",
|
||||
"name": "Doubao-pro-32k",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "Doubao-pro-128k",
|
||||
"name": "Doubao-pro-128k",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 120000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "Doubao-vision-pro-32k",
|
||||
"name": "Doubao-vision-pro-32k",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "Doubao-embedding-large",
|
||||
"name": "Doubao-embedding-large",
|
||||
"defaultToken": 512,
|
||||
"maxToken": 4096,
|
||||
"type": "embedding"
|
||||
},
|
||||
{
|
||||
"model": "Doubao-embedding",
|
||||
"name": "Doubao-embedding",
|
||||
"defaultToken": 512,
|
||||
"maxToken": 4096,
|
||||
"type": "embedding"
|
||||
}
|
||||
]
|
||||
}
|
||||
107
packages/service/core/ai/config/provider/Ernie.json
Normal file
107
packages/service/core/ai/config/provider/Ernie.json
Normal file
@@ -0,0 +1,107 @@
|
||||
{
|
||||
"provider": "Ernie",
|
||||
"list": [
|
||||
{
|
||||
"model": "ERNIE-4.0-8K",
|
||||
"name": "ERNIE-4.0-8K",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 2048,
|
||||
"quoteMaxToken": 5000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "ERNIE-4.0-Turbo-8K",
|
||||
"name": "ERNIE-4.0-Turbo-8K",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 2048,
|
||||
"quoteMaxToken": 5000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "ERNIE-Lite-8K",
|
||||
"name": "ERNIE-lite-8k",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 2048,
|
||||
"quoteMaxToken": 6000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "ERNIE-Speed-128K",
|
||||
"name": "ERNIE-Speed-128K",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 4096,
|
||||
"quoteMaxToken": 120000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "Embedding-V1",
|
||||
"name": "Embedding-V1",
|
||||
"defaultToken": 512,
|
||||
"maxToken": 1000,
|
||||
"type": "embedding"
|
||||
},
|
||||
{
|
||||
"model": "tao-8k",
|
||||
"name": "tao-8k",
|
||||
"defaultToken": 512,
|
||||
"maxToken": 8000,
|
||||
"type": "embedding"
|
||||
}
|
||||
]
|
||||
}
|
||||
4
packages/service/core/ai/config/provider/FishAudio.json
Normal file
4
packages/service/core/ai/config/provider/FishAudio.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"provider": "FishAudio",
|
||||
"list": []
|
||||
}
|
||||
144
packages/service/core/ai/config/provider/Gemini.json
Normal file
144
packages/service/core/ai/config/provider/Gemini.json
Normal file
@@ -0,0 +1,144 @@
|
||||
{
|
||||
"provider": "Gemini",
|
||||
"list": [
|
||||
{
|
||||
"model": "gemini-1.5-flash",
|
||||
"name": "gemini-1.5-flash",
|
||||
"maxContext": 1000000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "gemini-1.5-pro",
|
||||
"name": "gemini-1.5-pro",
|
||||
"maxContext": 2000000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "gemini-2.0-flash-exp",
|
||||
"name": "gemini-2.0-flash-exp",
|
||||
"maxContext": 1000000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "gemini-2.0-flash-thinking-exp-1219",
|
||||
"name": "gemini-2.0-flash-thinking-exp-1219",
|
||||
"maxContext": 1000000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "gemini-2.0-flash-thinking-exp-01-21",
|
||||
"name": "gemini-2.0-flash-thinking-exp-01-21",
|
||||
"maxContext": 1000000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "gemini-exp-1206",
|
||||
"name": "gemini-exp-1206",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 120000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "text-embedding-004",
|
||||
"name": "text-embedding-004",
|
||||
"defaultToken": 512,
|
||||
"maxToken": 2000,
|
||||
"type": "embedding"
|
||||
}
|
||||
]
|
||||
}
|
||||
4
packages/service/core/ai/config/provider/Grok.json
Normal file
4
packages/service/core/ai/config/provider/Grok.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"provider": "Grok",
|
||||
"list": []
|
||||
}
|
||||
47
packages/service/core/ai/config/provider/Groq.json
Normal file
47
packages/service/core/ai/config/provider/Groq.json
Normal file
@@ -0,0 +1,47 @@
|
||||
{
|
||||
"provider": "Groq",
|
||||
"list": [
|
||||
{
|
||||
"model": "llama-3.1-8b-instant",
|
||||
"name": "Groq-llama-3.1-8b-instant",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1.2,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "llama-3.3-70b-versatile",
|
||||
"name": "Groq-llama-3.3-70b-versatile",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1.2,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"type": "llm"
|
||||
}
|
||||
]
|
||||
}
|
||||
166
packages/service/core/ai/config/provider/Hunyuan.json
Normal file
166
packages/service/core/ai/config/provider/Hunyuan.json
Normal file
@@ -0,0 +1,166 @@
|
||||
{
|
||||
"provider": "Hunyuan",
|
||||
"list": [
|
||||
{
|
||||
"model": "hunyuan-large",
|
||||
"name": "hunyuan-large",
|
||||
"maxContext": 28000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 20000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "hunyuan-lite",
|
||||
"name": "hunyuan-lite",
|
||||
"maxContext": 250000,
|
||||
"maxResponse": 6000,
|
||||
"quoteMaxToken": 100000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "hunyuan-pro",
|
||||
"name": "hunyuan-pro",
|
||||
"maxContext": 28000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 28000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "hunyuan-standard",
|
||||
"name": "hunyuan-standard",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 2000,
|
||||
"quoteMaxToken": 20000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "hunyuan-turbo-vision",
|
||||
"name": "hunyuan-turbo-vision",
|
||||
"maxContext": 6000,
|
||||
"maxResponse": 2000,
|
||||
"quoteMaxToken": 6000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "hunyuan-turbo",
|
||||
"name": "hunyuan-turbo",
|
||||
"maxContext": 28000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 20000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "hunyuan-vision",
|
||||
"name": "hunyuan-vision",
|
||||
"maxContext": 6000,
|
||||
"maxResponse": 2000,
|
||||
"quoteMaxToken": 4000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "hunyuan-embedding",
|
||||
"name": "hunyuan-embedding",
|
||||
"defaultToken": 512,
|
||||
"maxToken": 1024,
|
||||
"type": "embedding"
|
||||
}
|
||||
]
|
||||
}
|
||||
49
packages/service/core/ai/config/provider/Intern.json
Normal file
49
packages/service/core/ai/config/provider/Intern.json
Normal file
@@ -0,0 +1,49 @@
|
||||
{
|
||||
"provider": "Intern",
|
||||
"list": [
|
||||
{
|
||||
"model": "internlm2-pro-chat",
|
||||
"name": "internlm2-pro-chat",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "internlm3-8b-instruct",
|
||||
"name": "internlm3-8b-instruct",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
}
|
||||
]
|
||||
}
|
||||
4
packages/service/core/ai/config/provider/Meta.json
Normal file
4
packages/service/core/ai/config/provider/Meta.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"provider": "Meta",
|
||||
"list": []
|
||||
}
|
||||
240
packages/service/core/ai/config/provider/MiniMax.json
Normal file
240
packages/service/core/ai/config/provider/MiniMax.json
Normal file
@@ -0,0 +1,240 @@
|
||||
{
|
||||
"provider": "MiniMax",
|
||||
"list": [
|
||||
{
|
||||
"model": "MiniMax-Text-01",
|
||||
"name": "MiniMax-Text-01",
|
||||
"maxContext": 1000000,
|
||||
"maxResponse": 1000000,
|
||||
"quoteMaxToken": 100000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "abab6.5s-chat",
|
||||
"name": "MiniMax-abab6.5s",
|
||||
"maxContext": 245000,
|
||||
"maxResponse": 10000,
|
||||
"quoteMaxToken": 240000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "speech-01-turbo",
|
||||
"name": "speech-01-turbo",
|
||||
"voices": [
|
||||
{
|
||||
"label": "male-qn-qingse",
|
||||
"value": "male-qn-qingse"
|
||||
},
|
||||
{
|
||||
"label": "male-qn-jingying",
|
||||
"value": "male-qn-jingying"
|
||||
},
|
||||
{
|
||||
"label": "male-qn-badao",
|
||||
"value": "male-qn-badao"
|
||||
},
|
||||
{
|
||||
"label": "male-qn-daxuesheng",
|
||||
"value": "male-qn-daxuesheng"
|
||||
},
|
||||
{
|
||||
"label": "female-shaonv",
|
||||
"value": "female-shaonv"
|
||||
},
|
||||
{
|
||||
"label": "female-yujie",
|
||||
"value": "female-yujie"
|
||||
},
|
||||
{
|
||||
"label": "female-chengshu",
|
||||
"value": "female-chengshu"
|
||||
},
|
||||
{
|
||||
"label": "female-tianmei",
|
||||
"value": "female-tianmei"
|
||||
},
|
||||
{
|
||||
"label": "presenter_male",
|
||||
"value": "presenter_male"
|
||||
},
|
||||
{
|
||||
"label": "presenter_female",
|
||||
"value": "presenter_female"
|
||||
},
|
||||
{
|
||||
"label": "audiobook_male_1",
|
||||
"value": "audiobook_male_1"
|
||||
},
|
||||
{
|
||||
"label": "audiobook_male_2",
|
||||
"value": "audiobook_male_2"
|
||||
},
|
||||
{
|
||||
"label": "audiobook_female_1",
|
||||
"value": "audiobook_female_1"
|
||||
},
|
||||
{
|
||||
"label": "audiobook_female_2",
|
||||
"value": "audiobook_female_2"
|
||||
},
|
||||
{
|
||||
"label": "male-qn-qingse-jingpin",
|
||||
"value": "male-qn-qingse-jingpin"
|
||||
},
|
||||
{
|
||||
"label": "male-qn-jingying-jingpin",
|
||||
"value": "male-qn-jingying-jingpin"
|
||||
},
|
||||
{
|
||||
"label": "male-qn-badao-jingpin",
|
||||
"value": "male-qn-badao-jingpin"
|
||||
},
|
||||
{
|
||||
"label": "male-qn-daxuesheng-jingpin",
|
||||
"value": "male-qn-daxuesheng-jingpin"
|
||||
},
|
||||
{
|
||||
"label": "female-shaonv-jingpin",
|
||||
"value": "female-shaonv-jingpin"
|
||||
},
|
||||
{
|
||||
"label": "female-yujie-jingpin",
|
||||
"value": "female-yujie-jingpin"
|
||||
},
|
||||
{
|
||||
"label": "female-chengshu-jingpin",
|
||||
"value": "female-chengshu-jingpin"
|
||||
},
|
||||
{
|
||||
"label": "female-tianmei-jingpin",
|
||||
"value": "female-tianmei-jingpin"
|
||||
},
|
||||
{
|
||||
"label": "clever_boy",
|
||||
"value": "clever_boy"
|
||||
},
|
||||
{
|
||||
"label": "cute_boy",
|
||||
"value": "cute_boy"
|
||||
},
|
||||
{
|
||||
"label": "lovely_girl",
|
||||
"value": "lovely_girl"
|
||||
},
|
||||
{
|
||||
"label": "cartoon_pig",
|
||||
"value": "cartoon_pig"
|
||||
},
|
||||
{
|
||||
"label": "bingjiao_didi",
|
||||
"value": "bingjiao_didi"
|
||||
},
|
||||
{
|
||||
"label": "junlang_nanyou",
|
||||
"value": "junlang_nanyou"
|
||||
},
|
||||
{
|
||||
"label": "chunzhen_xuedi",
|
||||
"value": "chunzhen_xuedi"
|
||||
},
|
||||
{
|
||||
"label": "lengdan_xiongzhang",
|
||||
"value": "lengdan_xiongzhang"
|
||||
},
|
||||
{
|
||||
"label": "badao_shaoye",
|
||||
"value": "badao_shaoye"
|
||||
},
|
||||
{
|
||||
"label": "tianxin_xiaoling",
|
||||
"value": "tianxin_xiaoling"
|
||||
},
|
||||
{
|
||||
"label": "qiaopi_mengmei",
|
||||
"value": "qiaopi_mengmei"
|
||||
},
|
||||
{
|
||||
"label": "wumei_yujie",
|
||||
"value": "wumei_yujie"
|
||||
},
|
||||
{
|
||||
"label": "diadia_xuemei",
|
||||
"value": "diadia_xuemei"
|
||||
},
|
||||
{
|
||||
"label": "danya_xuejie",
|
||||
"value": "danya_xuejie"
|
||||
},
|
||||
{
|
||||
"label": "Santa_Claus",
|
||||
"value": "Santa_Claus"
|
||||
},
|
||||
{
|
||||
"label": "Grinch",
|
||||
"value": "Grinch"
|
||||
},
|
||||
{
|
||||
"label": "Rudolph",
|
||||
"value": "Rudolph"
|
||||
},
|
||||
{
|
||||
"label": "Arnold",
|
||||
"value": "Arnold"
|
||||
},
|
||||
{
|
||||
"label": "Charming_Santa",
|
||||
"value": "Charming_Santa"
|
||||
},
|
||||
{
|
||||
"label": "Charming_Lady",
|
||||
"value": "Charming_Lady"
|
||||
},
|
||||
{
|
||||
"label": "Sweet_Girl",
|
||||
"value": "Sweet_Girl"
|
||||
},
|
||||
{
|
||||
"label": "Cute_Elf",
|
||||
"value": "Cute_Elf"
|
||||
},
|
||||
{
|
||||
"label": "Attractive_Girl",
|
||||
"value": "Attractive_Girl"
|
||||
},
|
||||
{
|
||||
"label": "Serene_Woman",
|
||||
"value": "Serene_Woman"
|
||||
}
|
||||
],
|
||||
"type": "tts"
|
||||
}
|
||||
]
|
||||
}
|
||||
93
packages/service/core/ai/config/provider/MistralAI.json
Normal file
93
packages/service/core/ai/config/provider/MistralAI.json
Normal file
@@ -0,0 +1,93 @@
|
||||
{
|
||||
"provider": "MistralAI",
|
||||
"list": [
|
||||
{
|
||||
"model": "ministral-3b-latest",
|
||||
"name": "Ministral-3b-latest",
|
||||
"maxContext": 130000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1.2,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "ministral-8b-latest",
|
||||
"name": "Ministral-8b-latest",
|
||||
"maxContext": 130000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1.2,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "mistral-large-latest",
|
||||
"name": "Mistral-large-latest",
|
||||
"maxContext": 130000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1.2,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "mistral-small-latest",
|
||||
"name": "Mistral-small-latest",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 1.2,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
}
|
||||
]
|
||||
}
|
||||
4
packages/service/core/ai/config/provider/Moka.json
Normal file
4
packages/service/core/ai/config/provider/Moka.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"provider": "Moka",
|
||||
"list": []
|
||||
}
|
||||
71
packages/service/core/ai/config/provider/Moonshot.json
Normal file
71
packages/service/core/ai/config/provider/Moonshot.json
Normal file
@@ -0,0 +1,71 @@
|
||||
{
|
||||
"provider": "Moonshot",
|
||||
"list": [
|
||||
{
|
||||
"model": "moonshot-v1-8k",
|
||||
"name": "moonshot-v1-8k",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 6000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "moonshot-v1-32k",
|
||||
"name": "moonshot-v1-32k",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "moonshot-v1-128k",
|
||||
"name": "moonshot-v1-128k",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
}
|
||||
]
|
||||
}
|
||||
4
packages/service/core/ai/config/provider/Ollama.json
Normal file
4
packages/service/core/ai/config/provider/Ollama.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"provider": "Ollama",
|
||||
"list": []
|
||||
}
|
||||
252
packages/service/core/ai/config/provider/OpenAI.json
Normal file
252
packages/service/core/ai/config/provider/OpenAI.json
Normal file
@@ -0,0 +1,252 @@
|
||||
{
|
||||
"provider": "OpenAI",
|
||||
"list": [
|
||||
{
|
||||
"model": "gpt-4o-mini",
|
||||
"name": "GPT-4o-mini",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 16000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1.2,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": true,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "gpt-4o",
|
||||
"name": "GPT-4o",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1.2,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": true,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "o3-mini",
|
||||
"name": "o3-mini",
|
||||
"maxContext": 200000,
|
||||
"maxResponse": 100000,
|
||||
"quoteMaxToken": 120000,
|
||||
"maxTemperature": null,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {
|
||||
"stream": false
|
||||
},
|
||||
"fieldMap": {
|
||||
"max_tokens": "max_completion_tokens"
|
||||
},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "o1-mini",
|
||||
"name": "o1-mini",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 120000,
|
||||
"maxTemperature": null,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {
|
||||
"stream": false
|
||||
},
|
||||
"fieldMap": {
|
||||
"max_tokens": "max_completion_tokens"
|
||||
},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "o1",
|
||||
"name": "o1",
|
||||
"maxContext": 195000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 120000,
|
||||
"maxTemperature": null,
|
||||
"vision": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {
|
||||
"stream": false
|
||||
},
|
||||
"fieldMap": {
|
||||
"max_tokens": "max_completion_tokens"
|
||||
},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "o1-preview",
|
||||
"name": "o1-preview",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 120000,
|
||||
"maxTemperature": null,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {
|
||||
"stream": false
|
||||
},
|
||||
"fieldMap": {
|
||||
"max_tokens": "max_completion_tokens"
|
||||
},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "gpt-3.5-turbo",
|
||||
"name": "gpt-3.5-turbo",
|
||||
"maxContext": 16000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 13000,
|
||||
"maxTemperature": 1.2,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": true,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "gpt-4-turbo",
|
||||
"name": "gpt-4-turbo",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1.2,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": true,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "text-embedding-3-large",
|
||||
"name": "text-embedding-3-large",
|
||||
"defaultToken": 512,
|
||||
"maxToken": 8000,
|
||||
"defaultConfig": {
|
||||
"dimensions": 1024
|
||||
},
|
||||
"type": "embedding"
|
||||
},
|
||||
{
|
||||
"model": "text-embedding-3-small",
|
||||
"name": "text-embedding-3-small",
|
||||
"defaultToken": 512,
|
||||
"maxToken": 8000,
|
||||
"type": "embedding"
|
||||
},
|
||||
{
|
||||
"model": "text-embedding-ada-002",
|
||||
"name": "text-embedding-ada-002",
|
||||
"defaultToken": 512,
|
||||
"maxToken": 8000,
|
||||
"type": "embedding"
|
||||
},
|
||||
{
|
||||
"model": "tts-1",
|
||||
"name": "TTS1",
|
||||
"voices": [
|
||||
{
|
||||
"label": "Alloy",
|
||||
"value": "alloy"
|
||||
},
|
||||
{
|
||||
"label": "Echo",
|
||||
"value": "echo"
|
||||
},
|
||||
{
|
||||
"label": "Fable",
|
||||
"value": "fable"
|
||||
},
|
||||
{
|
||||
"label": "Onyx",
|
||||
"value": "onyx"
|
||||
},
|
||||
{
|
||||
"label": "Nova",
|
||||
"value": "nova"
|
||||
},
|
||||
{
|
||||
"label": "Shimmer",
|
||||
"value": "shimmer"
|
||||
}
|
||||
],
|
||||
"type": "tts"
|
||||
},
|
||||
{
|
||||
"model": "whisper-1",
|
||||
"name": "whisper-1",
|
||||
"type": "stt"
|
||||
}
|
||||
]
|
||||
}
|
||||
4
packages/service/core/ai/config/provider/Other.json
Normal file
4
packages/service/core/ai/config/provider/Other.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"provider": "Other",
|
||||
"list": []
|
||||
}
|
||||
223
packages/service/core/ai/config/provider/Qwen.json
Normal file
223
packages/service/core/ai/config/provider/Qwen.json
Normal file
@@ -0,0 +1,223 @@
|
||||
{
|
||||
"provider": "Qwen",
|
||||
"list": [
|
||||
{
|
||||
"model": "qwen-turbo",
|
||||
"name": "Qwen-turbo",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 100000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "qwen-plus",
|
||||
"name": "Qwen-plus",
|
||||
"maxContext": 64000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 60000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "qwen-vl-plus",
|
||||
"name": "qwen-vl-plus",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 2000,
|
||||
"quoteMaxToken": 20000,
|
||||
"maxTemperature": 1.2,
|
||||
"vision": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "qwen-max",
|
||||
"name": "Qwen-max",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 6000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "qwen-vl-max",
|
||||
"name": "qwen-vl-max",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 2000,
|
||||
"quoteMaxToken": 20000,
|
||||
"maxTemperature": 1.2,
|
||||
"vision": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "qwen-coder-turbo",
|
||||
"name": "qwen-coder-turbo",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 50000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "qwen2.5-7b-instruct",
|
||||
"name": "qwen2.5-7b-instruct",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 50000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "qwen2.5-14b-instruct",
|
||||
"name": "qwen2.5-14b-instruct",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 50000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "qwen2.5-32b-instruct",
|
||||
"name": "qwen2.5-32b-instruct",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 50000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "qwen2.5-72b-instruct",
|
||||
"name": "Qwen2.5-72B-instruct",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 50000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
}
|
||||
]
|
||||
}
|
||||
204
packages/service/core/ai/config/provider/Siliconflow.json
Normal file
204
packages/service/core/ai/config/provider/Siliconflow.json
Normal file
@@ -0,0 +1,204 @@
|
||||
{
|
||||
"provider": "Siliconflow",
|
||||
"list": [
|
||||
{
|
||||
"model": "Qwen/Qwen2.5-72B-Instruct",
|
||||
"name": "Qwen/Qwen2.5-72B-Instruct",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 50000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "Qwen/Qwen2-VL-72B-Instruct",
|
||||
"name": "Qwen/Qwen2-VL-72B-Instruct",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 1,
|
||||
"censor": false,
|
||||
"vision": true,
|
||||
"datasetProcess": false,
|
||||
"usedInClassify": false,
|
||||
"usedInExtractFields": false,
|
||||
"usedInToolCall": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"defaultConfig": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "deepseek-ai/DeepSeek-V2.5",
|
||||
"name": "deepseek-ai/DeepSeek-V2.5",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "BAAI/bge-m3",
|
||||
"name": "BAAI/bge-m3",
|
||||
"defaultToken": 512,
|
||||
"maxToken": 8000,
|
||||
"type": "embedding"
|
||||
},
|
||||
{
|
||||
"model": "FunAudioLLM/CosyVoice2-0.5B",
|
||||
"name": "FunAudioLLM/CosyVoice2-0.5B",
|
||||
"voices": [
|
||||
{
|
||||
"label": "alex",
|
||||
"value": "FunAudioLLM/CosyVoice2-0.5B:alex"
|
||||
},
|
||||
{
|
||||
"label": "anna",
|
||||
"value": "FunAudioLLM/CosyVoice2-0.5B:anna"
|
||||
},
|
||||
{
|
||||
"label": "bella",
|
||||
"value": "FunAudioLLM/CosyVoice2-0.5B:bella"
|
||||
},
|
||||
{
|
||||
"label": "benjamin",
|
||||
"value": "FunAudioLLM/CosyVoice2-0.5B:benjamin"
|
||||
},
|
||||
{
|
||||
"label": "charles",
|
||||
"value": "FunAudioLLM/CosyVoice2-0.5B:charles"
|
||||
},
|
||||
{
|
||||
"label": "claire",
|
||||
"value": "FunAudioLLM/CosyVoice2-0.5B:claire"
|
||||
},
|
||||
{
|
||||
"label": "david",
|
||||
"value": "FunAudioLLM/CosyVoice2-0.5B:david"
|
||||
},
|
||||
{
|
||||
"label": "diana",
|
||||
"value": "FunAudioLLM/CosyVoice2-0.5B:diana"
|
||||
}
|
||||
],
|
||||
"type": "tts"
|
||||
},
|
||||
{
|
||||
"model": "RVC-Boss/GPT-SoVITS",
|
||||
"name": "RVC-Boss/GPT-SoVITS",
|
||||
"voices": [
|
||||
{
|
||||
"label": "alex",
|
||||
"value": "RVC-Boss/GPT-SoVITS:alex"
|
||||
},
|
||||
{
|
||||
"label": "anna",
|
||||
"value": "RVC-Boss/GPT-SoVITS:anna"
|
||||
},
|
||||
{
|
||||
"label": "bella",
|
||||
"value": "RVC-Boss/GPT-SoVITS:bella"
|
||||
},
|
||||
{
|
||||
"label": "benjamin",
|
||||
"value": "RVC-Boss/GPT-SoVITS:benjamin"
|
||||
},
|
||||
{
|
||||
"label": "charles",
|
||||
"value": "RVC-Boss/GPT-SoVITS:charles"
|
||||
},
|
||||
{
|
||||
"label": "claire",
|
||||
"value": "RVC-Boss/GPT-SoVITS:claire"
|
||||
},
|
||||
{
|
||||
"label": "david",
|
||||
"value": "RVC-Boss/GPT-SoVITS:david"
|
||||
},
|
||||
{
|
||||
"label": "diana",
|
||||
"value": "RVC-Boss/GPT-SoVITS:diana"
|
||||
}
|
||||
],
|
||||
"type": "tts"
|
||||
},
|
||||
{
|
||||
"model": "fishaudio/fish-speech-1.5",
|
||||
"name": "fish-speech-1.5",
|
||||
"voices": [
|
||||
{
|
||||
"label": "alex",
|
||||
"value": "fishaudio/fish-speech-1.5:alex"
|
||||
},
|
||||
{
|
||||
"label": "anna",
|
||||
"value": "fishaudio/fish-speech-1.5:anna"
|
||||
},
|
||||
{
|
||||
"label": "bella",
|
||||
"value": "fishaudio/fish-speech-1.5:bella"
|
||||
},
|
||||
{
|
||||
"label": "benjamin",
|
||||
"value": "fishaudio/fish-speech-1.5:benjamin"
|
||||
},
|
||||
{
|
||||
"label": "charles",
|
||||
"value": "fishaudio/fish-speech-1.5:charles"
|
||||
},
|
||||
{
|
||||
"label": "claire",
|
||||
"value": "fishaudio/fish-speech-1.5:claire"
|
||||
},
|
||||
{
|
||||
"label": "david",
|
||||
"value": "fishaudio/fish-speech-1.5:david"
|
||||
},
|
||||
{
|
||||
"label": "diana",
|
||||
"value": "fishaudio/fish-speech-1.5:diana"
|
||||
}
|
||||
],
|
||||
"type": "tts"
|
||||
},
|
||||
{
|
||||
"model": "FunAudioLLM/SenseVoiceSmall",
|
||||
"name": "FunAudioLLM/SenseVoiceSmall",
|
||||
"type": "stt"
|
||||
},
|
||||
{
|
||||
"model": "BAAI/bge-reranker-v2-m3",
|
||||
"name": "BAAI/bge-reranker-v2-m3",
|
||||
"type": "rerank"
|
||||
}
|
||||
]
|
||||
}
|
||||
129
packages/service/core/ai/config/provider/SparkDesk.json
Normal file
129
packages/service/core/ai/config/provider/SparkDesk.json
Normal file
@@ -0,0 +1,129 @@
|
||||
{
|
||||
"provider": "SparkDesk",
|
||||
"list": [
|
||||
{
|
||||
"model": "lite",
|
||||
"name": "SparkDesk-lite",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "generalv3",
|
||||
"name": "SparkDesk-Pro",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 8000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "pro-128k",
|
||||
"name": "SparkDesk-Pro-128k",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 128000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "generalv3.5",
|
||||
"name": "SparkDesk-max",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 8000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "max-32k",
|
||||
"name": "SparkDesk-max-32k",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "4.0Ultra",
|
||||
"name": "SparkDesk-v4.0 Ultra",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 8000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
}
|
||||
]
|
||||
}
|
||||
308
packages/service/core/ai/config/provider/StepFun.json
Normal file
308
packages/service/core/ai/config/provider/StepFun.json
Normal file
@@ -0,0 +1,308 @@
|
||||
{
|
||||
"provider": "StepFun",
|
||||
"list": [
|
||||
{
|
||||
"model": "step-1-flash",
|
||||
"name": "step-1-flash",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 6000,
|
||||
"maxTemperature": 2,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "step-1-8k",
|
||||
"name": "step-1-8k",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 8000,
|
||||
"maxTemperature": 2,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "step-1-32k",
|
||||
"name": "step-1-32k",
|
||||
"maxContext": 32000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxTemperature": 2,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "step-1-128k",
|
||||
"name": "step-1-128k",
|
||||
"maxContext": 128000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 128000,
|
||||
"maxTemperature": 2,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "step-1-256k",
|
||||
"name": "step-1-256k",
|
||||
"maxContext": 256000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 256000,
|
||||
"maxTemperature": 2,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "step-1o-vision-32k",
|
||||
"name": "step-1o-vision-32k",
|
||||
"maxContext": 32000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxResponse": 8000,
|
||||
"maxTemperature": 2,
|
||||
"vision": true,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "step-1v-8k",
|
||||
"name": "step-1v-8k",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 8000,
|
||||
"quoteMaxToken": 8000,
|
||||
"maxTemperature": 2,
|
||||
"vision": true,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "step-1v-32k",
|
||||
"name": "step-1v-32k",
|
||||
"maxContext": 32000,
|
||||
"quoteMaxToken": 32000,
|
||||
"maxResponse": 8000,
|
||||
"maxTemperature": 2,
|
||||
"vision": true,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "step-2-mini",
|
||||
"name": "step-2-mini",
|
||||
"maxContext": 8000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 6000,
|
||||
"maxTemperature": 2,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "step-2-16k",
|
||||
"name": "step-2-16k",
|
||||
"maxContext": 16000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 4000,
|
||||
"maxTemperature": 2,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "step-2-16k-exp",
|
||||
"name": "step-2-16k-exp",
|
||||
"maxContext": 16000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 4000,
|
||||
"maxTemperature": 2,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"usedInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
"customExtractPrompt": "",
|
||||
"defaultSystemChatPrompt": "",
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "step-tts-mini",
|
||||
"name": "step-tts-mini",
|
||||
"voices": [
|
||||
{
|
||||
"label": "cixingnansheng",
|
||||
"value": "cixingnansheng"
|
||||
},
|
||||
{
|
||||
"label": "zhengpaiqingnian",
|
||||
"value": "zhengpaiqingnian"
|
||||
},
|
||||
{
|
||||
"label": "yuanqinansheng",
|
||||
"value": "yuanqinansheng"
|
||||
},
|
||||
{
|
||||
"label": "qingniandaxuesheng",
|
||||
"value": "qingniandaxuesheng"
|
||||
},
|
||||
{
|
||||
"label": "boyinnansheng",
|
||||
"value": "boyinnansheng"
|
||||
},
|
||||
{
|
||||
"label": "ruyananshi",
|
||||
"value": "ruyananshi"
|
||||
},
|
||||
{
|
||||
"label": "shenchennanyin",
|
||||
"value": "shenchennanyin"
|
||||
},
|
||||
{
|
||||
"label": "qinqienvsheng",
|
||||
"value": "qinqienvsheng"
|
||||
},
|
||||
{
|
||||
"label": "wenrounvsheng",
|
||||
"value": "wenrounvsheng"
|
||||
},
|
||||
{
|
||||
"label": "jilingshaonv",
|
||||
"value": "jilingshaonv"
|
||||
},
|
||||
{
|
||||
"label": "yuanqishaonv",
|
||||
"value": "yuanqishaonv"
|
||||
},
|
||||
{
|
||||
"label": "ruanmengnvsheng",
|
||||
"value": "ruanmengnvsheng"
|
||||
},
|
||||
{
|
||||
"label": "youyanvsheng",
|
||||
"value": "youyanvsheng"
|
||||
},
|
||||
{
|
||||
"label": "lengyanyujie",
|
||||
"value": "lengyanyujie"
|
||||
},
|
||||
{
|
||||
"label": "shuangkuaijiejie",
|
||||
"value": "shuangkuaijiejie"
|
||||
},
|
||||
{
|
||||
"label": "wenjingxuejie",
|
||||
"value": "wenjingxuejie"
|
||||
},
|
||||
{
|
||||
"label": "linjiajiejie",
|
||||
"value": "linjiajiejie"
|
||||
},
|
||||
{
|
||||
"label": "linjiameimei",
|
||||
"value": "linjiameimei"
|
||||
},
|
||||
{
|
||||
"label": "zhixingjiejie",
|
||||
"value": "zhixingjiejie"
|
||||
}
|
||||
],
|
||||
"type": "tts"
|
||||
}
|
||||
]
|
||||
}
|
||||
49
packages/service/core/ai/config/provider/Yi.json
Normal file
49
packages/service/core/ai/config/provider/Yi.json
Normal file
@@ -0,0 +1,49 @@
|
||||
{
|
||||
"provider": "Yi",
|
||||
"list": [
|
||||
{
|
||||
"model": "yi-lightning",
|
||||
"name": "yi-lightning",
|
||||
"maxContext": 16000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 12000,
|
||||
"maxTemperature": 1,
|
||||
"vision": false,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
},
|
||||
{
|
||||
"model": "yi-vision-v2",
|
||||
"name": "yi-vision-v2",
|
||||
"maxContext": 16000,
|
||||
"maxResponse": 4000,
|
||||
"quoteMaxToken": 12000,
|
||||
"maxTemperature": 1,
|
||||
"vision": true,
|
||||
"toolChoice": false,
|
||||
"functionCall": false,
|
||||
"defaultSystemChatPrompt": "",
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"customCQPrompt": "",
|
||||
"usedInExtractFields": true,
|
||||
"usedInQueryExtension": true,
|
||||
"customExtractPrompt": "",
|
||||
"usedInToolCall": true,
|
||||
"defaultConfig": {},
|
||||
"fieldMap": {},
|
||||
"type": "llm"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"provider": "BAAI",
|
||||
"model": "bge-reranker-v2-m3",
|
||||
"name": "bge-reranker-v2-m3",
|
||||
"charsPointsPrice": 0
|
||||
}
|
||||
21
packages/service/core/ai/config/schema.ts
Normal file
21
packages/service/core/ai/config/schema.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import { connectionMongo, getMongoModel } from '../../../common/mongo';
|
||||
const { Schema } = connectionMongo;
|
||||
import type { SystemModelSchemaType } from '../type';
|
||||
|
||||
const SystemModelSchema = new Schema({
|
||||
model: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true
|
||||
},
|
||||
metadata: {
|
||||
type: Object,
|
||||
required: true,
|
||||
default: {}
|
||||
}
|
||||
});
|
||||
|
||||
export const MongoSystemModel = getMongoModel<SystemModelSchemaType>(
|
||||
'system_models',
|
||||
SystemModelSchema
|
||||
);
|
||||
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"provider": "OpenAI",
|
||||
"model": "whisper-1",
|
||||
"name": "whisper-1",
|
||||
"charsPointsPrice": 0
|
||||
}
|
||||
@@ -1,32 +0,0 @@
|
||||
{
|
||||
"provider": "OpenAI",
|
||||
"model": "tts-1",
|
||||
"name": "TTS1",
|
||||
"charsPointsPrice": 0,
|
||||
"voices": [
|
||||
{
|
||||
"label": "Alloy",
|
||||
"value": "alloy"
|
||||
},
|
||||
{
|
||||
"label": "Echo",
|
||||
"value": "echo"
|
||||
},
|
||||
{
|
||||
"label": "Fable",
|
||||
"value": "fable"
|
||||
},
|
||||
{
|
||||
"label": "Onyx",
|
||||
"value": "onyx"
|
||||
},
|
||||
{
|
||||
"label": "Nova",
|
||||
"value": "nova"
|
||||
},
|
||||
{
|
||||
"label": "Shimmer",
|
||||
"value": "shimmer"
|
||||
}
|
||||
]
|
||||
}
|
||||
195
packages/service/core/ai/config/utils.ts
Normal file
195
packages/service/core/ai/config/utils.ts
Normal file
@@ -0,0 +1,195 @@
|
||||
import path from 'path';
import * as fs from 'fs';
import { fileURLToPath } from 'url';
import { debounce } from 'lodash';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import {
  LLMModelItemType,
  EmbeddingModelItemType,
  TTSModelType,
  STTModelType,
  ReRankModelItemType
} from '@fastgpt/global/core/ai/model.d';
import { ModelProviderType } from '@fastgpt/global/core/ai/provider';
import { delay } from '@fastgpt/global/common/system/utils';
import { SystemModelItemType } from '../type';
import { findModelFromAlldata } from '../model';
import { MongoSystemModel } from './schema';
import {
  reloadFastGPTConfigBuffer,
  updateFastGPTConfigBuffer
} from '../../../common/system/config/controller';
|
||||
|
||||
/*
|
||||
TODO: 分优先级读取:
|
||||
1. 有外部挂载目录,则读取外部的
|
||||
2. 没有外部挂载目录,则读取本地的。然后试图拉取云端的进行覆盖。
|
||||
*/
|
||||
export const loadSystemModels = async (init = false) => {
|
||||
const getProviderList = () => {
|
||||
const currentFileUrl = new URL(import.meta.url);
|
||||
const modelsPath = path.join(path.dirname(currentFileUrl.pathname), 'provider');
|
||||
|
||||
return fs.readdirSync(modelsPath) as string[];
|
||||
};
|
||||
const pushModel = (model: SystemModelItemType) => {
|
||||
global.systemModelList.push(model);
|
||||
|
||||
if (model.isActive) {
|
||||
global.systemActiveModelList.push(model);
|
||||
|
||||
if (model.type === ModelTypeEnum.llm) {
|
||||
global.llmModelMap.set(model.model, model);
|
||||
global.llmModelMap.set(model.name, model);
|
||||
if (model.isDefault) {
|
||||
global.systemDefaultModel.llm = model;
|
||||
}
|
||||
} else if (model.type === ModelTypeEnum.embedding) {
|
||||
global.embeddingModelMap.set(model.model, model);
|
||||
global.embeddingModelMap.set(model.name, model);
|
||||
if (model.isDefault) {
|
||||
global.systemDefaultModel.embedding = model;
|
||||
}
|
||||
} else if (model.type === ModelTypeEnum.tts) {
|
||||
global.ttsModelMap.set(model.model, model);
|
||||
global.ttsModelMap.set(model.name, model);
|
||||
if (model.isDefault) {
|
||||
global.systemDefaultModel.tts = model;
|
||||
}
|
||||
} else if (model.type === ModelTypeEnum.stt) {
|
||||
global.sttModelMap.set(model.model, model);
|
||||
global.sttModelMap.set(model.name, model);
|
||||
if (model.isDefault) {
|
||||
global.systemDefaultModel.stt = model;
|
||||
}
|
||||
} else if (model.type === ModelTypeEnum.rerank) {
|
||||
global.reRankModelMap.set(model.model, model);
|
||||
global.reRankModelMap.set(model.name, model);
|
||||
if (model.isDefault) {
|
||||
global.systemDefaultModel.rerank = model;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (!init && global.systemModelList) return;
|
||||
|
||||
global.systemModelList = [];
|
||||
global.systemActiveModelList = [];
|
||||
global.llmModelMap = new Map<string, LLMModelItemType>();
|
||||
global.embeddingModelMap = new Map<string, EmbeddingModelItemType>();
|
||||
global.ttsModelMap = new Map<string, TTSModelType>();
|
||||
global.sttModelMap = new Map<string, STTModelType>();
|
||||
global.reRankModelMap = new Map<string, ReRankModelItemType>();
|
||||
// @ts-ignore
|
||||
global.systemDefaultModel = {};
|
||||
|
||||
try {
|
||||
const dbModels = await MongoSystemModel.find({}).lean();
|
||||
const providerList = getProviderList();
|
||||
|
||||
// System model
|
||||
await Promise.all(
|
||||
providerList.map(async (name) => {
|
||||
const fileContent = (await import(`./provider/${name}`))?.default as {
|
||||
provider: ModelProviderType;
|
||||
list: SystemModelItemType[];
|
||||
};
|
||||
|
||||
fileContent.list.forEach((fileModel) => {
|
||||
const dbModel = dbModels.find((item) => item.model === fileModel.model);
|
||||
|
||||
const modelData: any = {
|
||||
...fileModel,
|
||||
...dbModel?.metadata,
|
||||
provider: dbModel?.metadata?.provider || fileContent.provider,
|
||||
type: dbModel?.metadata?.type || fileModel.type,
|
||||
isCustom: false
|
||||
};
|
||||
|
||||
pushModel(modelData);
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
// Custom model
|
||||
dbModels.forEach((dbModel) => {
|
||||
if (global.systemModelList.find((item) => item.model === dbModel.model)) return;
|
||||
|
||||
pushModel({
|
||||
...dbModel.metadata,
|
||||
isCustom: true
|
||||
});
|
||||
});
|
||||
|
||||
// Default model check
|
||||
if (!global.systemDefaultModel.llm) {
|
||||
global.systemDefaultModel.llm = Array.from(global.llmModelMap.values())[0];
|
||||
}
|
||||
if (!global.systemDefaultModel.embedding) {
|
||||
global.systemDefaultModel.embedding = Array.from(global.embeddingModelMap.values())[0];
|
||||
}
|
||||
if (!global.systemDefaultModel.tts) {
|
||||
global.systemDefaultModel.tts = Array.from(global.ttsModelMap.values())[0];
|
||||
}
|
||||
if (!global.systemDefaultModel.stt) {
|
||||
global.systemDefaultModel.stt = Array.from(global.sttModelMap.values())[0];
|
||||
}
|
||||
if (!global.systemDefaultModel.rerank) {
|
||||
global.systemDefaultModel.rerank = Array.from(global.reRankModelMap.values())[0];
|
||||
}
|
||||
|
||||
console.log('Load models success', JSON.stringify(global.systemActiveModelList, null, 2));
|
||||
} catch (error) {
|
||||
console.error('Load models error', error);
|
||||
// @ts-ignore
|
||||
global.systemModelList = undefined;
|
||||
}
|
||||
};
|
||||
|
||||
export const getSystemModelConfig = async (model: string): Promise<SystemModelItemType> => {
|
||||
const modelData = findModelFromAlldata(model);
|
||||
if (!modelData) return Promise.reject('Model is not found');
|
||||
if (modelData.isCustom) return Promise.reject('Custom model not data');
|
||||
|
||||
// Read file
|
||||
const fileContent = (await import(`./provider/${modelData.provider}`))?.default as {
|
||||
provider: ModelProviderType;
|
||||
list: SystemModelItemType[];
|
||||
};
|
||||
|
||||
const config = fileContent.list.find((item) => item.model === model);
|
||||
|
||||
if (!config) return Promise.reject('Model config is not found');
|
||||
|
||||
return {
|
||||
...config,
|
||||
provider: modelData.provider,
|
||||
isCustom: false
|
||||
};
|
||||
};
|
||||
|
||||
export const watchSystemModelUpdate = () => {
|
||||
const changeStream = MongoSystemModel.watch();
|
||||
|
||||
changeStream.on(
|
||||
'change',
|
||||
debounce(async () => {
|
||||
try {
|
||||
// Main node will reload twice
|
||||
await loadSystemModels(true);
|
||||
// All node reaload buffer
|
||||
await reloadFastGPTConfigBuffer();
|
||||
} catch (error) {}
|
||||
}, 500)
|
||||
);
|
||||
};
|
||||
|
||||
// 更新完模型后,需要重载缓存
|
||||
export const updatedReloadSystemModel = async () => {
|
||||
// 1. 更新模型(所有节点都会触发)
|
||||
await loadSystemModels(true);
|
||||
// 2. 更新缓存(仅主节点触发)
|
||||
await updateFastGPTConfigBuffer();
|
||||
// 3. 延迟1秒,等待其他节点刷新
|
||||
await delay(1000);
|
||||
};
|
||||
@@ -1,11 +1,11 @@
|
||||
import { VectorModelItemType } from '@fastgpt/global/core/ai/model.d';
|
||||
import { EmbeddingModelItemType } from '@fastgpt/global/core/ai/model.d';
|
||||
import { getAIApi } from '../config';
|
||||
import { countPromptTokens } from '../../../common/string/tiktoken/index';
|
||||
import { EmbeddingTypeEnm } from '@fastgpt/global/core/ai/constants';
|
||||
import { addLog } from '../../../common/system/log';
|
||||
|
||||
type GetVectorProps = {
|
||||
model: VectorModelItemType;
|
||||
model: EmbeddingModelItemType;
|
||||
input: string;
|
||||
type?: `${EmbeddingTypeEnm}`;
|
||||
};
|
||||
@@ -24,13 +24,23 @@ export async function getVectorsByText({ model, input, type }: GetVectorProps) {
|
||||
|
||||
// input text to vector
|
||||
const result = await ai.embeddings
|
||||
.create({
|
||||
...model.defaultConfig,
|
||||
...(type === EmbeddingTypeEnm.db && model.dbConfig),
|
||||
...(type === EmbeddingTypeEnm.query && model.queryConfig),
|
||||
model: model.model,
|
||||
input: [input]
|
||||
})
|
||||
.create(
|
||||
{
|
||||
...model.defaultConfig,
|
||||
...(type === EmbeddingTypeEnm.db && model.dbConfig),
|
||||
...(type === EmbeddingTypeEnm.query && model.queryConfig),
|
||||
model: model.model,
|
||||
input: [input]
|
||||
},
|
||||
model.requestUrl && model.requestAuth
|
||||
? {
|
||||
path: model.requestUrl,
|
||||
headers: {
|
||||
Authorization: `Bearer ${model.requestAuth}`
|
||||
}
|
||||
}
|
||||
: {}
|
||||
)
|
||||
.then(async (res) => {
|
||||
if (!res.data) {
|
||||
addLog.error('Embedding API is not responding', res);
|
||||
|
||||
@@ -2,10 +2,12 @@ import { replaceVariable } from '@fastgpt/global/common/string/tools';
|
||||
import { createChatCompletion } from '../config';
|
||||
import { ChatItemType } from '@fastgpt/global/core/chat/type';
|
||||
import { countGptMessagesTokens, countPromptTokens } from '../../../common/string/tiktoken/index';
|
||||
import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
|
||||
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
|
||||
import { getLLMModel } from '../model';
|
||||
import { llmCompletionsBodyFormat } from '../utils';
|
||||
import { addLog } from '../../../common/system/log';
|
||||
import { filterGPTMessageByMaxContext } from '../../chat/utils';
|
||||
import json5 from 'json5';
|
||||
|
||||
/*
|
||||
query extension - 问题扩展
|
||||
@@ -13,72 +15,73 @@ import { addLog } from '../../../common/system/log';
|
||||
*/
|
||||
|
||||
const title = global.feConfigs?.systemTitle || 'FastAI';
|
||||
const defaultPrompt = `作为一个向量检索助手,你的任务是结合历史记录,从不同角度,为“原问题”生成个不同版本的“检索词”,从而提高向量检索的语义丰富度,提高向量检索的精度。
|
||||
const defaultPrompt = `## 你的任务
|
||||
你作为一个向量检索助手,你的任务是结合历史记录,从不同角度,为“原问题”生成个不同版本的“检索词”,从而提高向量检索的语义丰富度,提高向量检索的精度。
|
||||
生成的问题要求指向对象清晰明确,并与“原问题语言相同”。
|
||||
|
||||
参考 <Example></Example> 标中的示例来完成任务。
|
||||
## 参考示例
|
||||
|
||||
<Example>
|
||||
历史记录:
|
||||
"""
|
||||
null
|
||||
"""
|
||||
原问题: 介绍下剧情。
|
||||
检索词: ["介绍下故事的背景。","故事的主题是什么?","介绍下故事的主要人物。"]
|
||||
----------------
|
||||
历史记录:
|
||||
"""
|
||||
Q: 对话背景。
|
||||
A: 当前对话是关于 Nginx 的介绍和使用等。
|
||||
user: 对话背景。
|
||||
assistant: 当前对话是关于 Nginx 的介绍和使用等。
|
||||
"""
|
||||
原问题: 怎么下载
|
||||
检索词: ["Nginx 如何下载?","下载 Nginx 需要什么条件?","有哪些渠道可以下载 Nginx?"]
|
||||
----------------
|
||||
历史记录:
|
||||
"""
|
||||
Q: 对话背景。
|
||||
A: 当前对话是关于 Nginx 的介绍和使用等。
|
||||
Q: 报错 "no connection"
|
||||
A: 报错"no connection"可能是因为……
|
||||
user: 对话背景。
|
||||
assistant: 当前对话是关于 Nginx 的介绍和使用等。
|
||||
user: 报错 "no connection"
|
||||
assistant: 报错"no connection"可能是因为……
|
||||
"""
|
||||
原问题: 怎么解决
|
||||
检索词: ["Nginx报错"no connection"如何解决?","造成'no connection'报错的原因。","Nginx提示'no connection',要怎么办?"]
|
||||
----------------
|
||||
历史记录:
|
||||
"""
|
||||
Q: 护产假多少天?
|
||||
A: 护产假的天数根据员工所在的城市而定。请提供您所在的城市,以便我回答您的问题。
|
||||
user: How long is the maternity leave?
|
||||
assistant: The number of days of maternity leave depends on the city in which the employee is located. Please provide your city so that I can answer your questions.
|
||||
"""
|
||||
原问题: 沈阳
|
||||
检索词: ["沈阳的护产假多少天?","沈阳的护产假政策。","沈阳的护产假标准。"]
|
||||
原问题: ShenYang
|
||||
检索词: ["How many days is maternity leave in Shenyang?","Shenyang's maternity leave policy.","The standard of maternity leave in Shenyang."]
|
||||
----------------
|
||||
历史记录:
|
||||
"""
|
||||
Q: 作者是谁?
|
||||
A: ${title} 的作者是 labring。
|
||||
user: 作者是谁?
|
||||
assistant: ${title} 的作者是 labring。
|
||||
"""
|
||||
原问题: Tell me about him
|
||||
检索词: ["Introduce labring, the author of ${title}.","Background information on author labring.","Why does labring do ${title}?"]
|
||||
----------------
|
||||
历史记录:
|
||||
"""
|
||||
Q: 对话背景。
|
||||
A: 关于 ${title} 的介绍和使用等问题。
|
||||
user: 对话背景。
|
||||
assistant: 关于 ${title} 的介绍和使用等问题。
|
||||
"""
|
||||
原问题: 你好。
|
||||
检索词: ["你好"]
|
||||
----------------
|
||||
历史记录:
|
||||
"""
|
||||
Q: ${title} 如何收费?
|
||||
A: ${title} 收费可以参考……
|
||||
user: ${title} 如何收费?
|
||||
assistant: ${title} 收费可以参考……
|
||||
"""
|
||||
原问题: 你知道 laf 么?
|
||||
检索词: ["laf 的官网地址是多少?","laf 的使用教程。","laf 有什么特点和优势。"]
|
||||
----------------
|
||||
历史记录:
|
||||
"""
|
||||
Q: ${title} 的优势
|
||||
A: 1. 开源
|
||||
user: ${title} 的优势
|
||||
assistant: 1. 开源
|
||||
2. 简便
|
||||
3. 扩展性强
|
||||
"""
|
||||
@@ -87,18 +90,20 @@ A: 1. 开源
|
||||
----------------
|
||||
历史记录:
|
||||
"""
|
||||
Q: 什么是 ${title}?
|
||||
A: ${title} 是一个 RAG 平台。
|
||||
Q: 什么是 Laf?
|
||||
A: Laf 是一个云函数开发平台。
|
||||
user: 什么是 ${title}?
|
||||
assistant: ${title} 是一个 RAG 平台。
|
||||
user: 什么是 Laf?
|
||||
assistant: Laf 是一个云函数开发平台。
|
||||
"""
|
||||
原问题: 它们有什么关系?
|
||||
检索词: ["${title}和Laf有什么关系?","介绍下${title}","介绍下Laf"]
|
||||
</Example>
|
||||
|
||||
-----
|
||||
## 输出要求
|
||||
|
||||
下面是正式的任务:
|
||||
1. 输出格式为 JSON 数组,数组中每个元素为字符串。无需对输出进行任何解释。
|
||||
2. 输出语言与原问题相同。原问题为中文则输出中文;原问题为英文则输出英文。
|
||||
|
||||
## 开始任务
|
||||
|
||||
历史记录:
|
||||
"""
|
||||
@@ -125,26 +130,39 @@ export const queryExtension = async ({
|
||||
outputTokens: number;
|
||||
}> => {
|
||||
const systemFewShot = chatBg
|
||||
? `Q: 对话背景。
|
||||
A: ${chatBg}
|
||||
? `user: 对话背景。
|
||||
assistant: ${chatBg}
|
||||
`
|
||||
: '';
|
||||
const historyFewShot = histories
|
||||
.map((item) => {
|
||||
const role = item.obj === 'Human' ? 'Q' : 'A';
|
||||
return `${role}: ${chatValue2RuntimePrompt(item.value).text}`;
|
||||
})
|
||||
.join('\n');
|
||||
const concatFewShot = `${systemFewShot}${historyFewShot}`.trim();
|
||||
|
||||
const modelData = getLLMModel(model);
|
||||
const filterHistories = await filterGPTMessageByMaxContext({
|
||||
messages: chats2GPTMessages({ messages: histories, reserveId: false }),
|
||||
maxContext: modelData.maxContext - 1000
|
||||
});
|
||||
|
||||
const historyFewShot = filterHistories
|
||||
.map((item) => {
|
||||
const role = item.role;
|
||||
const content = item.content;
|
||||
if ((role === 'user' || role === 'assistant') && content) {
|
||||
if (typeof content === 'string') {
|
||||
return `${role}: ${content}`;
|
||||
} else {
|
||||
return `${role}: ${content.map((item) => (item.type === 'text' ? item.text : '')).join('\n')}`;
|
||||
}
|
||||
}
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n');
|
||||
const concatFewShot = `${systemFewShot}${historyFewShot}`.trim();
|
||||
|
||||
const messages = [
|
||||
{
|
||||
role: 'user',
|
||||
content: replaceVariable(defaultPrompt, {
|
||||
query: `${query}`,
|
||||
histories: concatFewShot
|
||||
histories: concatFewShot || 'null'
|
||||
})
|
||||
}
|
||||
] as any;
|
||||
@@ -154,7 +172,7 @@ A: ${chatBg}
|
||||
{
|
||||
stream: false,
|
||||
model: modelData.model,
|
||||
temperature: 0.01,
|
||||
temperature: 0.1,
|
||||
messages
|
||||
},
|
||||
modelData
|
||||
@@ -172,22 +190,41 @@ A: ${chatBg}
|
||||
};
|
||||
}
|
||||
|
||||
const start = answer.indexOf('[');
|
||||
const end = answer.lastIndexOf(']');
|
||||
if (start === -1 || end === -1) {
|
||||
addLog.warn('Query extension failed, not a valid JSON', {
|
||||
answer
|
||||
});
|
||||
return {
|
||||
rawQuery: query,
|
||||
extensionQueries: [],
|
||||
model,
|
||||
inputTokens: 0,
|
||||
outputTokens: 0
|
||||
};
|
||||
}
|
||||
|
||||
// Intercept the content of [] and retain []
|
||||
answer = answer.match(/\[.*?\]/)?.[0] || '';
|
||||
answer = answer.replace(/\\"/g, '"');
|
||||
const jsonStr = answer
|
||||
.substring(start, end + 1)
|
||||
.replace(/(\\n|\\)/g, '')
|
||||
.replace(/ /g, '');
|
||||
|
||||
try {
|
||||
const queries = JSON.parse(answer) as string[];
|
||||
const queries = json5.parse(jsonStr) as string[];
|
||||
|
||||
return {
|
||||
rawQuery: query,
|
||||
extensionQueries: Array.isArray(queries) ? queries : [],
|
||||
extensionQueries: (Array.isArray(queries) ? queries : []).slice(0, 5),
|
||||
model,
|
||||
inputTokens: await countGptMessagesTokens(messages),
|
||||
outputTokens: await countPromptTokens(answer)
|
||||
};
|
||||
} catch (error) {
|
||||
addLog.error(`Query extension error`, error);
|
||||
addLog.warn('Query extension failed, not a valid JSON', {
|
||||
answer
|
||||
});
|
||||
return {
|
||||
rawQuery: query,
|
||||
extensionQueries: [],
|
||||
|
||||
@@ -1,51 +1,52 @@
|
||||
import { SystemModelItemType } from './type';
|
||||
|
||||
export const getDefaultLLMModel = () => global?.systemDefaultModel.llm!;
|
||||
export const getLLMModel = (model?: string) => {
|
||||
return (
|
||||
global.llmModels.find((item) => item.model === model || item.name === model) ??
|
||||
global.llmModels[0]
|
||||
);
|
||||
if (!model) return getDefaultLLMModel();
|
||||
return global.llmModelMap.get(model) || getDefaultLLMModel();
|
||||
};
|
||||
|
||||
export const getDatasetModel = (model?: string) => {
|
||||
return (
|
||||
global.llmModels
|
||||
Array.from(global.llmModelMap.values())
|
||||
?.filter((item) => item.datasetProcess)
|
||||
?.find((item) => item.model === model || item.name === model) ?? global.llmModels[0]
|
||||
?.find((item) => item.model === model || item.name === model) ?? getDefaultLLMModel()
|
||||
);
|
||||
};
|
||||
|
||||
export const getVectorModel = (model?: string) => {
|
||||
return (
|
||||
global.vectorModels.find((item) => item.model === model || item.name === model) ||
|
||||
global.vectorModels[0]
|
||||
);
|
||||
export const getDefaultEmbeddingModel = () => global?.systemDefaultModel.embedding!;
|
||||
export const getEmbeddingModel = (model?: string) => {
|
||||
if (!model) return getDefaultEmbeddingModel();
|
||||
return global.embeddingModelMap.get(model) || getDefaultEmbeddingModel();
|
||||
};
|
||||
|
||||
export function getAudioSpeechModel(model?: string) {
|
||||
return (
|
||||
global.audioSpeechModels.find((item) => item.model === model || item.name === model) ||
|
||||
global.audioSpeechModels[0]
|
||||
);
|
||||
export const getDefaultTTSModel = () => global?.systemDefaultModel.tts!;
|
||||
export function getTTSModel(model?: string) {
|
||||
if (!model) return getDefaultTTSModel();
|
||||
return global.ttsModelMap.get(model) || getDefaultTTSModel();
|
||||
}
|
||||
|
||||
export function getWhisperModel(model?: string) {
|
||||
return global.whisperModel;
|
||||
export const getDefaultSTTModel = () => global?.systemDefaultModel.stt!;
|
||||
export function getSTTModel(model?: string) {
|
||||
if (!model) return getDefaultSTTModel();
|
||||
return global.sttModelMap.get(model) || getDefaultSTTModel();
|
||||
}
|
||||
|
||||
export const getDefaultRerankModel = () => global?.systemDefaultModel.rerank!;
|
||||
export function getReRankModel(model?: string) {
|
||||
return global.reRankModels.find((item) => item.model === model);
|
||||
if (!model) return getDefaultRerankModel();
|
||||
return global.reRankModelMap.get(model) || getDefaultRerankModel();
|
||||
}
|
||||
|
||||
export enum ModelTypeEnum {
|
||||
llm = 'llm',
|
||||
vector = 'vector',
|
||||
audioSpeech = 'audioSpeech',
|
||||
whisper = 'whisper',
|
||||
rerank = 'rerank'
|
||||
}
|
||||
export const getModelMap = {
|
||||
[ModelTypeEnum.llm]: getLLMModel,
|
||||
[ModelTypeEnum.vector]: getVectorModel,
|
||||
[ModelTypeEnum.audioSpeech]: getAudioSpeechModel,
|
||||
[ModelTypeEnum.whisper]: getWhisperModel,
|
||||
[ModelTypeEnum.rerank]: getReRankModel
|
||||
export const findAIModel = (model: string): SystemModelItemType | undefined => {
|
||||
return (
|
||||
global.llmModelMap.get(model) ||
|
||||
global.embeddingModelMap.get(model) ||
|
||||
global.ttsModelMap.get(model) ||
|
||||
global.sttModelMap.get(model) ||
|
||||
global.reRankModelMap.get(model)
|
||||
);
|
||||
};
|
||||
export const findModelFromAlldata = (model: string) => {
|
||||
return global.systemModelList.find((item) => item.model === model);
|
||||
};
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import { addLog } from '../../../common/system/log';
|
||||
import { POST } from '../../../common/api/serverRequest';
|
||||
import { getDefaultRerankModel } from '../model';
|
||||
import { getAxiosConfig } from '../config';
|
||||
import { ReRankModelItemType } from '@fastgpt/global/core/ai/model.d';
|
||||
|
||||
type PostReRankResponse = {
|
||||
id: string;
|
||||
@@ -11,21 +14,23 @@ type PostReRankResponse = {
|
||||
type ReRankCallResult = { id: string; score?: number }[];
|
||||
|
||||
export function reRankRecall({
|
||||
model = getDefaultRerankModel(),
|
||||
query,
|
||||
documents
|
||||
}: {
|
||||
model?: ReRankModelItemType;
|
||||
query: string;
|
||||
documents: { id: string; text: string }[];
|
||||
}): Promise<ReRankCallResult> {
|
||||
const model = global.reRankModels[0];
|
||||
|
||||
if (!model || !model?.requestUrl) {
|
||||
if (!model) {
|
||||
return Promise.reject('no rerank model');
|
||||
}
|
||||
|
||||
const { baseUrl, authorization } = getAxiosConfig({});
|
||||
|
||||
let start = Date.now();
|
||||
return POST<PostReRankResponse>(
|
||||
model.requestUrl,
|
||||
model.requestUrl ? model.requestUrl : `${baseUrl}/rerank`,
|
||||
{
|
||||
model: model.model,
|
||||
query,
|
||||
@@ -33,7 +38,7 @@ export function reRankRecall({
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${model.requestAuth}`
|
||||
Authorization: model.requestAuth ? model.requestAuth : authorization
|
||||
},
|
||||
timeout: 30000
|
||||
}
|
||||
@@ -53,6 +58,6 @@ export function reRankRecall({
|
||||
.catch((err) => {
|
||||
addLog.error('rerank error', err);
|
||||
|
||||
return [];
|
||||
return Promise.reject(err);
|
||||
});
|
||||
}
|
||||
|
||||
42
packages/service/core/ai/type.d.ts
vendored
Normal file
42
packages/service/core/ai/type.d.ts
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
|
||||
import {
|
||||
STTModelType,
|
||||
ReRankModelItemType,
|
||||
TTSModelType,
|
||||
EmbeddingModelItemType,
|
||||
LLMModelItemType
|
||||
} from '@fastgpt/global/core/ai/model.d';
|
||||
|
||||
/**
 * Persisted shape of a system model configuration record:
 * the model id plus the full typed metadata for that model.
 */
export type SystemModelSchemaType = {
  _id: string;
  // Model identifier; also used as the lookup key in the global maps below.
  model: string;
  metadata: SystemModelItemType;
};

/**
 * Union of every concrete model-item type the system manages
 * (LLM, embedding, TTS, STT, rerank).
 */
export type SystemModelItemType =
  | LLMModelItemType
  | EmbeddingModelItemType
  | TTSModelType
  | STTModelType
  | ReRankModelItemType;

/**
 * Per-category default model selection. Every entry is optional —
 * a category may have no default configured.
 */
export type SystemDefaultModelType = {
  [ModelTypeEnum.llm]?: LLMModelItemType;
  [ModelTypeEnum.embedding]?: EmbeddingModelItemType;
  [ModelTypeEnum.tts]?: TTSModelType;
  [ModelTypeEnum.stt]?: STTModelType;
  [ModelTypeEnum.rerank]?: ReRankModelItemType;
};

declare global {
  // Flat list of all configured models across every category.
  var systemModelList: SystemModelItemType[];
  // var systemModelMap: Map<string, SystemModelItemType>;
  // Per-category lookup maps keyed by model id.
  var llmModelMap: Map<string, LLMModelItemType>;
  var embeddingModelMap: Map<string, EmbeddingModelItemType>;
  var ttsModelMap: Map<string, TTSModelType>;
  var sttModelMap: Map<string, STTModelType>;
  var reRankModelMap: Map<string, ReRankModelItemType>;

  // Subset of systemModelList — presumably only models marked active; TODO confirm against the populating code.
  var systemActiveModelList: SystemModelItemType[];
  // The configured per-category defaults.
  var systemDefaultModel: SystemDefaultModelType;
}
|
||||
@@ -2,32 +2,23 @@ import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
|
||||
import {
|
||||
ChatCompletionCreateParamsNonStreaming,
|
||||
ChatCompletionCreateParamsStreaming,
|
||||
ChatCompletionMessageParam
|
||||
StreamChatType
|
||||
} from '@fastgpt/global/core/ai/type';
|
||||
import { countGptMessagesTokens } from '../../common/string/tiktoken';
|
||||
import { getLLMModel } from './model';
|
||||
|
||||
export const computedMaxToken = async ({
|
||||
/*
|
||||
Count response max token
|
||||
*/
|
||||
export const computedMaxToken = ({
|
||||
maxToken,
|
||||
model,
|
||||
filterMessages = []
|
||||
model
|
||||
}: {
|
||||
maxToken?: number;
|
||||
model: LLMModelItemType;
|
||||
filterMessages: ChatCompletionMessageParam[];
|
||||
}) => {
|
||||
if (maxToken === undefined) return;
|
||||
|
||||
maxToken = Math.min(maxToken, model.maxResponse);
|
||||
const tokensLimit = model.maxContext;
|
||||
|
||||
/* count response max token */
|
||||
const promptsToken = await countGptMessagesTokens(filterMessages);
|
||||
maxToken = promptsToken + maxToken > tokensLimit ? tokensLimit - promptsToken : maxToken;
|
||||
|
||||
if (maxToken <= 0) {
|
||||
maxToken = 200;
|
||||
}
|
||||
return maxToken;
|
||||
};
|
||||
|
||||
@@ -39,6 +30,7 @@ export const computedTemperature = ({
|
||||
model: LLMModelItemType;
|
||||
temperature: number;
|
||||
}) => {
|
||||
if (typeof model.maxTemperature !== 'number') return undefined;
|
||||
temperature = +(model.maxTemperature * (temperature / 10)).toFixed(2);
|
||||
temperature = Math.max(temperature, 0.01);
|
||||
|
||||
@@ -87,3 +79,12 @@ export const llmCompletionsBodyFormat = <T extends CompletionsBodyType>(
|
||||
|
||||
return requestBody as InferCompletionsBody<T>;
|
||||
};
|
||||
|
||||
export const llmStreamResponseToText = async (response: StreamChatType) => {
|
||||
let answer = '';
|
||||
for await (const part of response) {
|
||||
const content = part.choices?.[0]?.delta?.content || '';
|
||||
answer += content;
|
||||
}
|
||||
return answer;
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user