feat: markdown extension (#3663)

* feat: markdown extension

* media cors

* rerank test

* default price

* perf: default model

* fix: cannot customize provider

* fix: default model select

* update bg

* perf: default model selector

* fix: usage export

* i18n

* fix: rerank

* update init extension

* perf: ip limit check

* doubao model order

* web default model

* perf: tts selector

* perf: tts error

* qrcode package
This commit is contained in:
Archer
2025-01-24 23:42:04 +08:00
committed by GitHub
parent 02fcb6a61e
commit d2948d7e57
49 changed files with 672 additions and 290 deletions

View File

@@ -1,6 +1,5 @@
import { ApiRequestProps } from '../../type/next';
import requestIp from 'request-ip';
import { ERROR_ENUM } from '@fastgpt/global/common/error/errorCode';
import { authFrequencyLimit } from '../system/frequencyLimit/utils';
import { addSeconds } from 'date-fns';
import { NextApiResponse } from 'next';
@@ -9,7 +8,17 @@ import { jsonRes } from '../response';
// unit: times/s
// how to use?
// export default NextAPI(useQPSLimit(10), handler); // limit 10 times per second for a ip
export function useIPFrequencyLimit(seconds: number, limit: number, force = false) {
export function useIPFrequencyLimit({
id,
seconds,
limit,
force = false
}: {
id: string;
seconds: number;
limit: number;
force?: boolean;
}) {
return async (req: ApiRequestProps, res: NextApiResponse) => {
const ip = requestIp.getClientIp(req);
if (!ip || (process.env.USE_IP_LIMIT !== 'true' && !force)) {
@@ -17,14 +26,14 @@ export function useIPFrequencyLimit(seconds: number, limit: number, force = fals
}
try {
await authFrequencyLimit({
eventId: 'ip-qps-limit' + ip,
eventId: `ip-qps-limit-${id}-` + ip,
maxAmount: limit,
expiredTime: addSeconds(new Date(), seconds)
});
} catch (_) {
jsonRes(res, {
code: 429,
error: ERROR_ENUM.tooManyRequest
error: `Too many request, request ${limit} times every ${seconds} seconds`
});
}
};

View File

@@ -33,7 +33,15 @@ export const jsonRes = <T = any>(
addLog.error(`Api response error: ${url}`, ERROR_RESPONSE[errResponseKey]);
return res.status(code).json(ERROR_RESPONSE[errResponseKey]);
res.status(code);
if (message) {
res.send(message);
} else {
res.json(ERROR_RESPONSE[errResponseKey]);
}
return;
}
// another error

View File

@@ -2,11 +2,11 @@
"provider": "Doubao",
"list": [
{
"model": "Doubao-lite-128k",
"name": "Doubao-lite-128k",
"maxContext": 128000,
"model": "Doubao-lite-4k",
"name": "Doubao-lite-4k",
"maxContext": 4000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"quoteMaxToken": 4000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
@@ -46,11 +46,11 @@
"type": "llm"
},
{
"model": "Doubao-lite-4k",
"name": "Doubao-lite-4k",
"maxContext": 4000,
"model": "Doubao-lite-128k",
"name": "Doubao-lite-128k",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 4000,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
@@ -68,11 +68,33 @@
"type": "llm"
},
{
"model": "Doubao-pro-128k",
"name": "Doubao-pro-128k",
"maxContext": 128000,
"model": "Doubao-vision-lite-32k",
"name": "Doubao-vision-lite-32k",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-pro-4k",
"name": "Doubao-pro-4k",
"maxContext": 4000,
"maxResponse": 4000,
"quoteMaxToken": 4000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
@@ -112,11 +134,11 @@
"type": "llm"
},
{
"model": "Doubao-pro-4k",
"name": "Doubao-pro-4k",
"maxContext": 4000,
"model": "Doubao-pro-128k",
"name": "Doubao-pro-128k",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 4000,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
@@ -133,28 +155,6 @@
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-vision-lite-32k",
"name": "Doubao-vision-lite-32k",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-vision-pro-32k",
"name": "Doubao-vision-pro-32k",
@@ -192,4 +192,4 @@
"type": "embedding"
}
]
}
}

View File

@@ -47,194 +47,194 @@
},
{
"model": "speech-01-turbo",
"name": "Minimax-speech-01-turbo",
"name": "speech-01-turbo",
"voices": [
{
"label": "minimax-male-qn-qingse",
"label": "male-qn-qingse",
"value": "male-qn-qingse"
},
{
"label": "minimax-male-qn-jingying",
"label": "male-qn-jingying",
"value": "male-qn-jingying"
},
{
"label": "minimax-male-qn-badao",
"label": "male-qn-badao",
"value": "male-qn-badao"
},
{
"label": "minimax-male-qn-daxuesheng",
"label": "male-qn-daxuesheng",
"value": "male-qn-daxuesheng"
},
{
"label": "minimax-female-shaonv",
"label": "female-shaonv",
"value": "female-shaonv"
},
{
"label": "minimax-female-yujie",
"label": "female-yujie",
"value": "female-yujie"
},
{
"label": "minimax-female-chengshu",
"label": "female-chengshu",
"value": "female-chengshu"
},
{
"label": "minimax-female-tianmei",
"label": "female-tianmei",
"value": "female-tianmei"
},
{
"label": "minimax-presenter_male",
"label": "presenter_male",
"value": "presenter_male"
},
{
"label": "minimax-presenter_female",
"label": "presenter_female",
"value": "presenter_female"
},
{
"label": "minimax-audiobook_male_1",
"label": "audiobook_male_1",
"value": "audiobook_male_1"
},
{
"label": "minimax-audiobook_male_2",
"label": "audiobook_male_2",
"value": "audiobook_male_2"
},
{
"label": "minimax-audiobook_female_1",
"label": "audiobook_female_1",
"value": "audiobook_female_1"
},
{
"label": "minimax-audiobook_female_2",
"label": "audiobook_female_2",
"value": "audiobook_female_2"
},
{
"label": "minimax-male-qn-qingse-jingpin",
"label": "male-qn-qingse-jingpin",
"value": "male-qn-qingse-jingpin"
},
{
"label": "minimax-male-qn-jingying-jingpin",
"label": "male-qn-jingying-jingpin",
"value": "male-qn-jingying-jingpin"
},
{
"label": "minimax-male-qn-badao-jingpin",
"label": "male-qn-badao-jingpin",
"value": "male-qn-badao-jingpin"
},
{
"label": "minimax-male-qn-daxuesheng-jingpin",
"label": "male-qn-daxuesheng-jingpin",
"value": "male-qn-daxuesheng-jingpin"
},
{
"label": "minimax-female-shaonv-jingpin",
"label": "female-shaonv-jingpin",
"value": "female-shaonv-jingpin"
},
{
"label": "minimax-female-yujie-jingpin",
"label": "female-yujie-jingpin",
"value": "female-yujie-jingpin"
},
{
"label": "minimax-female-chengshu-jingpin",
"label": "female-chengshu-jingpin",
"value": "female-chengshu-jingpin"
},
{
"label": "minimax-female-tianmei-jingpin",
"label": "female-tianmei-jingpin",
"value": "female-tianmei-jingpin"
},
{
"label": "minimax-clever_boy",
"label": "clever_boy",
"value": "clever_boy"
},
{
"label": "minimax-cute_boy",
"label": "cute_boy",
"value": "cute_boy"
},
{
"label": "minimax-lovely_girl",
"label": "lovely_girl",
"value": "lovely_girl"
},
{
"label": "minimax-cartoon_pig",
"label": "cartoon_pig",
"value": "cartoon_pig"
},
{
"label": "minimax-bingjiao_didi",
"label": "bingjiao_didi",
"value": "bingjiao_didi"
},
{
"label": "minimax-junlang_nanyou",
"label": "junlang_nanyou",
"value": "junlang_nanyou"
},
{
"label": "minimax-chunzhen_xuedi",
"label": "chunzhen_xuedi",
"value": "chunzhen_xuedi"
},
{
"label": "minimax-lengdan_xiongzhang",
"label": "lengdan_xiongzhang",
"value": "lengdan_xiongzhang"
},
{
"label": "minimax-badao_shaoye",
"label": "badao_shaoye",
"value": "badao_shaoye"
},
{
"label": "minimax-tianxin_xiaoling",
"label": "tianxin_xiaoling",
"value": "tianxin_xiaoling"
},
{
"label": "minimax-qiaopi_mengmei",
"label": "qiaopi_mengmei",
"value": "qiaopi_mengmei"
},
{
"label": "minimax-wumei_yujie",
"label": "wumei_yujie",
"value": "wumei_yujie"
},
{
"label": "minimax-diadia_xuemei",
"label": "diadia_xuemei",
"value": "diadia_xuemei"
},
{
"label": "minimax-danya_xuejie",
"label": "danya_xuejie",
"value": "danya_xuejie"
},
{
"label": "minimax-Santa_Claus",
"label": "Santa_Claus",
"value": "Santa_Claus"
},
{
"label": "minimax-Grinch",
"label": "Grinch",
"value": "Grinch"
},
{
"label": "minimax-Rudolph",
"label": "Rudolph",
"value": "Rudolph"
},
{
"label": "minimax-Arnold",
"label": "Arnold",
"value": "Arnold"
},
{
"label": "minimax-Charming_Santa",
"label": "Charming_Santa",
"value": "Charming_Santa"
},
{
"label": "minimax-Charming_Lady",
"label": "Charming_Lady",
"value": "Charming_Lady"
},
{
"label": "minimax-Sweet_Girl",
"label": "Sweet_Girl",
"value": "Sweet_Girl"
},
{
"label": "minimax-Cute_Elf",
"label": "Cute_Elf",
"value": "Cute_Elf"
},
{
"label": "minimax-Attractive_Girl",
"label": "Attractive_Girl",
"value": "Attractive_Girl"
},
{
"label": "minimax-Serene_Woman",
"label": "Serene_Woman",
"value": "Serene_Woman"
}
],
"type": "tts"
}
]
}
}

View File

@@ -10,7 +10,7 @@
"maxTemperature": 1.2,
"vision": true,
"toolChoice": true,
"functionCall": false,
"functionCall": true,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
@@ -53,7 +53,7 @@
"maxTemperature": 1.2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"functionCall": true,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
@@ -77,7 +77,7 @@
"maxTemperature": 1.2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"functionCall": true,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
@@ -102,7 +102,7 @@
"maxTemperature": 1.2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"functionCall": true,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
@@ -127,7 +127,7 @@
"maxTemperature": 1.2,
"vision": false,
"toolChoice": true,
"functionCall": false,
"functionCall": true,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
@@ -147,7 +147,7 @@
"maxTemperature": 1.2,
"vision": true,
"toolChoice": true,
"functionCall": false,
"functionCall": true,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,

View File

@@ -12,6 +12,7 @@ import {
} from '@fastgpt/global/core/ai/model.d';
import { debounce } from 'lodash';
import { ModelProviderType } from '@fastgpt/global/core/ai/provider';
import { findModelFromAlldata } from '../model';
/*
TODO: 分优先级读取:
@@ -95,7 +96,7 @@ export const loadSystemModels = async (init = false) => {
const modelData: any = {
...fileModel,
...dbModel?.metadata,
provider: fileContent.provider,
provider: dbModel?.metadata?.provider || fileContent.provider,
type: dbModel?.metadata?.type || fileModel.type,
isCustom: false
};
@@ -140,6 +141,28 @@ export const loadSystemModels = async (init = false) => {
}
};
export const getSystemModelConfig = async (model: string): Promise<SystemModelItemType> => {
const modelData = findModelFromAlldata(model);
if (!modelData) return Promise.reject('Model is not found');
if (modelData.isCustom) return Promise.reject('Custom model not data');
// Read file
const fileContent = (await import(`./provider/${modelData.provider}`))?.default as {
provider: ModelProviderType;
list: SystemModelItemType[];
};
const config = fileContent.list.find((item) => item.model === model);
if (!config) return Promise.reject('Model config is not found');
return {
...config,
provider: modelData.provider,
isCustom: false
};
};
export const watchSystemModelUpdate = () => {
const changeStream = MongoSystemModel.watch();

View File

@@ -2,6 +2,7 @@ import { addLog } from '../../../common/system/log';
import { POST } from '../../../common/api/serverRequest';
import { getDefaultRerankModel } from '../model';
import { getAxiosConfig } from '../config';
import { ReRankModelItemType } from '@fastgpt/global/core/ai/model.d';
type PostReRankResponse = {
id: string;
@@ -13,15 +14,15 @@ type PostReRankResponse = {
type ReRankCallResult = { id: string; score?: number }[];
export function reRankRecall({
model = getDefaultRerankModel(),
query,
documents
}: {
model?: ReRankModelItemType;
query: string;
documents: { id: string; text: string }[];
}): Promise<ReRankCallResult> {
const model = getDefaultRerankModel();
if (!model || !model?.requestUrl) {
if (!model) {
return Promise.reject('no rerank model');
}
@@ -29,7 +30,7 @@ export function reRankRecall({
let start = Date.now();
return POST<PostReRankResponse>(
model.requestUrl ? model.requestUrl : `${baseUrl}/v1/rerank`,
model.requestUrl ? model.requestUrl : `${baseUrl}/rerank`,
{
model: model.model,
query,
@@ -57,6 +58,6 @@ export function reRankRecall({
.catch((err) => {
addLog.error('rerank error', err);
return [];
return Promise.reject(err);
});
}