feat: markdown extension (#3663)

* feat: markdown extension

* media cors

* rerank test

* default price

* perf: default model

* fix: cannot custom provider

* fix: default model select

* update bg

* perf: default model selector

* fix: usage export

* i18n

* fix: rerank

* update init extension

* perf: ip limit check

* doubao model order

* web default model

* perf: tts selector

* perf: tts error

* qrcode package
This commit is contained in:
Archer
2025-01-24 23:42:04 +08:00
committed by GitHub
parent 02fcb6a61e
commit d2948d7e57
49 changed files with 672 additions and 290 deletions

View File

@@ -1,6 +1,5 @@
import { ApiRequestProps } from '../../type/next';
import requestIp from 'request-ip';
import { ERROR_ENUM } from '@fastgpt/global/common/error/errorCode';
import { authFrequencyLimit } from '../system/frequencyLimit/utils';
import { addSeconds } from 'date-fns';
import { NextApiResponse } from 'next';
@@ -9,7 +8,17 @@ import { jsonRes } from '../response';
// unit: times/s
// how to use?
// export default NextAPI(useQPSLimit(10), handler); // limit 10 times per second for an IP
export function useIPFrequencyLimit(seconds: number, limit: number, force = false) {
export function useIPFrequencyLimit({
id,
seconds,
limit,
force = false
}: {
id: string;
seconds: number;
limit: number;
force?: boolean;
}) {
return async (req: ApiRequestProps, res: NextApiResponse) => {
const ip = requestIp.getClientIp(req);
if (!ip || (process.env.USE_IP_LIMIT !== 'true' && !force)) {
@@ -17,14 +26,14 @@ export function useIPFrequencyLimit(seconds: number, limit: number, force = fals
}
try {
await authFrequencyLimit({
eventId: 'ip-qps-limit' + ip,
eventId: `ip-qps-limit-${id}-` + ip,
maxAmount: limit,
expiredTime: addSeconds(new Date(), seconds)
});
} catch (_) {
jsonRes(res, {
code: 429,
error: ERROR_ENUM.tooManyRequest
error: `Too many request, request ${limit} times every ${seconds} seconds`
});
}
};

View File

@@ -33,7 +33,15 @@ export const jsonRes = <T = any>(
addLog.error(`Api response error: ${url}`, ERROR_RESPONSE[errResponseKey]);
return res.status(code).json(ERROR_RESPONSE[errResponseKey]);
res.status(code);
if (message) {
res.send(message);
} else {
res.json(ERROR_RESPONSE[errResponseKey]);
}
return;
}
// another error

View File

@@ -2,11 +2,11 @@
"provider": "Doubao",
"list": [
{
"model": "Doubao-lite-128k",
"name": "Doubao-lite-128k",
"maxContext": 128000,
"model": "Doubao-lite-4k",
"name": "Doubao-lite-4k",
"maxContext": 4000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"quoteMaxToken": 4000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
@@ -46,11 +46,11 @@
"type": "llm"
},
{
"model": "Doubao-lite-4k",
"name": "Doubao-lite-4k",
"maxContext": 4000,
"model": "Doubao-lite-128k",
"name": "Doubao-lite-128k",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 4000,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
@@ -68,11 +68,33 @@
"type": "llm"
},
{
"model": "Doubao-pro-128k",
"name": "Doubao-pro-128k",
"maxContext": 128000,
"model": "Doubao-vision-lite-32k",
"name": "Doubao-vision-lite-32k",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-pro-4k",
"name": "Doubao-pro-4k",
"maxContext": 4000,
"maxResponse": 4000,
"quoteMaxToken": 4000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
@@ -112,11 +134,11 @@
"type": "llm"
},
{
"model": "Doubao-pro-4k",
"name": "Doubao-pro-4k",
"maxContext": 4000,
"model": "Doubao-pro-128k",
"name": "Doubao-pro-128k",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 4000,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
@@ -133,28 +155,6 @@
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-vision-lite-32k",
"name": "Doubao-vision-lite-32k",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-vision-pro-32k",
"name": "Doubao-vision-pro-32k",
@@ -192,4 +192,4 @@
"type": "embedding"
}
]
}
}

View File

@@ -47,194 +47,194 @@
},
{
"model": "speech-01-turbo",
"name": "Minimax-speech-01-turbo",
"name": "speech-01-turbo",
"voices": [
{
"label": "minimax-male-qn-qingse",
"label": "male-qn-qingse",
"value": "male-qn-qingse"
},
{
"label": "minimax-male-qn-jingying",
"label": "male-qn-jingying",
"value": "male-qn-jingying"
},
{
"label": "minimax-male-qn-badao",
"label": "male-qn-badao",
"value": "male-qn-badao"
},
{
"label": "minimax-male-qn-daxuesheng",
"label": "male-qn-daxuesheng",
"value": "male-qn-daxuesheng"
},
{
"label": "minimax-female-shaonv",
"label": "female-shaonv",
"value": "female-shaonv"
},
{
"label": "minimax-female-yujie",
"label": "female-yujie",
"value": "female-yujie"
},
{
"label": "minimax-female-chengshu",
"label": "female-chengshu",
"value": "female-chengshu"
},
{
"label": "minimax-female-tianmei",
"label": "female-tianmei",
"value": "female-tianmei"
},
{
"label": "minimax-presenter_male",
"label": "presenter_male",
"value": "presenter_male"
},
{
"label": "minimax-presenter_female",
"label": "presenter_female",
"value": "presenter_female"
},
{
"label": "minimax-audiobook_male_1",
"label": "audiobook_male_1",
"value": "audiobook_male_1"
},
{
"label": "minimax-audiobook_male_2",
"label": "audiobook_male_2",
"value": "audiobook_male_2"
},
{
"label": "minimax-audiobook_female_1",
"label": "audiobook_female_1",
"value": "audiobook_female_1"
},
{
"label": "minimax-audiobook_female_2",
"label": "audiobook_female_2",
"value": "audiobook_female_2"
},
{
"label": "minimax-male-qn-qingse-jingpin",
"label": "male-qn-qingse-jingpin",
"value": "male-qn-qingse-jingpin"
},
{
"label": "minimax-male-qn-jingying-jingpin",
"label": "male-qn-jingying-jingpin",
"value": "male-qn-jingying-jingpin"
},
{
"label": "minimax-male-qn-badao-jingpin",
"label": "male-qn-badao-jingpin",
"value": "male-qn-badao-jingpin"
},
{
"label": "minimax-male-qn-daxuesheng-jingpin",
"label": "male-qn-daxuesheng-jingpin",
"value": "male-qn-daxuesheng-jingpin"
},
{
"label": "minimax-female-shaonv-jingpin",
"label": "female-shaonv-jingpin",
"value": "female-shaonv-jingpin"
},
{
"label": "minimax-female-yujie-jingpin",
"label": "female-yujie-jingpin",
"value": "female-yujie-jingpin"
},
{
"label": "minimax-female-chengshu-jingpin",
"label": "female-chengshu-jingpin",
"value": "female-chengshu-jingpin"
},
{
"label": "minimax-female-tianmei-jingpin",
"label": "female-tianmei-jingpin",
"value": "female-tianmei-jingpin"
},
{
"label": "minimax-clever_boy",
"label": "clever_boy",
"value": "clever_boy"
},
{
"label": "minimax-cute_boy",
"label": "cute_boy",
"value": "cute_boy"
},
{
"label": "minimax-lovely_girl",
"label": "lovely_girl",
"value": "lovely_girl"
},
{
"label": "minimax-cartoon_pig",
"label": "cartoon_pig",
"value": "cartoon_pig"
},
{
"label": "minimax-bingjiao_didi",
"label": "bingjiao_didi",
"value": "bingjiao_didi"
},
{
"label": "minimax-junlang_nanyou",
"label": "junlang_nanyou",
"value": "junlang_nanyou"
},
{
"label": "minimax-chunzhen_xuedi",
"label": "chunzhen_xuedi",
"value": "chunzhen_xuedi"
},
{
"label": "minimax-lengdan_xiongzhang",
"label": "lengdan_xiongzhang",
"value": "lengdan_xiongzhang"
},
{
"label": "minimax-badao_shaoye",
"label": "badao_shaoye",
"value": "badao_shaoye"
},
{
"label": "minimax-tianxin_xiaoling",
"label": "tianxin_xiaoling",
"value": "tianxin_xiaoling"
},
{
"label": "minimax-qiaopi_mengmei",
"label": "qiaopi_mengmei",
"value": "qiaopi_mengmei"
},
{
"label": "minimax-wumei_yujie",
"label": "wumei_yujie",
"value": "wumei_yujie"
},
{
"label": "minimax-diadia_xuemei",
"label": "diadia_xuemei",
"value": "diadia_xuemei"
},
{
"label": "minimax-danya_xuejie",
"label": "danya_xuejie",
"value": "danya_xuejie"
},
{
"label": "minimax-Santa_Claus",
"label": "Santa_Claus",
"value": "Santa_Claus"
},
{
"label": "minimax-Grinch",
"label": "Grinch",
"value": "Grinch"
},
{
"label": "minimax-Rudolph",
"label": "Rudolph",
"value": "Rudolph"
},
{
"label": "minimax-Arnold",
"label": "Arnold",
"value": "Arnold"
},
{
"label": "minimax-Charming_Santa",
"label": "Charming_Santa",
"value": "Charming_Santa"
},
{
"label": "minimax-Charming_Lady",
"label": "Charming_Lady",
"value": "Charming_Lady"
},
{
"label": "minimax-Sweet_Girl",
"label": "Sweet_Girl",
"value": "Sweet_Girl"
},
{
"label": "minimax-Cute_Elf",
"label": "Cute_Elf",
"value": "Cute_Elf"
},
{
"label": "minimax-Attractive_Girl",
"label": "Attractive_Girl",
"value": "Attractive_Girl"
},
{
"label": "minimax-Serene_Woman",
"label": "Serene_Woman",
"value": "Serene_Woman"
}
],
"type": "tts"
}
]
}
}

View File

@@ -10,7 +10,7 @@
"maxTemperature": 1.2,
"vision": true,
"toolChoice": true,
"functionCall": false,
"functionCall": true,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
@@ -53,7 +53,7 @@
"maxTemperature": 1.2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"functionCall": true,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
@@ -77,7 +77,7 @@
"maxTemperature": 1.2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"functionCall": true,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
@@ -102,7 +102,7 @@
"maxTemperature": 1.2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"functionCall": true,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
@@ -127,7 +127,7 @@
"maxTemperature": 1.2,
"vision": false,
"toolChoice": true,
"functionCall": false,
"functionCall": true,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
@@ -147,7 +147,7 @@
"maxTemperature": 1.2,
"vision": true,
"toolChoice": true,
"functionCall": false,
"functionCall": true,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,

View File

@@ -12,6 +12,7 @@ import {
} from '@fastgpt/global/core/ai/model.d';
import { debounce } from 'lodash';
import { ModelProviderType } from '@fastgpt/global/core/ai/provider';
import { findModelFromAlldata } from '../model';
/*
TODO: 分优先级读取:
@@ -95,7 +96,7 @@ export const loadSystemModels = async (init = false) => {
const modelData: any = {
...fileModel,
...dbModel?.metadata,
provider: fileContent.provider,
provider: dbModel?.metadata?.provider || fileContent.provider,
type: dbModel?.metadata?.type || fileModel.type,
isCustom: false
};
@@ -140,6 +141,28 @@ export const loadSystemModels = async (init = false) => {
}
};
export const getSystemModelConfig = async (model: string): Promise<SystemModelItemType> => {
const modelData = findModelFromAlldata(model);
if (!modelData) return Promise.reject('Model is not found');
if (modelData.isCustom) return Promise.reject('Custom model not data');
// Read file
const fileContent = (await import(`./provider/${modelData.provider}`))?.default as {
provider: ModelProviderType;
list: SystemModelItemType[];
};
const config = fileContent.list.find((item) => item.model === model);
if (!config) return Promise.reject('Model config is not found');
return {
...config,
provider: modelData.provider,
isCustom: false
};
};
export const watchSystemModelUpdate = () => {
const changeStream = MongoSystemModel.watch();

View File

@@ -2,6 +2,7 @@ import { addLog } from '../../../common/system/log';
import { POST } from '../../../common/api/serverRequest';
import { getDefaultRerankModel } from '../model';
import { getAxiosConfig } from '../config';
import { ReRankModelItemType } from '@fastgpt/global/core/ai/model.d';
type PostReRankResponse = {
id: string;
@@ -13,15 +14,15 @@ type PostReRankResponse = {
type ReRankCallResult = { id: string; score?: number }[];
export function reRankRecall({
model = getDefaultRerankModel(),
query,
documents
}: {
model?: ReRankModelItemType;
query: string;
documents: { id: string; text: string }[];
}): Promise<ReRankCallResult> {
const model = getDefaultRerankModel();
if (!model || !model?.requestUrl) {
if (!model) {
return Promise.reject('no rerank model');
}
@@ -29,7 +30,7 @@ export function reRankRecall({
let start = Date.now();
return POST<PostReRankResponse>(
model.requestUrl ? model.requestUrl : `${baseUrl}/v1/rerank`,
model.requestUrl ? model.requestUrl : `${baseUrl}/rerank`,
{
model: model.model,
query,
@@ -57,6 +58,6 @@ export function reRankRecall({
.catch((err) => {
addLog.error('rerank error', err);
return [];
return Promise.reject(err);
});
}

View File

@@ -26,11 +26,12 @@ const MyNumberInput = (props: Props) => {
{...restProps}
onChange={(e) => {
if (!onChange) return;
if (e === '') {
const numE = Number(e);
if (isNaN(numE)) {
// @ts-ignore
onChange('');
} else {
onChange(Number(e));
onChange(numE);
}
}}
>

View File

@@ -58,7 +58,6 @@ export const MultipleRowSelect = ({
const selectedValue = cloneValue[index];
const selectedIndex = list.findIndex((item) => item.value === selectedValue);
const children = list[selectedIndex]?.children || [];
const hasChildren = list.some((item) => item.children && item.children?.length > 0);
// Store current scroll position before update
const currentScrollTop = MenuRef.current[index]?.scrollTop;
@@ -84,54 +83,58 @@ export const MultipleRowSelect = ({
overflowY={'auto'}
whiteSpace={'nowrap'}
>
{list.map((item) => (
<Flex
key={item.value}
ref={(ref) => {
if (item.value === selectedValue) {
SelectedItemRef.current[index] = ref;
}
}}
py={1.5}
_notLast={{ mb: 1 }}
cursor={'pointer'}
px={1.5}
borderRadius={'sm'}
_hover={{
bg: 'primary.50'
}}
onClick={() => {
const newValue = [...cloneValue];
{list.map((item) => {
const hasChildren = item.children && item.children.length > 0;
if (item.value === selectedValue) {
for (let i = index; i < newValue.length; i++) {
newValue[i] = undefined;
return (
<Flex
key={item.value}
ref={(ref) => {
if (item.value === selectedValue) {
SelectedItemRef.current[index] = ref;
}
setCloneValue(newValue);
onSelect(newValue);
} else {
newValue[index] = item.value;
setCloneValue(newValue);
}}
py={1.5}
_notLast={{ mb: 1 }}
cursor={'pointer'}
px={1.5}
borderRadius={'sm'}
_hover={{
bg: 'primary.50'
}}
onClick={() => {
const newValue = [...cloneValue];
if (changeOnEverySelect || !hasChildren) {
if (item.value === selectedValue) {
for (let i = index; i < newValue.length; i++) {
newValue[i] = undefined;
}
setCloneValue(newValue);
onSelect(newValue);
}
} else {
newValue[index] = item.value;
setCloneValue(newValue);
if (!hasChildren) {
onClose();
if (changeOnEverySelect || !hasChildren) {
onSelect(newValue);
}
if (!hasChildren) {
onClose();
}
}
}
}}
{...(item.value === selectedValue
? {
bg: 'primary.50',
color: 'primary.600'
}
: {})}
>
{item.label}
</Flex>
))}
}}
{...(item.value === selectedValue
? {
bg: 'primary.50',
color: 'primary.600'
}
: {})}
>
{item.label}
</Flex>
);
})}
{list.length === 0 && (
<EmptyTip
text={emptyTip ?? t('common:common.MultipleRowSelect.No data')}

View File

@@ -70,6 +70,7 @@
"personal_information": "Personal",
"personalization": "Personalization",
"promotion_records": "Promotions",
"reset_default": "Restore the default configuration",
"team": "Team",
"third_party": "Third Party",
"usage_records": "Usage"

View File

@@ -146,6 +146,9 @@
"transition_to_workflow": "Convert to Workflow",
"transition_to_workflow_create_new_placeholder": "Create a new app instead of modifying the current app",
"transition_to_workflow_create_new_tip": "Once converted to a workflow, it cannot be reverted to simple mode. Please confirm!",
"tts_ai_model": "Use a speech synthesis model",
"tts_browser": "Browser's own (free)",
"tts_close": "Close",
"type.All": "All",
"type.Create http plugin tip": "Batch create plugins through OpenAPI Schema, compatible with GPTs format.",
"type.Create one plugin tip": "Customizable input and output workflows, usually used to encapsulate reusable workflows.",

View File

@@ -375,12 +375,10 @@
"core.app.tool_label.github": "GitHub Address",
"core.app.tool_label.price": "Pricing",
"core.app.tool_label.view_doc": "View Documentation",
"core.app.tts.Close": "Do Not Use",
"core.app.tts.Speech model": "Speech Model",
"core.app.tts.Speech speed": "Speech Speed",
"core.app.tts.Test Listen": "Test Listen",
"core.app.tts.Test Listen Text": "Hello, this is a voice test. If you can hear this sentence, the voice playback function is normal.",
"core.app.tts.Web": "Browser Built-in (Free)",
"core.app.whisper.Auto send": "Auto Send",
"core.app.whisper.Auto send tip": "Automatically send after voice input is completed, no need to click the send button manually",
"core.app.whisper.Auto tts response": "Auto Voice Response",
@@ -922,10 +920,10 @@
"model.name": "Model name",
"model.provider": "Provider",
"model.search_name_placeholder": "Search by model name",
"model.type.chat": "language model",
"model.type.chat": "LLM",
"model.type.embedding": "Embedding",
"model.type.reRank": "ReRank",
"model.type.stt": "speech recognition",
"model.type.stt": "STT",
"model.type.tts": "TTS",
"model_alicloud": "Ali Cloud",
"model_baai": "BAAI",

View File

@@ -71,6 +71,7 @@
"personal_information": "个人信息",
"personalization": "个性化",
"promotion_records": "促销记录",
"reset_default": "恢复默认配置",
"team": "团队管理",
"third_party": "第三方账号",
"usage_records": "使用记录"

View File

@@ -146,6 +146,9 @@
"transition_to_workflow": "转成工作流",
"transition_to_workflow_create_new_placeholder": "创建一个新的应用,而不是修改当前应用",
"transition_to_workflow_create_new_tip": "转化成工作流后,将无法转化回简易模式,请确认!",
"tts_ai_model": "使用语音合成模型",
"tts_browser": "浏览器自带(免费)",
"tts_close": "关闭",
"type.All": "全部",
"type.Create http plugin tip": "通过 OpenAPI Schema 批量创建插件,兼容 GPTs 格式",
"type.Create one plugin tip": "可以自定义输入和输出的工作流,通常用于封装重复使用的工作流",

View File

@@ -378,12 +378,10 @@
"core.app.tool_label.github": "GitHub地址",
"core.app.tool_label.price": "计费说明",
"core.app.tool_label.view_doc": "查看说明文档",
"core.app.tts.Close": "不使用",
"core.app.tts.Speech model": "语音模型",
"core.app.tts.Speech speed": "语速",
"core.app.tts.Test Listen": "试听",
"core.app.tts.Test Listen Text": "你好,这是语音测试,如果你能听到这句话,说明语音播放功能正常",
"core.app.tts.Web": "浏览器自带(免费)",
"core.app.whisper.Auto send": "自动发送",
"core.app.whisper.Auto send tip": "语音输入完毕后直接发送,不需要再手动点击发送按键",
"core.app.whisper.Auto tts response": "自动语音回复",

View File

@@ -69,6 +69,7 @@
"personal_information": "個人資訊",
"personalization": "個人化",
"promotion_records": "促銷記錄",
"reset_default": "恢復默認配置",
"team": "團隊管理",
"third_party": "第三方账号",
"usage_records": "使用記錄"

View File

@@ -146,6 +146,9 @@
"transition_to_workflow": "轉換成工作流程",
"transition_to_workflow_create_new_placeholder": "建立新的應用程式,而不是修改目前應用程式",
"transition_to_workflow_create_new_tip": "轉換成工作流程後,將無法轉換回簡易模式,請確認!",
"tts_ai_model": "使用語音合成模型",
"tts_browser": "瀏覽器自帶(免費)",
"tts_close": "關閉",
"type.All": "全部",
"type.Create http plugin tip": "透過 OpenAPI Schema 批次建立外掛,相容 GPTs 格式",
"type.Create one plugin tip": "可以自訂輸入和輸出的工作流程,通常用於封裝重複使用的工作流程",

View File

@@ -374,12 +374,10 @@
"core.app.tool_label.github": "GitHub 網址",
"core.app.tool_label.price": "收費說明",
"core.app.tool_label.view_doc": "檢視說明文件",
"core.app.tts.Close": "不使用",
"core.app.tts.Speech model": "語音模型",
"core.app.tts.Speech speed": "語速",
"core.app.tts.Test Listen": "測試聆聽",
"core.app.tts.Test Listen Text": "您好,這是語音測試。如果您能聽到這句話,表示語音播放功能正常",
"core.app.tts.Web": "瀏覽器內建(免費)",
"core.app.whisper.Auto send": "自動傳送",
"core.app.whisper.Auto send tip": "語音輸入完成後自動傳送,無需手動點選傳送按鈕",
"core.app.whisper.Auto tts response": "自動語音回應",