update doc; perf: model test (#4098)

* perf: extract array

* update doc

* perf: model test

* perf: model test
This commit is contained in:
Archer
2025-03-11 14:56:13 +08:00
committed by archer
parent bcd0b010a6
commit a3df9ea531
21 changed files with 144 additions and 79 deletions

View File

@@ -6,10 +6,12 @@ import { getSTTModel } from '../model';
export const aiTranscriptions = async ({
model,
fileStream
fileStream,
headers
}: {
model: string;
fileStream: fs.ReadStream;
headers?: Record<string, string>;
}) => {
const data = new FormData();
data.append('model', model);
@@ -30,7 +32,8 @@ export const aiTranscriptions = async ({
Authorization: modelData.requestAuth
? `Bearer ${modelData.requestAuth}`
: aiAxiosConfig.authorization,
...data.getHeaders()
...data.getHeaders(),
...headers
},
data: data
});

View File

@@ -76,7 +76,7 @@
{
"model": "qwen-max",
"name": "Qwen-max",
"maxContext": 8000,
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 6000,
"maxTemperature": 1,

View File

@@ -8,10 +8,11 @@ type GetVectorProps = {
model: EmbeddingModelItemType;
input: string;
type?: `${EmbeddingTypeEnm}`;
headers?: Record<string, string>;
};
// text to vector
export async function getVectorsByText({ model, input, type }: GetVectorProps) {
export async function getVectorsByText({ model, input, type, headers }: GetVectorProps) {
if (!input) {
return Promise.reject({
code: 500,
@@ -37,9 +38,10 @@ export async function getVectorsByText({ model, input, type }: GetVectorProps) {
path: model.requestUrl,
headers: model.requestAuth
? {
Authorization: `Bearer ${model.requestAuth}`
Authorization: `Bearer ${model.requestAuth}`,
...headers
}
: undefined
: headers
}
: {}
)

View File

@@ -16,11 +16,13 @@ type ReRankCallResult = { id: string; score?: number }[];
export function reRankRecall({
model = getDefaultRerankModel(),
query,
documents
documents,
headers
}: {
model?: ReRankModelItemType;
query: string;
documents: { id: string; text: string }[];
headers?: Record<string, string>;
}): Promise<ReRankCallResult> {
if (!model) {
return Promise.reject('no rerank model');
@@ -41,7 +43,8 @@ export function reRankRecall({
},
{
headers: {
Authorization: model.requestAuth ? `Bearer ${model.requestAuth}` : authorization
Authorization: model.requestAuth ? `Bearer ${model.requestAuth}` : authorization,
...headers
},
timeout: 30000
}

View File

@@ -12,12 +12,12 @@ export async function listAppDatasetDataByTeamIdAndDatasetIds({
datasetIdList: string[];
}) {
const myDatasets = await MongoDataset.find({
teamId,
_id: { $in: datasetIdList }
_id: { $in: datasetIdList },
...(teamId && { teamId })
}).lean();
return myDatasets.map((item) => ({
datasetId: item._id,
datasetId: String(item._id),
avatar: item.avatar,
name: item.name,
vectorModel: getEmbeddingModel(item.vectorModel)
@@ -47,7 +47,7 @@ export async function rewriteAppWorkflowToDetail({
const datasetIds = Array.isArray(rawValue)
? rawValue.map((v) => v?.datasetId).filter((id) => !!id && typeof id === 'string')
: rawValue.datasetId
: rawValue?.datasetId
? [String(rawValue.datasetId)]
: [];
@@ -61,38 +61,63 @@ export async function rewriteAppWorkflowToDetail({
teamId: isRoot ? undefined : teamId,
datasetIdList: Array.from(datasetIdSet)
});
const datasetMap = new Map(datasetList.map((ds) => [String(ds.datasetId), ds]));
// Rewrite dataset ids, add dataset info to nodes
nodes.forEach((node) => {
if (node.flowNodeType !== FlowNodeTypeEnum.datasetSearchNode) return;
if (datasetList.length > 0) {
nodes.forEach((node) => {
if (node.flowNodeType !== FlowNodeTypeEnum.datasetSearchNode) return;
node.inputs.forEach((item) => {
if (item.key !== NodeInputKeyEnum.datasetSelectList) return;
node.inputs.forEach((item) => {
if (item.key !== NodeInputKeyEnum.datasetSelectList) return;
const val = item.value as undefined | { datasetId: string }[] | { datasetId: string };
const val = item.value as undefined | { datasetId: string }[] | { datasetId: string };
if (Array.isArray(val)) {
item.value = val.map((v) => {
const data = datasetMap.get(String(v.datasetId))!;
return {
datasetId: data.datasetId,
avatar: data.avatar,
name: data.name,
vectorModel: data.vectorModel
};
});
} else if (typeof val === 'object' && val !== null) {
const data = datasetMap.get(String(val.datasetId))!;
item.value = {
datasetId: data.datasetId,
avatar: data.avatar,
name: data.name,
vectorModel: data.vectorModel
};
}
if (Array.isArray(val)) {
item.value = val
.map((v) => {
const data = datasetMap.get(String(v.datasetId));
if (!data)
return {
datasetId: v.datasetId,
avatar: '',
name: 'Dataset not found',
vectorModel: ''
};
return {
datasetId: data.datasetId,
avatar: data.avatar,
name: data.name,
vectorModel: data.vectorModel
};
})
.filter(Boolean);
} else if (typeof val === 'object' && val !== null) {
const data = datasetMap.get(String(val.datasetId));
if (!data) {
item.value = [
{
datasetId: val.datasetId,
avatar: '',
name: 'Dataset not found',
vectorModel: ''
}
];
} else {
item.value = [
{
datasetId: data.datasetId,
avatar: data.avatar,
name: data.name,
vectorModel: data.vectorModel
}
];
}
}
});
});
});
}
return nodes;
}

View File

@@ -202,7 +202,7 @@ ${description ? `- ${description}` : ''}
properties[item.key] = {
...jsonSchema,
description: item.desc,
...(item.enum ? { enum: item.enum.split('\n') } : {})
...(item.enum ? { enum: item.enum.split('\n').filter(Boolean) } : {})
};
});
// function body

View File

@@ -21,7 +21,7 @@ import {
FlowNodeInputTypeEnum,
FlowNodeTypeEnum
} from '@fastgpt/global/core/workflow/node/constant';
import { getNanoid, replaceVariable } from '@fastgpt/global/common/string/tools';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { getSystemTime } from '@fastgpt/global/common/time/timezone';
import { dispatchWorkflowStart } from './init/workflowStart';
@@ -426,6 +426,14 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
})();
if (!nodeRunResult) return [];
if (res?.closed) {
addLog.warn('Request is closed', {
appId: props.runningAppInfo.id,
nodeId: node.nodeId,
nodeName: node.name
});
return [];
}
/*
特殊情况: