Compare commits

..

2 Commits

Author SHA1 Message Date
Archer
874300a56a fix: chinese name export (#4890)
* fix: chinese name export

* fix: xlsx white space

* doc

* doc
2025-05-25 21:19:29 +08:00
Archer
1dea2b71b4 perf: human check;perf: recursion get node response (#4888)
* perf: human check

* version

* perf: recursion get node response
2025-05-25 20:55:29 +08:00
11 changed files with 36 additions and 117 deletions

View File

@@ -22,10 +22,13 @@ weight: 790
3. 纠正原先知识库的“表格数据集”名称,改成“备份导入”。同时支持知识库索引的导出和导入。
4. 工作流知识库引用上限,如果工作流中没有相关 AI 节点,则交互模式改成纯手动输入,并且上限为 1000万。
5. 语音输入,移动端判断逻辑,准确判断是否为手机,而不是小屏。
6. 优化上下文截取算法,至少保证留下一组 Human 信息。
## 🐛 修复
1. 全文检索多知识库时,得分排序不正确。
2. 流响应捕获 finish_reason 可能不正确。
3. 工具调用模式,未保存思考输出。
4. 知识库 indexSize 参数未生效。
5. 工作流嵌套 2 层后,获取预览引用、上下文不正确。
6. xlsx 转成 Markdown 时候,前面会多出一个空格。

View File

@@ -28,7 +28,6 @@ FastGPT 商业版是基于 FastGPT 开源版的增强版本,增加了一些独
| 应用发布安全配置 | ❌ | ✅ | ✅ |
| 内容审核 | ❌ | ✅ | ✅ |
| web站点同步 | ❌ | ✅ | ✅ |
| 主流文档库接入(目前支持:语雀、飞书) | ❌ | ✅ | ✅ |
| 增强训练模式 | ❌ | ✅ | ✅ |
| 第三方应用快速接入(飞书、公众号) | ❌ | ✅ | ✅ |
| 管理后台 | ❌ | ✅ | 不需要 |

View File

@@ -65,8 +65,8 @@ export const filterGPTMessageByMaxContext = async ({
if (lastMessage.role === ChatCompletionRequestMessageRoleEnum.User) {
const tokens = await countGptMessagesTokens([lastMessage, ...tmpChats]);
maxContext -= tokens;
// 该轮信息整体 tokens 超出范围,这段数据不要了
if (maxContext < 0) {
// 该轮信息整体 tokens 超出范围,这段数据不要了。但是至少保证一组。
if (maxContext < 0 && chats.length > 0) {
break;
}

View File

@@ -28,11 +28,11 @@ export const readXlsxRawText = async ({
if (!header) return;
const formatText = `| ${header.join(' | ')} |
| ${header.map(() => '---').join(' | ')} |
${csvArr
.slice(1)
.map((row) => `| ${row.map((item) => item.replace(/\n/g, '\\n')).join(' | ')} |`)
.join('\n')}`;
| ${header.map(() => '---').join(' | ')} |
${csvArr
.slice(1)
.map((row) => `| ${row.map((item) => item.replace(/\n/g, '\\n')).join(' | ')} |`)
.join('\n')}`;
return formatText;
})

View File

@@ -1,6 +1,6 @@
{
"name": "app",
"version": "4.9.9",
"version": "4.9.10",
"private": false,
"scripts": {
"dev": "next dev",

View File

@@ -7,6 +7,7 @@ import { type ChatHistoryItemResType } from '@fastgpt/global/core/chat/type';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { useTranslation } from 'next-i18next';
import { getFlatAppResponses } from '@/global/core/chat/utils';
// A node's response carries LLM output when it is a plain chat node or a tool-call node.
const isLLMNode = (item: ChatHistoryItemResType) => {
  const llmNodeTypes = [FlowNodeTypeEnum.chatNode, FlowNodeTypeEnum.tools];
  return llmNodeTypes.includes(item.moduleType);
};
@@ -16,17 +17,7 @@ const ContextModal = ({ onClose, dataId }: { onClose: () => void; dataId: string
const { loading: isLoading, data: contextModalData } = useRequest2(
() =>
getHistoryResponseData({ dataId }).then((res) => {
const flatResData: ChatHistoryItemResType[] =
res
?.map((item) => {
return [
item,
...(item.pluginDetail || []),
...(item.toolDetail || []),
...(item.loopDetail || [])
];
})
.flat() || [];
const flatResData = getFlatAppResponses(res || []);
return flatResData.find(isLLMNode)?.historyPreview || [];
}),
{ manual: false }

View File

@@ -19,23 +19,25 @@ export function transformPreviewHistories(
});
}
/**
 * Depth-first flatten of a workflow response tree.
 * Each node is emitted first, immediately followed by the recursively
 * flattened children found in its plugin / tool / loop detail lists.
 */
export const getFlatAppResponses = (res: ChatHistoryItemResType[]): ChatHistoryItemResType[] => {
  return res.flatMap((node) => [
    node,
    ...getFlatAppResponses(node.pluginDetail ?? []),
    ...getFlatAppResponses(node.toolDetail ?? []),
    ...getFlatAppResponses(node.loopDetail ?? [])
  ]);
};
export function addStatisticalDataToHistoryItem(historyItem: ChatItemType) {
if (historyItem.obj !== ChatRoleEnum.AI) return historyItem;
if (historyItem.totalQuoteList !== undefined) return historyItem;
if (!historyItem.responseData) return historyItem;
// Flat children
const flatResData: ChatHistoryItemResType[] =
historyItem.responseData
?.map((item) => {
return [
item,
...(item.pluginDetail || []),
...(item.toolDetail || []),
...(item.loopDetail || [])
];
})
.flat() || [];
const flatResData = getFlatAppResponses(historyItem.responseData || []);
return {
...historyItem,

View File

@@ -48,7 +48,7 @@ async function handler(req: ApiRequestProps<backupBody, backupQuery>, res: ApiRe
encoding: file.encoding,
getFormatText: false
});
if (!rawText.startsWith('q,a,indexes')) {
if (!rawText.trim().startsWith('q,a,indexes')) {
return Promise.reject('Backup file start with "q,a,indexes"');
}
// 2. delete tmp file

View File

@@ -50,7 +50,10 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
});
res.setHeader('Content-Type', 'text/csv; charset=utf-8;');
res.setHeader('Content-Disposition', `attachment; filename=${dataset.name}-backup.csv;`);
res.setHeader(
'Content-Disposition',
`attachment; filename=${encodeURIComponent(dataset.name)}-backup.csv;`
);
const cursor = MongoDatasetData.find<DataItemType>(
{

View File

@@ -1,8 +1,4 @@
import {
type AIChatItemType,
type ChatHistoryItemResType,
type ChatSchema
} from '@fastgpt/global/core/chat/type';
import { type ChatHistoryItemResType, type ChatSchema } from '@fastgpt/global/core/chat/type';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { type AuthModeType } from '@fastgpt/service/support/permission/type';
import { authOutLink } from './outLink';
@@ -12,6 +8,7 @@ import { AuthUserTypeEnum, ReadPermissionVal } from '@fastgpt/global/support/per
import { authApp } from '@fastgpt/service/support/permission/app/auth';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import { getFlatAppResponses } from '@/global/core/chat/utils';
/*
检查chat的权限
@@ -221,18 +218,7 @@ export const authCollectionInChat = async ({
if (!chatItem) return Promise.reject(DatasetErrEnum.unAuthDatasetCollection);
// 找 responseData 里,是否有该文档 id
const responseData = chatItem.responseData || [];
const flatResData: ChatHistoryItemResType[] =
responseData
?.map((item) => {
return [
item,
...(item.pluginDetail || []),
...(item.toolDetail || []),
...(item.loopDetail || [])
];
})
.flat() || [];
const flatResData = getFlatAppResponses(chatItem.responseData || []);
const quoteListSet = new Set(
flatResData

View File

@@ -4,8 +4,7 @@ import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import {
transformPreviewHistories,
addStatisticalDataToHistoryItem,
getFlatAppResponses
addStatisticalDataToHistoryItem
} from '@/global/core/chat/utils';
const mockResponseData = {
@@ -15,70 +14,6 @@ const mockResponseData = {
moduleType: FlowNodeTypeEnum.chatNode
};
describe('getFlatAppResponses', () => {
  // Builder: clone the shared mock node and overlay extra fields for a test case.
  const makeNode = (extra: Record<string, unknown> = {}) => ({ ...mockResponseData, ...extra });

  it('should return empty array for empty input', () => {
    expect(getFlatAppResponses([])).toEqual([]);
  });

  it('should handle single level responses', () => {
    const flatInput = [
      makeNode({ moduleType: FlowNodeTypeEnum.chatNode }),
      makeNode({ moduleType: FlowNodeTypeEnum.tools })
    ];
    expect(getFlatAppResponses(flatInput)).toEqual(flatInput);
  });

  it('should handle nested pluginDetail', () => {
    const withPlugin = [
      makeNode({ pluginDetail: [makeNode({ moduleType: FlowNodeTypeEnum.tools })] })
    ];
    expect(getFlatAppResponses(withPlugin)).toHaveLength(2);
  });

  it('should handle nested toolDetail', () => {
    const withTool = [
      makeNode({ toolDetail: [makeNode({ moduleType: FlowNodeTypeEnum.chatNode })] })
    ];
    expect(getFlatAppResponses(withTool)).toHaveLength(2);
  });

  it('should handle nested loopDetail', () => {
    const withLoop = [
      makeNode({ loopDetail: [makeNode({ moduleType: FlowNodeTypeEnum.datasetSearchNode })] })
    ];
    expect(getFlatAppResponses(withLoop)).toHaveLength(2);
  });

  it('should handle multiple levels of nesting', () => {
    // One node at each of four levels: root -> plugin -> tool -> loop.
    const deep = [
      makeNode({
        pluginDetail: [makeNode({ toolDetail: [makeNode({ loopDetail: [makeNode()] })] })]
      })
    ];
    expect(getFlatAppResponses(deep)).toHaveLength(4);
  });
});
describe('transformPreviewHistories', () => {
it('should transform histories correctly with responseDetail=true', () => {
const histories: ChatItemType[] = [