Compare commits
12 Commits
test-html
...
v4.9.11-de
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4b8dfeef12 | ||
|
|
98b00ae86d | ||
|
|
c1f8d5b032 | ||
|
|
4adb8b7e6f | ||
|
|
e32ca8a3e9 | ||
|
|
2507997d20 | ||
|
|
86f5a68d8c | ||
|
|
92c38d9d2f | ||
|
|
9fb5d05865 | ||
|
|
b974574157 | ||
|
|
5a5367d30b | ||
|
|
8ed35ffe7e |
232
dev.md
@@ -1,114 +1,118 @@
|
||||
## Premise
|
||||
|
||||
Since FastGPT is managed as a monorepo, it is recommended to install `make` first during development.
|
||||
|
||||
monorepo Project Name:
|
||||
|
||||
- app: main project
|
||||
-......
|
||||
|
||||
## Dev
|
||||
|
||||
```sh
|
||||
# Give automatic script code execution permission (on non-Linux systems, you can manually execute the postinstall.sh file content)
|
||||
chmod -R +x ./scripts/
|
||||
# Executing under the code root directory installs all dependencies within the root package, projects, and packages
|
||||
pnpm i
|
||||
|
||||
# Not make cmd
|
||||
cd projects/app
|
||||
pnpm dev
|
||||
|
||||
# Make cmd
|
||||
make dev name=app
|
||||
```
|
||||
|
||||
Note: If the Node version is >= 20, you need to pass the `--no-node-snapshot` parameter to Node when running `pnpm i`
|
||||
|
||||
```sh
|
||||
NODE_OPTIONS=--no-node-snapshot pnpm i
|
||||
```
|
||||
|
||||
### Jest
|
||||
|
||||
https://fael3z0zfze.feishu.cn/docx/ZOI1dABpxoGhS7xzhkXcKPxZnDL
|
||||
|
||||
## I18N
|
||||
|
||||
### Install i18n-ally Plugin
|
||||
|
||||
1. Open the Extensions Marketplace in VSCode, search for and install the `i18n Ally` plugin.
|
||||
|
||||
### Code Optimization Examples
|
||||
|
||||
#### Fetch Specific Namespace Translations in `getServerSideProps`
|
||||
|
||||
```typescript
|
||||
// pages/yourPage.tsx
|
||||
export async function getServerSideProps(context: any) {
|
||||
return {
|
||||
props: {
|
||||
currentTab: context?.query?.currentTab || TabEnum.info,
|
||||
...(await serverSideTranslations(context.locale, ['publish', 'user']))
|
||||
}
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
#### Use useTranslation Hook in Page
|
||||
|
||||
```typescript
|
||||
// pages/yourPage.tsx
|
||||
import { useTranslation } from 'next-i18next';
|
||||
|
||||
const YourComponent = () => {
|
||||
const { t } = useTranslation();
|
||||
|
||||
return (
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
mr={2}
|
||||
onClick={() => setShowSelected(false)}
|
||||
>
|
||||
{t('common:close')}
|
||||
</Button>
|
||||
);
|
||||
};
|
||||
|
||||
export default YourComponent;
|
||||
```
|
||||
|
||||
#### Handle Static File Translations
|
||||
|
||||
```typescript
|
||||
// utils/i18n.ts
|
||||
import { i18nT } from '@fastgpt/web/i18n/utils';
|
||||
|
||||
const staticContent = {
|
||||
id: 'simpleChat',
|
||||
avatar: 'core/workflow/template/aiChat',
|
||||
name: i18nT('app:template.simple_robot'),
|
||||
};
|
||||
|
||||
export default staticContent;
|
||||
```
|
||||
|
||||
### Standardize Translation Format
|
||||
|
||||
- Use the `t(namespace:key)` format to ensure consistent naming.
|
||||
- Translation keys should use lowercase letters and underscores, e.g., `common:close`.
|
||||
|
||||
## Build
|
||||
|
||||
```sh
|
||||
# Docker cmd: Build image, not proxy
|
||||
docker build -f ./projects/app/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 . --build-arg name=app
|
||||
# Make cmd: Build image, not proxy
|
||||
make build name=app image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1
|
||||
|
||||
# Docker cmd: Build image with proxy
|
||||
docker build -f ./projects/app/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 . --build-arg name=app --build-arg proxy=taobao
|
||||
# Make cmd: Build image with proxy
|
||||
make build name=app image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 proxy=taobao
|
||||
```
|
||||
## Premise
|
||||
|
||||
Since FastGPT is managed as a monorepo, it is recommended to install `make` first during development.
|
||||
|
||||
monorepo Project Name:
|
||||
|
||||
- app: main project
|
||||
-......
|
||||
|
||||
## Dev
|
||||
|
||||
```sh
|
||||
# Give automatic script code execution permission (on non-Linux systems, you can manually execute the postinstall.sh file content)
|
||||
chmod -R +x ./scripts/
|
||||
# Executing under the code root directory installs all dependencies within the root package, projects, and packages
|
||||
pnpm i
|
||||
|
||||
# Not make cmd
|
||||
cd projects/app
|
||||
pnpm dev
|
||||
|
||||
# Make cmd
|
||||
make dev name=app
|
||||
```
|
||||
|
||||
Note: If the Node version is >= 20, you need to pass the `--no-node-snapshot` parameter to Node when running `pnpm i`
|
||||
|
||||
```sh
|
||||
NODE_OPTIONS=--no-node-snapshot pnpm i
|
||||
```
|
||||
|
||||
### Jest
|
||||
|
||||
https://fael3z0zfze.feishu.cn/docx/ZOI1dABpxoGhS7xzhkXcKPxZnDL
|
||||
|
||||
## I18N
|
||||
|
||||
### Install i18n-ally Plugin
|
||||
|
||||
1. Open the Extensions Marketplace in VSCode, search for and install the `i18n Ally` plugin.
|
||||
|
||||
### Code Optimization Examples
|
||||
|
||||
#### Fetch Specific Namespace Translations in `getServerSideProps`
|
||||
|
||||
```typescript
|
||||
// pages/yourPage.tsx
|
||||
export async function getServerSideProps(context: any) {
|
||||
return {
|
||||
props: {
|
||||
currentTab: context?.query?.currentTab || TabEnum.info,
|
||||
...(await serverSideTranslations(context.locale, ['publish', 'user']))
|
||||
}
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
#### Use useTranslation Hook in Page
|
||||
|
||||
```typescript
|
||||
// pages/yourPage.tsx
|
||||
import { useTranslation } from 'next-i18next';
|
||||
|
||||
const YourComponent = () => {
|
||||
const { t } = useTranslation();
|
||||
|
||||
return (
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
mr={2}
|
||||
onClick={() => setShowSelected(false)}
|
||||
>
|
||||
{t('common:close')}
|
||||
</Button>
|
||||
);
|
||||
};
|
||||
|
||||
export default YourComponent;
|
||||
```
|
||||
|
||||
#### Handle Static File Translations
|
||||
|
||||
```typescript
|
||||
// utils/i18n.ts
|
||||
import { i18nT } from '@fastgpt/web/i18n/utils';
|
||||
|
||||
const staticContent = {
|
||||
id: 'simpleChat',
|
||||
avatar: 'core/workflow/template/aiChat',
|
||||
name: i18nT('app:template.simple_robot'),
|
||||
};
|
||||
|
||||
export default staticContent;
|
||||
```
|
||||
|
||||
### Standardize Translation Format
|
||||
|
||||
- Use the `t(namespace:key)` format to ensure consistent naming.
|
||||
- Translation keys should use lowercase letters and underscores, e.g., `common:close`.
|
||||
|
||||
## audit
|
||||
|
||||
Please add the new event to `OperationLogEventEnum` and the corresponding function in the operationLog/audit TypeScript files, fill in the matching i18n entries, and call the `addOperationLog` function at the location where the log should be recorded.
|
||||
|
||||
## Build
|
||||
|
||||
```sh
|
||||
# Docker cmd: Build image, not proxy
|
||||
docker build -f ./projects/app/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 . --build-arg name=app
|
||||
# Make cmd: Build image, not proxy
|
||||
make build name=app image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1
|
||||
|
||||
# Docker cmd: Build image with proxy
|
||||
docker build -f ./projects/app/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 . --build-arg name=app --build-arg proxy=taobao
|
||||
# Make cmd: Build image with proxy
|
||||
make build name=app image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 proxy=taobao
|
||||
```
|
||||
|
||||
BIN
docSite/assets/imgs/thirddataset-1.png
Normal file
|
After Width: | Height: | Size: 42 KiB |
BIN
docSite/assets/imgs/thirddataset-10.png
Normal file
|
After Width: | Height: | Size: 6.0 KiB |
BIN
docSite/assets/imgs/thirddataset-12.png
Normal file
|
After Width: | Height: | Size: 64 KiB |
BIN
docSite/assets/imgs/thirddataset-13.png
Normal file
|
After Width: | Height: | Size: 73 KiB |
BIN
docSite/assets/imgs/thirddataset-14.png
Normal file
|
After Width: | Height: | Size: 62 KiB |
BIN
docSite/assets/imgs/thirddataset-15.png
Normal file
|
After Width: | Height: | Size: 26 KiB |
BIN
docSite/assets/imgs/thirddataset-16.png
Normal file
|
After Width: | Height: | Size: 29 KiB |
BIN
docSite/assets/imgs/thirddataset-17.png
Normal file
|
After Width: | Height: | Size: 33 KiB |
BIN
docSite/assets/imgs/thirddataset-18.png
Normal file
|
After Width: | Height: | Size: 49 KiB |
BIN
docSite/assets/imgs/thirddataset-19.png
Normal file
|
After Width: | Height: | Size: 69 KiB |
BIN
docSite/assets/imgs/thirddataset-2.png
Normal file
|
After Width: | Height: | Size: 40 KiB |
BIN
docSite/assets/imgs/thirddataset-20.png
Normal file
|
After Width: | Height: | Size: 40 KiB |
BIN
docSite/assets/imgs/thirddataset-21.png
Normal file
|
After Width: | Height: | Size: 66 KiB |
BIN
docSite/assets/imgs/thirddataset-22.png
Normal file
|
After Width: | Height: | Size: 57 KiB |
BIN
docSite/assets/imgs/thirddataset-23.png
Normal file
|
After Width: | Height: | Size: 78 KiB |
BIN
docSite/assets/imgs/thirddataset-24.png
Normal file
|
After Width: | Height: | Size: 103 KiB |
BIN
docSite/assets/imgs/thirddataset-3.png
Normal file
|
After Width: | Height: | Size: 43 KiB |
BIN
docSite/assets/imgs/thirddataset-4.png
Normal file
|
After Width: | Height: | Size: 41 KiB |
BIN
docSite/assets/imgs/thirddataset-5.png
Normal file
|
After Width: | Height: | Size: 35 KiB |
BIN
docSite/assets/imgs/thirddataset-6.png
Normal file
|
After Width: | Height: | Size: 38 KiB |
BIN
docSite/assets/imgs/thirddataset-7.png
Normal file
|
After Width: | Height: | Size: 28 KiB |
BIN
docSite/assets/imgs/thirddataset-8.png
Normal file
|
After Width: | Height: | Size: 64 KiB |
BIN
docSite/assets/imgs/thirddataset-9.png
Normal file
|
After Width: | Height: | Size: 110 KiB |
@@ -645,7 +645,7 @@ data 为集合的 ID。
|
||||
{{< /tab >}}
|
||||
{{< /tabs >}}
|
||||
|
||||
### 创建一个外部文件库集合(商业版)
|
||||
### 创建一个外部文件库集合(弃用)
|
||||
|
||||
{{< tabs tabTotal="3" >}}
|
||||
{{< tab tabName="请求示例" >}}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
title: 'V4.9.1'
|
||||
title: 'V4.9.1(包含升级脚本)'
|
||||
description: 'FastGPT V4.9.1 更新说明'
|
||||
icon: 'upgrade'
|
||||
draft: false
|
||||
|
||||
@@ -7,11 +7,28 @@ toc: true
|
||||
weight: 789
|
||||
---
|
||||
|
||||
## 执行升级脚本
|
||||
|
||||
该脚本仅需商业版用户执行。
|
||||
|
||||
从任意终端,发起 1 个 HTTP 请求。其中 {{rootkey}} 替换成环境变量里的 `rootkey`;{{host}} 替换成**FastGPT 域名**。
|
||||
|
||||
```bash
|
||||
curl --location --request POST 'https://{{host}}/api/admin/initv4911' \
|
||||
--header 'rootkey: {{rootkey}}' \
|
||||
--header 'Content-Type: application/json'
|
||||
```
|
||||
|
||||
**脚本功能**
|
||||
|
||||
1. 移动第三方知识库 API 配置。
|
||||
|
||||
## 🚀 新增内容
|
||||
|
||||
1. 工作流中增加节点搜索功能。
|
||||
2. 工作流中,子流程版本控制,可选择“保持最新版本”,无需手动更新。
|
||||
1. 商业版支持图片知识库。
|
||||
2. 工作流中增加节点搜索功能。
|
||||
3. 工作流中,子流程版本控制,可选择“保持最新版本”,无需手动更新。
|
||||
4. 增加更多审计操作日志。
|
||||
|
||||
## ⚙️ 优化
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
title: 'V4.9.4'
|
||||
title: 'V4.9.4(包含升级脚本)'
|
||||
description: 'FastGPT V4.9.4 更新说明'
|
||||
icon: 'upgrade'
|
||||
draft: false
|
||||
|
||||
161
docSite/content/zh-cn/docs/guide/knowledge_base/third_dataset.md
Normal file
@@ -0,0 +1,161 @@
|
||||
---
|
||||
title: '第三方知识库开发'
|
||||
description: '本节详细介绍如何在FastGPT上自己接入第三方知识库'
|
||||
icon: 'language'
|
||||
draft: false
|
||||
toc: true
|
||||
weight: 410
|
||||
---
|
||||
|
||||
目前,互联网上拥有各种各样的文档库,例如飞书,语雀等等。 FastGPT 的不同用户可能使用的文档库不同,目前 FastGPT 内置了飞书、语雀文档库,如果需要接入其他文档库,可以参考本节内容。
|
||||
|
||||
|
||||
## 统一的接口规范
|
||||
|
||||
为了实现对不同文档库的统一接入,FastGPT 对第三方文档库进行了接口的规范,共包含 4 个接口内容,可以[查看 API 文件库接口](/docs/guide/knowledge_base/api_dataset/)。
|
||||
|
||||
所有内置的文档库,都是基于标准的 API 文件库进行扩展。可以参考`FastGPT/packages/service/core/dataset/apiDataset/yuqueDataset/api.ts`中的代码,进行其他文档库的扩展。一共需要完成 4 个接口开发:
|
||||
|
||||
1. 获取文件列表
|
||||
2. 获取文件内容/文件链接
|
||||
3. 获取原文预览地址
|
||||
4. 获取文件详情信息
|
||||
|
||||
## 开始一个第三方文件库
|
||||
|
||||
为了方便讲解,这里以添加语雀知识库为例(下文示例代码均基于语雀)。
|
||||
|
||||
### 1. 添加第三方文档库参数
|
||||
|
||||
首先,要进入 FastGPT 项目路径下的`FastGPT\packages\global\core\dataset\apiDataset.d.ts`文件,添加第三方文档库 Server 类型。例如,语雀文档中,需要提供`userId`、`token`两个字段作为鉴权信息。
|
||||
|
||||
```ts
|
||||
export type YuqueServer = {
|
||||
userId: string;
|
||||
token?: string;
|
||||
basePath?: string;
|
||||
};
|
||||
```
|
||||
|
||||
{{% alert icon="🤖 " context="success" %}}
|
||||
如果文档库有`根目录`选择的功能,需要设置添加一个字段`basePath`
|
||||
{{% /alert %}}
|
||||
|
||||
### 2. 创建 Hook 文件
|
||||
|
||||
每个第三方文档库都会采用 Hook 的方式来实现一套 API 接口的维护,Hook 里包含 4 个函数需要完成。
|
||||
|
||||
- 在`FastGPT\packages\service\core\dataset\apiDataset\`下创建一个文档库的文件夹,然后在文件夹下创建一个`api.ts`文件
|
||||
- 在`api.ts`文件中,需要完成 4 个函数的定义,分别是:
|
||||
- `listFiles`:获取文件列表
|
||||
- `getFileContent`:获取文件内容/文件链接
|
||||
- `getFileDetail`:获取文件详情信息
|
||||
- `getFilePreviewUrl`:获取原文预览地址
|
||||
|
||||
### 3. 数据库添加配置字段
|
||||
|
||||
- 在`packages/service/core/dataset/schema.ts` 中添加第三方文档库的配置字段,类型统一设置成`Object`。
|
||||
- 在`FastGPT/packages/global/core/dataset/type.d.ts`中添加第三方文档库配置字段的数据类型,类型设置为第一步创建的参数。
|
||||
|
||||

|
||||
|
||||
{{% alert icon="🤖 " context="success" %}}
|
||||
`schema.ts`文件修改后,需要重新启动 FastGPT 项目才会生效。
|
||||
{{% /alert %}}
|
||||
|
||||
### 4. 添加知识库类型
|
||||
|
||||
在`projects/app/src/web/core/dataset/constants.ts`中,添加自己的知识库类型
|
||||
|
||||
```TS
|
||||
export const datasetTypeCourseMap: Record<`${DatasetTypeEnum}`, string> = {
|
||||
[DatasetTypeEnum.folder]: '',
|
||||
[DatasetTypeEnum.dataset]: '',
|
||||
[DatasetTypeEnum.apiDataset]: '/docs/guide/knowledge_base/api_dataset/',
|
||||
[DatasetTypeEnum.websiteDataset]: '/docs/guide/knowledge_base/websync/',
|
||||
[DatasetTypeEnum.feishuShare]: '/docs/guide/knowledge_base/lark_share_dataset/',
|
||||
[DatasetTypeEnum.feishuKnowledge]: '/docs/guide/knowledge_base/lark_knowledge_dataset/',
|
||||
[DatasetTypeEnum.yuque]: '/docs/guide/knowledge_base/yuque_dataset/',
|
||||
[DatasetTypeEnum.externalFile]: ''
|
||||
};
|
||||
```
|
||||
|
||||
{{% alert icon="🤖 " context="success" %}}
|
||||
在 datasetTypeCourseMap 中添加自己的知识库类型,`' '`内是相应的文档说明,如果有的话,可以添加。
|
||||
文档添加在`FastGPT\docSite\content\zh-cn\docs\guide\knowledge_base\`
|
||||
{{% /alert %}}
|
||||
|
||||
## 添加前端
|
||||
|
||||
`FastGPT\packages\web\i18n\zh-CN\dataset.json`,`FastGPT\packages\web\i18n\en\dataset.json`和`FastGPT\packages\web\i18n\zh-Hant\dataset.json`中添加自己的 I18n 翻译,以中文翻译为例,大体需要如下几个内容:
|
||||
|
||||

|
||||
|
||||
`FastGPT\packages\web\components\common\Icon\icons\core\dataset\`添加自己的知识库图标,一共是两个,分为`Outline`和`Color`,分别是有颜色的和无色的,具体看如下图片。
|
||||
|
||||

|
||||
|
||||
|
||||
在`FastGPT\packages\web\components\common\Icon\constants.ts`文件中,添加自己的图标。 `import` 是图标的存放路径。
|
||||
|
||||

|
||||
|
||||
在`FastGPT\packages\global\core\dataset\constants.ts`文件中,添加自己的知识库类型。
|
||||
|
||||

|
||||
|
||||
{{% alert icon="🤖 " context="success" %}}
|
||||
`label`内容是自己之前通过 i18n 翻译添加的知识库名称的
|
||||
`icon`是自己之前添加的 Icon , I18n 的添加看最后清单。
|
||||
{{% /alert %}}
|
||||
|
||||
在`FastGPT\projects\app\src\pages\dataset\list\index.tsx`文件下,添加如下内容。这个文件负责的是知识库列表页的`新建`按钮点击后的菜单,只有在该文件添加知识库后,才能创建知识库。
|
||||
|
||||

|
||||
|
||||
在`FastGPT\projects\app\src\pageComponents\dataset\detail\Info\index.tsx`文件下,添加如下内容。
|
||||
|
||||

|
||||
|
||||
在`FastGPT\projects\app\src\pageComponents\dataset\list\CreateModal.tsx`文件下,添加如下内容。
|
||||
|
||||
| | |
|
||||
| --- | --- |
|
||||
|  |  |
|
||||
|
||||
在`FastGPT\projects\app\src\pageComponents\dataset\list\SideTag.tsx`文件下,添加如下内容。
|
||||
|
||||

|
||||
|
||||
在`FastGPT\projects\app\src\web\core\dataset\context\datasetPageContext.tsx`文件下,添加如下内容。
|
||||
|
||||

|
||||
|
||||
## 添加配置表单
|
||||
|
||||
在`FastGPT\projects\app\src\pageComponents\dataset\ApiDatasetForm.tsx`文件下,添加自己如下内容。这个文件负责的是创建知识库页的字段填写。
|
||||
|
||||
| | | |
|
||||
| --- | --- | --- |
|
||||
|  |  |  |
|
||||
|
||||
代码中添加的两个组件是对根目录选择的渲染,对应设计的 API 的 `getFileDetail` 方法,如果你的文档库不支持根目录,可以不引用。
|
||||
|
||||
```
|
||||
{renderBaseUrlSelector()} //这是对`Base URL`字段的渲染
|
||||
{renderDirectoryModal()} //点击`选择`后出现的`选择根目录`窗口,见图
|
||||
```
|
||||
|
||||
| | |
|
||||
| --- | --- |
|
||||
|  |  |
|
||||
|
||||
如果知识库需要支持根目录,还需要在`ApiDatasetForm`文件中添加相关内容。
|
||||
|
||||
## 添加杂项
|
||||
|
||||
最后,需要在很多文件里添加`server`类型,这里由于文件过多,且不大,不一一列举文件的清单。只提供方法:使用自己编程工具的全局搜索功能,搜索`YuqueServer`和`yuqueServer`。在搜索到的文件中,逐一添加自己的知识库类型。
|
||||
|
||||
## 提示
|
||||
|
||||
建议知识库创建完成后,完整测试一遍知识库的功能,以确定有无漏洞,如果你的知识库添加有问题,且无法在文档找到对应的文件解决,一定是杂项没有添加完全,建议重复一次全局搜索`YuqueServer`和`yuqueServer`,检查是否有地方没有加上自己的类型。
|
||||
@@ -6,7 +6,8 @@ export const fileImgs = [
|
||||
{ suffix: '(doc|docs)', src: 'file/fill/doc' },
|
||||
{ suffix: 'txt', src: 'file/fill/txt' },
|
||||
{ suffix: 'md', src: 'file/fill/markdown' },
|
||||
{ suffix: 'html', src: 'file/fill/html' }
|
||||
{ suffix: 'html', src: 'file/fill/html' },
|
||||
{ suffix: '(jpg|jpeg|png|gif|bmp|webp|svg|ico|tiff|tif)', src: 'image' }
|
||||
|
||||
// { suffix: '.', src: '/imgs/files/file.svg' }
|
||||
];
|
||||
|
||||
@@ -2,4 +2,5 @@ export type AuthFrequencyLimitProps = {
|
||||
eventId: string;
|
||||
maxAmount: number;
|
||||
expiredTime: Date;
|
||||
num?: number;
|
||||
};
|
||||
|
||||
@@ -34,7 +34,7 @@ export const valToStr = (val: any) => {
|
||||
};
|
||||
|
||||
// replace {{variable}} to value
|
||||
export function replaceVariable(text: any, obj: Record<string, string | number>) {
|
||||
export function replaceVariable(text: any, obj: Record<string, string | number | undefined>) {
|
||||
if (typeof text !== 'string') return text;
|
||||
|
||||
for (const key in obj) {
|
||||
|
||||
24
packages/global/core/dataset/api.d.ts
vendored
@@ -1,4 +1,9 @@
|
||||
import type { ChunkSettingsType, DatasetDataIndexItemType, DatasetSchemaType } from './type';
|
||||
import type {
|
||||
ChunkSettingsType,
|
||||
DatasetDataIndexItemType,
|
||||
DatasetDataFieldType,
|
||||
DatasetSchemaType
|
||||
} from './type';
|
||||
import type {
|
||||
DatasetCollectionTypeEnum,
|
||||
DatasetCollectionDataProcessModeEnum,
|
||||
@@ -7,12 +12,14 @@ import type {
|
||||
ChunkTriggerConfigTypeEnum,
|
||||
ParagraphChunkAIModeEnum
|
||||
} from './constants';
|
||||
import type { LLMModelItemType } from '../ai/model.d';
|
||||
import type { ParentIdType } from 'common/parentFolder/type';
|
||||
import type { ParentIdType } from '../../common/parentFolder/type';
|
||||
|
||||
/* ================= dataset ===================== */
|
||||
export type DatasetUpdateBody = {
|
||||
id: string;
|
||||
|
||||
apiDatasetServer?: DatasetSchemaType['apiDatasetServer'];
|
||||
|
||||
parentId?: ParentIdType;
|
||||
name?: string;
|
||||
avatar?: string;
|
||||
@@ -24,9 +31,6 @@ export type DatasetUpdateBody = {
|
||||
websiteConfig?: DatasetSchemaType['websiteConfig'];
|
||||
externalReadUrl?: DatasetSchemaType['externalReadUrl'];
|
||||
defaultPermission?: DatasetSchemaType['defaultPermission'];
|
||||
apiServer?: DatasetSchemaType['apiServer'];
|
||||
yuqueServer?: DatasetSchemaType['yuqueServer'];
|
||||
feishuServer?: DatasetSchemaType['feishuServer'];
|
||||
chunkSettings?: DatasetSchemaType['chunkSettings'];
|
||||
|
||||
// sync schedule
|
||||
@@ -100,6 +104,9 @@ export type ExternalFileCreateDatasetCollectionParams = ApiCreateDatasetCollecti
|
||||
externalFileUrl: string;
|
||||
filename?: string;
|
||||
};
|
||||
export type ImageCreateDatasetCollectionParams = ApiCreateDatasetCollectionParams & {
|
||||
collectionName: string;
|
||||
};
|
||||
|
||||
/* ================= tag ===================== */
|
||||
export type CreateDatasetCollectionTagParams = {
|
||||
@@ -125,8 +132,9 @@ export type PgSearchRawType = {
|
||||
score: number;
|
||||
};
|
||||
export type PushDatasetDataChunkProps = {
|
||||
q: string; // embedding content
|
||||
a?: string; // bonus content
|
||||
q?: string;
|
||||
a?: string;
|
||||
imageId?: string;
|
||||
chunkIndex?: number;
|
||||
indexes?: Omit<DatasetDataIndexItemType, 'dataId'>[];
|
||||
};
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { RequireOnlyOne } from '../../common/type/utils';
|
||||
import type { ParentIdType } from '../../common/parentFolder/type.d';
|
||||
import { RequireOnlyOne } from '../../../common/type/utils';
|
||||
import type { ParentIdType } from '../../../common/parentFolder/type';
|
||||
|
||||
export type APIFileItem = {
|
||||
id: string;
|
||||
@@ -28,6 +28,12 @@ export type YuqueServer = {
|
||||
basePath?: string;
|
||||
};
|
||||
|
||||
export type ApiDatasetServerType = {
|
||||
apiServer?: APIFileServer;
|
||||
feishuServer?: FeishuServer;
|
||||
yuqueServer?: YuqueServer;
|
||||
};
|
||||
|
||||
// Api dataset api
|
||||
|
||||
export type APIFileListResponse = APIFileItem[];
|
||||
31
packages/global/core/dataset/apiDataset/utils.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import type { ApiDatasetServerType } from './type';
|
||||
|
||||
export const filterApiDatasetServerPublicData = (apiDatasetServer?: ApiDatasetServerType) => {
|
||||
if (!apiDatasetServer) return undefined;
|
||||
|
||||
const { apiServer, yuqueServer, feishuServer } = apiDatasetServer;
|
||||
|
||||
return {
|
||||
apiServer: apiServer
|
||||
? {
|
||||
baseUrl: apiServer.baseUrl,
|
||||
authorization: '',
|
||||
basePath: apiServer.basePath
|
||||
}
|
||||
: undefined,
|
||||
yuqueServer: yuqueServer
|
||||
? {
|
||||
userId: yuqueServer.userId,
|
||||
token: '',
|
||||
basePath: yuqueServer.basePath
|
||||
}
|
||||
: undefined,
|
||||
feishuServer: feishuServer
|
||||
? {
|
||||
appId: feishuServer.appId,
|
||||
appSecret: '',
|
||||
folderToken: feishuServer.folderToken
|
||||
}
|
||||
: undefined
|
||||
};
|
||||
};
|
||||
@@ -6,45 +6,80 @@ export enum DatasetTypeEnum {
|
||||
dataset = 'dataset',
|
||||
websiteDataset = 'websiteDataset', // depp link
|
||||
externalFile = 'externalFile',
|
||||
|
||||
apiDataset = 'apiDataset',
|
||||
feishu = 'feishu',
|
||||
yuque = 'yuque'
|
||||
}
|
||||
export const DatasetTypeMap = {
|
||||
|
||||
// @ts-ignore
|
||||
export const ApiDatasetTypeMap: Record<
|
||||
`${DatasetTypeEnum}`,
|
||||
{
|
||||
icon: string;
|
||||
avatar: string;
|
||||
label: any;
|
||||
collectionLabel: string;
|
||||
courseUrl?: string;
|
||||
}
|
||||
> = {
|
||||
[DatasetTypeEnum.apiDataset]: {
|
||||
icon: 'core/dataset/externalDatasetOutline',
|
||||
avatar: 'core/dataset/externalDatasetColor',
|
||||
label: i18nT('dataset:api_file'),
|
||||
collectionLabel: i18nT('common:File'),
|
||||
courseUrl: '/docs/guide/knowledge_base/api_dataset/'
|
||||
},
|
||||
[DatasetTypeEnum.feishu]: {
|
||||
icon: 'core/dataset/feishuDatasetOutline',
|
||||
avatar: 'core/dataset/feishuDatasetColor',
|
||||
label: i18nT('dataset:feishu_dataset'),
|
||||
collectionLabel: i18nT('common:File'),
|
||||
courseUrl: '/docs/guide/knowledge_base/lark_dataset/'
|
||||
},
|
||||
[DatasetTypeEnum.yuque]: {
|
||||
icon: 'core/dataset/yuqueDatasetOutline',
|
||||
avatar: 'core/dataset/yuqueDatasetColor',
|
||||
label: i18nT('dataset:yuque_dataset'),
|
||||
collectionLabel: i18nT('common:File'),
|
||||
courseUrl: '/docs/guide/knowledge_base/yuque_dataset/'
|
||||
}
|
||||
};
|
||||
export const DatasetTypeMap: Record<
|
||||
`${DatasetTypeEnum}`,
|
||||
{
|
||||
icon: string;
|
||||
avatar: string;
|
||||
label: any;
|
||||
collectionLabel: string;
|
||||
courseUrl?: string;
|
||||
}
|
||||
> = {
|
||||
...ApiDatasetTypeMap,
|
||||
[DatasetTypeEnum.folder]: {
|
||||
icon: 'common/folderFill',
|
||||
avatar: 'common/folderFill',
|
||||
label: i18nT('dataset:folder_dataset'),
|
||||
collectionLabel: i18nT('common:Folder')
|
||||
},
|
||||
[DatasetTypeEnum.dataset]: {
|
||||
icon: 'core/dataset/commonDatasetOutline',
|
||||
avatar: 'core/dataset/commonDatasetColor',
|
||||
label: i18nT('dataset:common_dataset'),
|
||||
collectionLabel: i18nT('common:File')
|
||||
},
|
||||
[DatasetTypeEnum.websiteDataset]: {
|
||||
icon: 'core/dataset/websiteDatasetOutline',
|
||||
avatar: 'core/dataset/websiteDatasetColor',
|
||||
label: i18nT('dataset:website_dataset'),
|
||||
collectionLabel: i18nT('common:Website')
|
||||
collectionLabel: i18nT('common:Website'),
|
||||
courseUrl: '/docs/guide/knowledge_base/websync/'
|
||||
},
|
||||
[DatasetTypeEnum.externalFile]: {
|
||||
icon: 'core/dataset/externalDatasetOutline',
|
||||
avatar: 'core/dataset/externalDatasetColor',
|
||||
label: i18nT('dataset:external_file'),
|
||||
collectionLabel: i18nT('common:File')
|
||||
},
|
||||
[DatasetTypeEnum.apiDataset]: {
|
||||
icon: 'core/dataset/externalDatasetOutline',
|
||||
label: i18nT('dataset:api_file'),
|
||||
collectionLabel: i18nT('common:File')
|
||||
},
|
||||
[DatasetTypeEnum.feishu]: {
|
||||
icon: 'core/dataset/feishuDatasetOutline',
|
||||
label: i18nT('dataset:feishu_dataset'),
|
||||
collectionLabel: i18nT('common:File')
|
||||
},
|
||||
[DatasetTypeEnum.yuque]: {
|
||||
icon: 'core/dataset/yuqueDatasetOutline',
|
||||
label: i18nT('dataset:yuque_dataset'),
|
||||
collectionLabel: i18nT('common:File')
|
||||
}
|
||||
};
|
||||
|
||||
@@ -77,7 +112,8 @@ export enum DatasetCollectionTypeEnum {
|
||||
file = 'file',
|
||||
link = 'link', // one link
|
||||
externalFile = 'externalFile',
|
||||
apiFile = 'apiFile'
|
||||
apiFile = 'apiFile',
|
||||
images = 'images'
|
||||
}
|
||||
export const DatasetCollectionTypeMap = {
|
||||
[DatasetCollectionTypeEnum.folder]: {
|
||||
@@ -97,6 +133,9 @@ export const DatasetCollectionTypeMap = {
|
||||
},
|
||||
[DatasetCollectionTypeEnum.apiFile]: {
|
||||
name: i18nT('common:core.dataset.apiFile')
|
||||
},
|
||||
[DatasetCollectionTypeEnum.images]: {
|
||||
name: i18nT('dataset:core.dataset.Image collection')
|
||||
}
|
||||
};
|
||||
|
||||
@@ -120,6 +159,7 @@ export const DatasetCollectionSyncResultMap = {
|
||||
export enum DatasetCollectionDataProcessModeEnum {
|
||||
chunk = 'chunk',
|
||||
qa = 'qa',
|
||||
imageParse = 'imageParse',
|
||||
backup = 'backup',
|
||||
|
||||
auto = 'auto' // abandon
|
||||
@@ -133,6 +173,10 @@ export const DatasetCollectionDataProcessModeMap = {
|
||||
label: i18nT('common:core.dataset.training.QA mode'),
|
||||
tooltip: i18nT('common:core.dataset.import.QA Import Tip')
|
||||
},
|
||||
[DatasetCollectionDataProcessModeEnum.imageParse]: {
|
||||
label: i18nT('dataset:training.Image mode'),
|
||||
tooltip: i18nT('common:core.dataset.import.Chunk Split Tip')
|
||||
},
|
||||
[DatasetCollectionDataProcessModeEnum.backup]: {
|
||||
label: i18nT('dataset:backup_mode'),
|
||||
tooltip: i18nT('dataset:backup_mode')
|
||||
@@ -172,14 +216,16 @@ export enum ImportDataSourceEnum {
|
||||
fileCustom = 'fileCustom',
|
||||
externalFile = 'externalFile',
|
||||
apiDataset = 'apiDataset',
|
||||
reTraining = 'reTraining'
|
||||
reTraining = 'reTraining',
|
||||
imageDataset = 'imageDataset'
|
||||
}
|
||||
|
||||
export enum TrainingModeEnum {
|
||||
chunk = 'chunk',
|
||||
qa = 'qa',
|
||||
auto = 'auto',
|
||||
image = 'image'
|
||||
image = 'image',
|
||||
imageParse = 'imageParse'
|
||||
}
|
||||
|
||||
/* ------------ search -------------- */
|
||||
|
||||
4
packages/global/core/dataset/controller.d.ts
vendored
@@ -8,17 +8,19 @@ export type CreateDatasetDataProps = {
|
||||
chunkIndex?: number;
|
||||
q: string;
|
||||
a?: string;
|
||||
imageId?: string;
|
||||
indexes?: Omit<DatasetDataIndexItemType, 'dataId'>[];
|
||||
};
|
||||
|
||||
export type UpdateDatasetDataProps = {
|
||||
dataId: string;
|
||||
|
||||
q?: string;
|
||||
q: string;
|
||||
a?: string;
|
||||
indexes?: (Omit<DatasetDataIndexItemType, 'dataId'> & {
|
||||
dataId?: string; // pg data id
|
||||
})[];
|
||||
imageId?: string;
|
||||
};
|
||||
|
||||
export type PatchIndexesProps =
|
||||
|
||||
13
packages/global/core/dataset/image/type.d.ts
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
export type DatasetImageSchema = {
|
||||
_id: string;
|
||||
teamId: string;
|
||||
datasetId: string;
|
||||
collectionId?: string;
|
||||
name: string;
|
||||
contentType: string;
|
||||
size: number;
|
||||
metadata?: Record<string, any>;
|
||||
expiredTime?: Date;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
};
|
||||
52
packages/global/core/dataset/type.d.ts
vendored
@@ -13,9 +13,15 @@ import type {
|
||||
ChunkTriggerConfigTypeEnum
|
||||
} from './constants';
|
||||
import type { DatasetPermission } from '../../support/permission/dataset/controller';
|
||||
import type { APIFileServer, FeishuServer, YuqueServer } from './apiDataset';
|
||||
import type {
|
||||
ApiDatasetServerType,
|
||||
APIFileServer,
|
||||
FeishuServer,
|
||||
YuqueServer
|
||||
} from './apiDataset/type';
|
||||
import type { SourceMemberType } from 'support/user/type';
|
||||
import type { DatasetDataIndexTypeEnum } from './data/constants';
|
||||
import type { ParentIdType } from 'common/parentFolder/type';
|
||||
|
||||
export type ChunkSettingsType = {
|
||||
trainingType?: DatasetCollectionDataProcessModeEnum;
|
||||
@@ -49,7 +55,7 @@ export type ChunkSettingsType = {
|
||||
|
||||
export type DatasetSchemaType = {
|
||||
_id: string;
|
||||
parentId?: string;
|
||||
parentId: ParentIdType;
|
||||
userId: string;
|
||||
teamId: string;
|
||||
tmbId: string;
|
||||
@@ -72,14 +78,16 @@ export type DatasetSchemaType = {
|
||||
chunkSettings?: ChunkSettingsType;
|
||||
|
||||
inheritPermission: boolean;
|
||||
apiServer?: APIFileServer;
|
||||
feishuServer?: FeishuServer;
|
||||
yuqueServer?: YuqueServer;
|
||||
|
||||
apiDatasetServer?: ApiDatasetServerType;
|
||||
|
||||
// abandon
|
||||
autoSync?: boolean;
|
||||
externalReadUrl?: string;
|
||||
defaultPermission?: number;
|
||||
apiServer?: APIFileServer;
|
||||
feishuServer?: FeishuServer;
|
||||
yuqueServer?: YuqueServer;
|
||||
};
|
||||
|
||||
export type DatasetCollectionSchemaType = ChunkSettingsType & {
|
||||
@@ -132,7 +140,13 @@ export type DatasetDataIndexItemType = {
|
||||
dataId: string; // pg data id
|
||||
text: string;
|
||||
};
|
||||
export type DatasetDataSchemaType = {
|
||||
|
||||
export type DatasetDataFieldType = {
|
||||
q: string; // large chunks or question
|
||||
a?: string; // answer or custom content
|
||||
imageId?: string;
|
||||
};
|
||||
export type DatasetDataSchemaType = DatasetDataFieldType & {
|
||||
_id: string;
|
||||
userId: string;
|
||||
teamId: string;
|
||||
@@ -141,13 +155,9 @@ export type DatasetDataSchemaType = {
|
||||
collectionId: string;
|
||||
chunkIndex: number;
|
||||
updateTime: Date;
|
||||
q: string; // large chunks or question
|
||||
a: string; // answer or custom content
|
||||
history?: {
|
||||
q: string;
|
||||
a: string;
|
||||
history?: (DatasetDataFieldType & {
|
||||
updateTime: Date;
|
||||
}[];
|
||||
})[];
|
||||
forbid?: boolean;
|
||||
fullTextToken: string;
|
||||
indexes: DatasetDataIndexItemType[];
|
||||
@@ -179,6 +189,7 @@ export type DatasetTrainingSchemaType = {
|
||||
dataId?: string;
|
||||
q: string;
|
||||
a: string;
|
||||
imageId?: string;
|
||||
chunkIndex: number;
|
||||
indexSize?: number;
|
||||
weight: number;
|
||||
@@ -244,20 +255,18 @@ export type DatasetCollectionItemType = CollectionWithDatasetType & {
|
||||
};
|
||||
|
||||
/* ================= data ===================== */
|
||||
export type DatasetDataItemType = {
|
||||
export type DatasetDataItemType = DatasetDataFieldType & {
|
||||
id: string;
|
||||
teamId: string;
|
||||
datasetId: string;
|
||||
imagePreivewUrl?: string;
|
||||
updateTime: Date;
|
||||
collectionId: string;
|
||||
sourceName: string;
|
||||
sourceId?: string;
|
||||
q: string;
|
||||
a: string;
|
||||
chunkIndex: number;
|
||||
indexes: DatasetDataIndexItemType[];
|
||||
isOwner: boolean;
|
||||
// permission: DatasetPermission;
|
||||
};
|
||||
|
||||
/* --------------- file ---------------------- */
|
||||
@@ -284,3 +293,14 @@ export type SearchDataResponseItemType = Omit<
|
||||
score: { type: `${SearchScoreTypeEnum}`; value: number; index: number }[];
|
||||
// score: number;
|
||||
};
|
||||
|
||||
export type DatasetCiteItemType = {
|
||||
_id: string;
|
||||
q: string;
|
||||
a?: string;
|
||||
imagePreivewUrl?: string;
|
||||
history?: DatasetDataSchemaType['history'];
|
||||
updateTime: DatasetDataSchemaType['updateTime'];
|
||||
index: DatasetDataSchemaType['chunkIndex'];
|
||||
updated?: boolean;
|
||||
};
|
||||
|
||||
@@ -2,10 +2,15 @@ import { TrainingModeEnum, DatasetCollectionTypeEnum } from './constants';
|
||||
import { getFileIcon } from '../../common/file/icon';
|
||||
import { strIsLink } from '../../common/string/tools';
|
||||
|
||||
export function getCollectionIcon(
|
||||
type: DatasetCollectionTypeEnum = DatasetCollectionTypeEnum.file,
|
||||
name = ''
|
||||
) {
|
||||
export function getCollectionIcon({
|
||||
type = DatasetCollectionTypeEnum.file,
|
||||
name = '',
|
||||
sourceId
|
||||
}: {
|
||||
type?: DatasetCollectionTypeEnum;
|
||||
name?: string;
|
||||
sourceId?: string;
|
||||
}) {
|
||||
if (type === DatasetCollectionTypeEnum.folder) {
|
||||
return 'common/folderFill';
|
||||
}
|
||||
@@ -15,7 +20,10 @@ export function getCollectionIcon(
|
||||
if (type === DatasetCollectionTypeEnum.virtual) {
|
||||
return 'file/fill/manual';
|
||||
}
|
||||
return getFileIcon(name);
|
||||
if (type === DatasetCollectionTypeEnum.images) {
|
||||
return 'core/dataset/imageFill';
|
||||
}
|
||||
return getSourceNameIcon({ sourceName: name, sourceId });
|
||||
}
|
||||
export function getSourceNameIcon({
|
||||
sourceName,
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
export enum OperationLogEventEnum {
|
||||
//Team
|
||||
LOGIN = 'LOGIN',
|
||||
CREATE_INVITATION_LINK = 'CREATE_INVITATION_LINK',
|
||||
JOIN_TEAM = 'JOIN_TEAM',
|
||||
@@ -11,5 +12,52 @@ export enum OperationLogEventEnum {
|
||||
RELOCATE_DEPARTMENT = 'RELOCATE_DEPARTMENT',
|
||||
CREATE_GROUP = 'CREATE_GROUP',
|
||||
DELETE_GROUP = 'DELETE_GROUP',
|
||||
ASSIGN_PERMISSION = 'ASSIGN_PERMISSION'
|
||||
ASSIGN_PERMISSION = 'ASSIGN_PERMISSION',
|
||||
//APP
|
||||
CREATE_APP = 'CREATE_APP',
|
||||
UPDATE_APP_INFO = 'UPDATE_APP_INFO',
|
||||
MOVE_APP = 'MOVE_APP',
|
||||
DELETE_APP = 'DELETE_APP',
|
||||
UPDATE_APP_COLLABORATOR = 'UPDATE_APP_COLLABORATOR',
|
||||
DELETE_APP_COLLABORATOR = 'DELETE_APP_COLLABORATOR',
|
||||
TRANSFER_APP_OWNERSHIP = 'TRANSFER_APP_OWNERSHIP',
|
||||
CREATE_APP_COPY = 'CREATE_APP_COPY',
|
||||
CREATE_APP_FOLDER = 'CREATE_APP_FOLDER',
|
||||
UPDATE_PUBLISH_APP = 'UPDATE_PUBLISH_APP',
|
||||
CREATE_APP_PUBLISH_CHANNEL = 'CREATE_APP_PUBLISH_CHANNEL',
|
||||
UPDATE_APP_PUBLISH_CHANNEL = 'UPDATE_APP_PUBLISH_CHANNEL',
|
||||
DELETE_APP_PUBLISH_CHANNEL = 'DELETE_APP_PUBLISH_CHANNEL',
|
||||
EXPORT_APP_CHAT_LOG = 'EXPORT_APP_CHAT_LOG',
|
||||
//Dataset
|
||||
CREATE_DATASET = 'CREATE_DATASET',
|
||||
UPDATE_DATASET = 'UPDATE_DATASET',
|
||||
DELETE_DATASET = 'DELETE_DATASET',
|
||||
MOVE_DATASET = 'MOVE_DATASET',
|
||||
UPDATE_DATASET_COLLABORATOR = 'UPDATE_DATASET_COLLABORATOR',
|
||||
DELETE_DATASET_COLLABORATOR = 'DELETE_DATASET_COLLABORATOR',
|
||||
TRANSFER_DATASET_OWNERSHIP = 'TRANSFER_DATASET_OWNERSHIP',
|
||||
EXPORT_DATASET = 'EXPORT_DATASET',
|
||||
CREATE_DATASET_FOLDER = 'CREATE_DATASET_FOLDER',
|
||||
//Collection
|
||||
CREATE_COLLECTION = 'CREATE_COLLECTION',
|
||||
UPDATE_COLLECTION = 'UPDATE_COLLECTION',
|
||||
DELETE_COLLECTION = 'DELETE_COLLECTION',
|
||||
RETRAIN_COLLECTION = 'RETRAIN_COLLECTION',
|
||||
//Data
|
||||
CREATE_DATA = 'CREATE_DATA',
|
||||
UPDATE_DATA = 'UPDATE_DATA',
|
||||
DELETE_DATA = 'DELETE_DATA',
|
||||
//SearchTest
|
||||
SEARCH_TEST = 'SEARCH_TEST',
|
||||
//Account
|
||||
CHANGE_PASSWORD = 'CHANGE_PASSWORD',
|
||||
CHANGE_NOTIFICATION_SETTINGS = 'CHANGE_NOTIFICATION_SETTINGS',
|
||||
CHANGE_MEMBER_NAME_ACCOUNT = 'CHANGE_MEMBER_NAME_ACCOUNT',
|
||||
PURCHASE_PLAN = 'PURCHASE_PLAN',
|
||||
EXPORT_BILL_RECORDS = 'EXPORT_BILL_RECORDS',
|
||||
CREATE_INVOICE = 'CREATE_INVOICE',
|
||||
SET_INVOICE_HEADER = 'SET_INVOICE_HEADER',
|
||||
CREATE_API_KEY = 'CREATE_API_KEY',
|
||||
UPDATE_API_KEY = 'UPDATE_API_KEY',
|
||||
DELETE_API_KEY = 'DELETE_API_KEY'
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ const staticPluginList = [
|
||||
'WeWorkWebhook',
|
||||
'google',
|
||||
'bing',
|
||||
'bocha',
|
||||
'delay'
|
||||
];
|
||||
// Run in worker thread (Have npm packages)
|
||||
|
||||
677
packages/plugins/src/bocha/template.json
Normal file
@@ -0,0 +1,677 @@
|
||||
{
|
||||
"author": "",
|
||||
"name": "博查搜索",
|
||||
"avatar": "core/workflow/template/bocha",
|
||||
"intro": "使用博查AI搜索引擎进行网络搜索。",
|
||||
"showStatus": true,
|
||||
"weight": 10,
|
||||
"courseUrl": "",
|
||||
"isTool": true,
|
||||
"templateType": "search",
|
||||
"workflow": {
|
||||
"nodes": [
|
||||
{
|
||||
"nodeId": "pluginInput",
|
||||
"name": "workflow:template.plugin_start",
|
||||
"intro": "workflow:intro_plugin_input",
|
||||
"avatar": "core/workflow/template/workflowStart",
|
||||
"flowNodeType": "pluginInput",
|
||||
"showStatus": false,
|
||||
"position": {
|
||||
"x": 636.3048409085379,
|
||||
"y": -238.61714728578016
|
||||
},
|
||||
"version": "481",
|
||||
"inputs": [
|
||||
{
|
||||
"renderTypeList": [
|
||||
"input"
|
||||
],
|
||||
"selectedTypeIndex": 0,
|
||||
"valueType": "string",
|
||||
"canEdit": true,
|
||||
"key": "apiKey",
|
||||
"label": "apiKey",
|
||||
"description": "博查API密钥",
|
||||
"defaultValue": "",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"renderTypeList": [
|
||||
"input",
|
||||
"reference"
|
||||
],
|
||||
"selectedTypeIndex": 0,
|
||||
"valueType": "string",
|
||||
"canEdit": true,
|
||||
"key": "query",
|
||||
"label": "query",
|
||||
"description": "搜索查询词",
|
||||
"defaultValue": "",
|
||||
"required": true,
|
||||
"toolDescription": "搜索查询词"
|
||||
},
|
||||
{
|
||||
"renderTypeList": [
|
||||
"input",
|
||||
"reference"
|
||||
],
|
||||
"selectedTypeIndex": 0,
|
||||
"valueType": "string",
|
||||
"canEdit": true,
|
||||
"key": "freshness",
|
||||
"label": "freshness",
|
||||
"description": "搜索指定时间范围内的网页。可填值:oneDay(一天内)、oneWeek(一周内)、oneMonth(一个月内)、oneYear(一年内)、noLimit(不限,默认)、YYYY-MM-DD..YYYY-MM-DD(日期范围)、YYYY-MM-DD(指定日期)",
|
||||
"defaultValue": "noLimit",
|
||||
"required": false,
|
||||
"toolDescription": "搜索时间范围"
|
||||
},
|
||||
{
|
||||
"renderTypeList": [
|
||||
"input",
|
||||
"reference"
|
||||
],
|
||||
"selectedTypeIndex": 0,
|
||||
"valueType": "boolean",
|
||||
"canEdit": true,
|
||||
"key": "summary",
|
||||
"label": "summary",
|
||||
"description": "是否显示文本摘要。true显示,false不显示(默认)",
|
||||
"defaultValue": false,
|
||||
"required": false,
|
||||
"toolDescription": "是否显示文本摘要"
|
||||
},
|
||||
{
|
||||
"renderTypeList": [
|
||||
"input",
|
||||
"reference"
|
||||
],
|
||||
"selectedTypeIndex": 0,
|
||||
"valueType": "string",
|
||||
"canEdit": true,
|
||||
"key": "include",
|
||||
"label": "include",
|
||||
"description": "指定搜索的site范围。多个域名使用|或,分隔,最多20个。例如:qq.com|m.163.com",
|
||||
"defaultValue": "",
|
||||
"required": false,
|
||||
"toolDescription": "指定搜索的site范围"
|
||||
},
|
||||
{
|
||||
"renderTypeList": [
|
||||
"input",
|
||||
"reference"
|
||||
],
|
||||
"selectedTypeIndex": 0,
|
||||
"valueType": "string",
|
||||
"canEdit": true,
|
||||
"key": "exclude",
|
||||
"label": "exclude",
|
||||
"description": "排除搜索的网站范围。多个域名使用|或,分隔,最多20个。例如:qq.com|m.163.com",
|
||||
"defaultValue": "",
|
||||
"required": false,
|
||||
"toolDescription": "排除搜索的网站范围"
|
||||
},
|
||||
{
|
||||
"renderTypeList": [
|
||||
"input",
|
||||
"reference"
|
||||
],
|
||||
"selectedTypeIndex": 0,
|
||||
"valueType": "number",
|
||||
"canEdit": true,
|
||||
"key": "count",
|
||||
"label": "count",
|
||||
"description": "返回结果的条数。可填范围:1-50,默认为10",
|
||||
"defaultValue": 10,
|
||||
"required": false,
|
||||
"min": 1,
|
||||
"max": 50,
|
||||
"toolDescription": "返回结果条数"
|
||||
}
|
||||
],
|
||||
"outputs": [
|
||||
{
|
||||
"id": "apiKey",
|
||||
"valueType": "string",
|
||||
"key": "apiKey",
|
||||
"label": "apiKey",
|
||||
"type": "hidden"
|
||||
},
|
||||
{
|
||||
"id": "query",
|
||||
"valueType": "string",
|
||||
"key": "query",
|
||||
"label": "query",
|
||||
"type": "hidden"
|
||||
},
|
||||
{
|
||||
"id": "freshness",
|
||||
"valueType": "string",
|
||||
"key": "freshness",
|
||||
"label": "freshness",
|
||||
"type": "hidden"
|
||||
},
|
||||
{
|
||||
"id": "summary",
|
||||
"valueType": "boolean",
|
||||
"key": "summary",
|
||||
"label": "summary",
|
||||
"type": "hidden"
|
||||
},
|
||||
{
|
||||
"id": "include",
|
||||
"valueType": "string",
|
||||
"key": "include",
|
||||
"label": "include",
|
||||
"type": "hidden"
|
||||
},
|
||||
{
|
||||
"id": "exclude",
|
||||
"valueType": "string",
|
||||
"key": "exclude",
|
||||
"label": "exclude",
|
||||
"type": "hidden"
|
||||
},
|
||||
{
|
||||
"id": "count",
|
||||
"valueType": "number",
|
||||
"key": "count",
|
||||
"label": "count",
|
||||
"type": "hidden"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"nodeId": "pluginOutput",
|
||||
"name": "common:core.module.template.self_output",
|
||||
"intro": "workflow:intro_custom_plugin_output",
|
||||
"avatar": "core/workflow/template/pluginOutput",
|
||||
"flowNodeType": "pluginOutput",
|
||||
"showStatus": false,
|
||||
"position": {
|
||||
"x": 2764.1105686698083,
|
||||
"y": -30.617147285780163
|
||||
},
|
||||
"version": "481",
|
||||
"inputs": [
|
||||
{
|
||||
"renderTypeList": [
|
||||
"reference"
|
||||
],
|
||||
"valueType": "object",
|
||||
"canEdit": true,
|
||||
"key": "result",
|
||||
"label": "result",
|
||||
"isToolOutput": true,
|
||||
"description": "",
|
||||
"value": [
|
||||
"nyA6oA8mF1iW",
|
||||
"httpRawResponse"
|
||||
]
|
||||
}
|
||||
],
|
||||
"outputs": []
|
||||
},
|
||||
{
|
||||
"nodeId": "pluginConfig",
|
||||
"name": "common:core.module.template.system_config",
|
||||
"intro": "",
|
||||
"avatar": "core/workflow/template/systemConfig",
|
||||
"flowNodeType": "pluginConfig",
|
||||
"position": {
|
||||
"x": 184.66337662472682,
|
||||
"y": -216.05298493910115
|
||||
},
|
||||
"version": "4811",
|
||||
"inputs": [],
|
||||
"outputs": []
|
||||
},
|
||||
{
|
||||
"nodeId": "nyA6oA8mF1iW",
|
||||
"name": "HTTP 请求",
|
||||
"intro": "调用博查搜索API",
|
||||
"avatar": "core/workflow/template/httpRequest",
|
||||
"flowNodeType": "httpRequest468",
|
||||
"showStatus": true,
|
||||
"position": {
|
||||
"x": 1335.0647252518884,
|
||||
"y": -455.9043948565971
|
||||
},
|
||||
"version": "481",
|
||||
"inputs": [
|
||||
{
|
||||
"key": "system_addInputParam",
|
||||
"renderTypeList": [
|
||||
"addInputParam"
|
||||
],
|
||||
"valueType": "dynamic",
|
||||
"label": "",
|
||||
"required": false,
|
||||
"description": "common:core.module.input.description.HTTP Dynamic Input",
|
||||
"customInputConfig": {
|
||||
"selectValueTypeList": [
|
||||
"string",
|
||||
"number",
|
||||
"boolean",
|
||||
"object",
|
||||
"arrayString",
|
||||
"arrayNumber",
|
||||
"arrayBoolean",
|
||||
"arrayObject",
|
||||
"arrayAny",
|
||||
"any",
|
||||
"chatHistory",
|
||||
"datasetQuote",
|
||||
"dynamic",
|
||||
"selectDataset",
|
||||
"selectApp"
|
||||
],
|
||||
"showDescription": false,
|
||||
"showDefaultValue": true
|
||||
},
|
||||
"debugLabel": "",
|
||||
"toolDescription": ""
|
||||
},
|
||||
{
|
||||
"key": "system_httpMethod",
|
||||
"renderTypeList": [
|
||||
"custom"
|
||||
],
|
||||
"valueType": "string",
|
||||
"label": "",
|
||||
"value": "POST",
|
||||
"required": true,
|
||||
"debugLabel": "",
|
||||
"toolDescription": ""
|
||||
},
|
||||
{
|
||||
"key": "system_httpTimeout",
|
||||
"renderTypeList": [
|
||||
"custom"
|
||||
],
|
||||
"valueType": "number",
|
||||
"label": "",
|
||||
"value": 30,
|
||||
"min": 5,
|
||||
"max": 600,
|
||||
"required": true,
|
||||
"debugLabel": "",
|
||||
"toolDescription": ""
|
||||
},
|
||||
{
|
||||
"key": "system_httpReqUrl",
|
||||
"renderTypeList": [
|
||||
"hidden"
|
||||
],
|
||||
"valueType": "string",
|
||||
"label": "",
|
||||
"description": "common:core.module.input.description.Http Request Url",
|
||||
"placeholder": "https://api.ai.com/getInventory",
|
||||
"required": false,
|
||||
"value": "https://api.bochaai.com/v1/web-search",
|
||||
"debugLabel": "",
|
||||
"toolDescription": ""
|
||||
},
|
||||
{
|
||||
"key": "system_httpHeader",
|
||||
"renderTypeList": [
|
||||
"custom"
|
||||
],
|
||||
"valueType": "any",
|
||||
"value": [
|
||||
{
|
||||
"key": "Authorization",
|
||||
"type": "string",
|
||||
"value": "Bearer {{$pluginInput.apiKey$}}"
|
||||
},
|
||||
{
|
||||
"key": "Content-Type",
|
||||
"type": "string",
|
||||
"value": "application/json"
|
||||
}
|
||||
],
|
||||
"label": "",
|
||||
"description": "common:core.module.input.description.Http Request Header",
|
||||
"placeholder": "common:core.module.input.description.Http Request Header",
|
||||
"required": false,
|
||||
"debugLabel": "",
|
||||
"toolDescription": ""
|
||||
},
|
||||
{
|
||||
"key": "system_httpParams",
|
||||
"renderTypeList": [
|
||||
"hidden"
|
||||
],
|
||||
"valueType": "any",
|
||||
"value": [],
|
||||
"label": "",
|
||||
"required": false,
|
||||
"debugLabel": "",
|
||||
"toolDescription": ""
|
||||
},
|
||||
{
|
||||
"key": "system_httpJsonBody",
|
||||
"renderTypeList": [
|
||||
"hidden"
|
||||
],
|
||||
"valueType": "any",
|
||||
"value": "{\n \"query\": \"{{query}}\",\n \"freshness\": \"{{freshness}}\",\n \"summary\": {{summary}},\n \"include\": \"{{include}}\",\n \"exclude\": \"{{exclude}}\",\n \"count\": {{count}}\n}",
|
||||
"label": "",
|
||||
"required": false,
|
||||
"debugLabel": "",
|
||||
"toolDescription": ""
|
||||
},
|
||||
{
|
||||
"key": "system_httpFormBody",
|
||||
"renderTypeList": [
|
||||
"hidden"
|
||||
],
|
||||
"valueType": "any",
|
||||
"value": [],
|
||||
"label": "",
|
||||
"required": false,
|
||||
"debugLabel": "",
|
||||
"toolDescription": ""
|
||||
},
|
||||
{
|
||||
"key": "system_httpContentType",
|
||||
"renderTypeList": [
|
||||
"hidden"
|
||||
],
|
||||
"valueType": "string",
|
||||
"value": "json",
|
||||
"label": "",
|
||||
"required": false,
|
||||
"debugLabel": "",
|
||||
"toolDescription": ""
|
||||
},
|
||||
{
|
||||
"valueType": "string",
|
||||
"renderTypeList": [
|
||||
"reference"
|
||||
],
|
||||
"key": "query",
|
||||
"label": "query",
|
||||
"toolDescription": "博查搜索检索词",
|
||||
"required": true,
|
||||
"canEdit": true,
|
||||
"editField": {
|
||||
"key": true,
|
||||
"description": true
|
||||
},
|
||||
"customInputConfig": {
|
||||
"selectValueTypeList": [
|
||||
"string",
|
||||
"number",
|
||||
"boolean",
|
||||
"object",
|
||||
"arrayString",
|
||||
"arrayNumber",
|
||||
"arrayBoolean",
|
||||
"arrayObject",
|
||||
"arrayAny",
|
||||
"any",
|
||||
"chatHistory",
|
||||
"datasetQuote",
|
||||
"dynamic",
|
||||
"selectApp",
|
||||
"selectDataset"
|
||||
],
|
||||
"showDescription": false,
|
||||
"showDefaultValue": true
|
||||
},
|
||||
"value": [
|
||||
"pluginInput",
|
||||
"query"
|
||||
]
|
||||
},
|
||||
{
|
||||
"valueType": "string",
|
||||
"renderTypeList": [
|
||||
"reference"
|
||||
],
|
||||
"key": "freshness",
|
||||
"label": "freshness",
|
||||
"toolDescription": "搜索时间范围",
|
||||
"required": false,
|
||||
"canEdit": true,
|
||||
"editField": {
|
||||
"key": true,
|
||||
"description": true
|
||||
},
|
||||
"customInputConfig": {
|
||||
"selectValueTypeList": [
|
||||
"string",
|
||||
"number",
|
||||
"boolean",
|
||||
"object",
|
||||
"arrayString",
|
||||
"arrayNumber",
|
||||
"arrayBoolean",
|
||||
"arrayObject",
|
||||
"arrayAny",
|
||||
"any",
|
||||
"chatHistory",
|
||||
"datasetQuote",
|
||||
"dynamic",
|
||||
"selectApp",
|
||||
"selectDataset"
|
||||
],
|
||||
"showDescription": false,
|
||||
"showDefaultValue": true
|
||||
},
|
||||
"value": [
|
||||
"pluginInput",
|
||||
"freshness"
|
||||
]
|
||||
},
|
||||
{
|
||||
"valueType": "boolean",
|
||||
"renderTypeList": [
|
||||
"reference"
|
||||
],
|
||||
"key": "summary",
|
||||
"label": "summary",
|
||||
"toolDescription": "是否显示文本摘要",
|
||||
"required": false,
|
||||
"canEdit": true,
|
||||
"editField": {
|
||||
"key": true,
|
||||
"description": true
|
||||
},
|
||||
"customInputConfig": {
|
||||
"selectValueTypeList": [
|
||||
"string",
|
||||
"number",
|
||||
"boolean",
|
||||
"object",
|
||||
"arrayString",
|
||||
"arrayNumber",
|
||||
"arrayBoolean",
|
||||
"arrayObject",
|
||||
"arrayAny",
|
||||
"any",
|
||||
"chatHistory",
|
||||
"datasetQuote",
|
||||
"dynamic",
|
||||
"selectApp",
|
||||
"selectDataset"
|
||||
],
|
||||
"showDescription": false,
|
||||
"showDefaultValue": true
|
||||
},
|
||||
"value": [
|
||||
"pluginInput",
|
||||
"summary"
|
||||
]
|
||||
},
|
||||
{
|
||||
"valueType": "string",
|
||||
"renderTypeList": [
|
||||
"reference"
|
||||
],
|
||||
"key": "include",
|
||||
"label": "include",
|
||||
"toolDescription": "指定搜索的site范围",
|
||||
"required": false,
|
||||
"canEdit": true,
|
||||
"editField": {
|
||||
"key": true,
|
||||
"description": true
|
||||
},
|
||||
"customInputConfig": {
|
||||
"selectValueTypeList": [
|
||||
"string",
|
||||
"number",
|
||||
"boolean",
|
||||
"object",
|
||||
"arrayString",
|
||||
"arrayNumber",
|
||||
"arrayBoolean",
|
||||
"arrayObject",
|
||||
"arrayAny",
|
||||
"any",
|
||||
"chatHistory",
|
||||
"datasetQuote",
|
||||
"dynamic",
|
||||
"selectApp",
|
||||
"selectDataset"
|
||||
],
|
||||
"showDescription": false,
|
||||
"showDefaultValue": true
|
||||
},
|
||||
"value": [
|
||||
"pluginInput",
|
||||
"include"
|
||||
]
|
||||
},
|
||||
{
|
||||
"valueType": "string",
|
||||
"renderTypeList": [
|
||||
"reference"
|
||||
],
|
||||
"key": "exclude",
|
||||
"label": "exclude",
|
||||
"toolDescription": "排除搜索的网站范围",
|
||||
"required": false,
|
||||
"canEdit": true,
|
||||
"editField": {
|
||||
"key": true,
|
||||
"description": true
|
||||
},
|
||||
"customInputConfig": {
|
||||
"selectValueTypeList": [
|
||||
"string",
|
||||
"number",
|
||||
"boolean",
|
||||
"object",
|
||||
"arrayString",
|
||||
"arrayNumber",
|
||||
"arrayBoolean",
|
||||
"arrayObject",
|
||||
"arrayAny",
|
||||
"any",
|
||||
"chatHistory",
|
||||
"datasetQuote",
|
||||
"dynamic",
|
||||
"selectApp",
|
||||
"selectDataset"
|
||||
],
|
||||
"showDescription": false,
|
||||
"showDefaultValue": true
|
||||
},
|
||||
"value": [
|
||||
"pluginInput",
|
||||
"exclude"
|
||||
]
|
||||
},
|
||||
{
|
||||
"valueType": "number",
|
||||
"renderTypeList": [
|
||||
"reference"
|
||||
],
|
||||
"key": "count",
|
||||
"label": "count",
|
||||
"toolDescription": "返回结果条数",
|
||||
"required": false,
|
||||
"canEdit": true,
|
||||
"editField": {
|
||||
"key": true,
|
||||
"description": true
|
||||
},
|
||||
"customInputConfig": {
|
||||
"selectValueTypeList": [
|
||||
"string",
|
||||
"number",
|
||||
"boolean",
|
||||
"object",
|
||||
"arrayString",
|
||||
"arrayNumber",
|
||||
"arrayBoolean",
|
||||
"arrayObject",
|
||||
"arrayAny",
|
||||
"any",
|
||||
"chatHistory",
|
||||
"datasetQuote",
|
||||
"dynamic",
|
||||
"selectApp",
|
||||
"selectDataset"
|
||||
],
|
||||
"showDescription": false,
|
||||
"showDefaultValue": true
|
||||
},
|
||||
"value": [
|
||||
"pluginInput",
|
||||
"count"
|
||||
]
|
||||
}
|
||||
],
|
||||
"outputs": [
|
||||
{
|
||||
"id": "error",
|
||||
"key": "error",
|
||||
"label": "workflow:request_error",
|
||||
"description": "HTTP请求错误信息,成功时返回空",
|
||||
"valueType": "object",
|
||||
"type": "static"
|
||||
},
|
||||
{
|
||||
"id": "httpRawResponse",
|
||||
"key": "httpRawResponse",
|
||||
"required": true,
|
||||
"label": "workflow:raw_response",
|
||||
"description": "HTTP请求的原始响应。只能接受字符串或JSON类型响应数据。",
|
||||
"valueType": "any",
|
||||
"type": "static"
|
||||
},
|
||||
{
|
||||
"id": "system_addOutputParam",
|
||||
"key": "system_addOutputParam",
|
||||
"type": "dynamic",
|
||||
"valueType": "dynamic",
|
||||
"label": "",
|
||||
"editField": {
|
||||
"key": true,
|
||||
"valueType": true
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"edges": [
|
||||
{
|
||||
"source": "pluginInput",
|
||||
"target": "nyA6oA8mF1iW",
|
||||
"sourceHandle": "pluginInput-source-right",
|
||||
"targetHandle": "nyA6oA8mF1iW-target-left"
|
||||
},
|
||||
{
|
||||
"source": "nyA6oA8mF1iW",
|
||||
"target": "pluginOutput",
|
||||
"sourceHandle": "nyA6oA8mF1iW-source-right",
|
||||
"targetHandle": "pluginOutput-target-left"
|
||||
}
|
||||
]
|
||||
},
|
||||
"chatConfig": {}
|
||||
}
|
||||
7
packages/service/common/api/type.d.ts
vendored
@@ -1,5 +1,8 @@
|
||||
import type { ApiDatasetDetailResponse } from '@fastgpt/global/core/dataset/apiDataset';
|
||||
import { FeishuServer, YuqueServer } from '@fastgpt/global/core/dataset/apiDataset';
|
||||
import type {
|
||||
ApiDatasetDetailResponse,
|
||||
FeishuServer,
|
||||
YuqueServer
|
||||
} from '@fastgpt/global/core/dataset/apiDataset/type';
|
||||
import type {
|
||||
DeepRagSearchProps,
|
||||
SearchDatasetDataResponse
|
||||
|
||||
@@ -142,23 +142,26 @@ export const updateRawTextBufferExpiredTime = async ({
|
||||
};
|
||||
|
||||
export const clearExpiredRawTextBufferCron = async () => {
|
||||
const gridBucket = getGridBucket();
|
||||
|
||||
const clearExpiredRawTextBuffer = async () => {
|
||||
addLog.debug('Clear expired raw text buffer start');
|
||||
const gridBucket = getGridBucket();
|
||||
|
||||
return retryFn(async () => {
|
||||
const data = await MongoRawTextBufferSchema.find(
|
||||
{
|
||||
'metadata.expiredTime': { $lt: new Date() }
|
||||
},
|
||||
'_id'
|
||||
).lean();
|
||||
const data = await MongoRawTextBufferSchema.find(
|
||||
{
|
||||
'metadata.expiredTime': { $lt: new Date() }
|
||||
},
|
||||
'_id'
|
||||
).lean();
|
||||
|
||||
for (const item of data) {
|
||||
for (const item of data) {
|
||||
try {
|
||||
await gridBucket.delete(item._id);
|
||||
} catch (error) {
|
||||
addLog.error('Delete expired raw text buffer error', error);
|
||||
}
|
||||
addLog.debug('Clear expired raw text buffer end');
|
||||
});
|
||||
}
|
||||
addLog.debug('Clear expired raw text buffer end');
|
||||
};
|
||||
|
||||
setCron('*/10 * * * *', async () => {
|
||||
|
||||
@@ -7,12 +7,13 @@ import { MongoChatFileSchema, MongoDatasetFileSchema } from './schema';
|
||||
import { detectFileEncoding, detectFileEncodingByPath } from '@fastgpt/global/common/file/tools';
|
||||
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
|
||||
import { readRawContentByFileBuffer } from '../read/utils';
|
||||
import { gridFsStream2Buffer, stream2Encoding } from './utils';
|
||||
import { computeGridFsChunSize, gridFsStream2Buffer, stream2Encoding } from './utils';
|
||||
import { addLog } from '../../system/log';
|
||||
import { parseFileExtensionFromUrl } from '@fastgpt/global/common/string/tools';
|
||||
import { Readable } from 'stream';
|
||||
import { addRawTextBuffer, getRawTextBuffer } from '../../buffer/rawText/controller';
|
||||
import { addMinutes } from 'date-fns';
|
||||
import { retryFn } from '@fastgpt/global/common/system/utils';
|
||||
|
||||
export function getGFSCollection(bucket: `${BucketNameEnum}`) {
|
||||
MongoDatasetFileSchema;
|
||||
@@ -64,23 +65,7 @@ export async function uploadFile({
|
||||
// create a gridfs bucket
|
||||
const bucket = getGridBucket(bucketName);
|
||||
|
||||
const fileSize = stats.size;
|
||||
// 单块大小:尽可能大,但不超过 14MB,不小于512KB
|
||||
const chunkSizeBytes = (() => {
|
||||
// 计算理想块大小:文件大小 ÷ 目标块数(10)。 并且每个块需要小于 14MB
|
||||
const idealChunkSize = Math.min(Math.ceil(fileSize / 10), 14 * 1024 * 1024);
|
||||
|
||||
// 确保块大小至少为512KB
|
||||
const minChunkSize = 512 * 1024; // 512KB
|
||||
|
||||
// 取理想块大小和最小块大小中的较大值
|
||||
let chunkSize = Math.max(idealChunkSize, minChunkSize);
|
||||
|
||||
// 将块大小向上取整到最接近的64KB的倍数,使其更整齐
|
||||
chunkSize = Math.ceil(chunkSize / (64 * 1024)) * (64 * 1024);
|
||||
|
||||
return chunkSize;
|
||||
})();
|
||||
const chunkSizeBytes = computeGridFsChunSize(stats.size);
|
||||
|
||||
const stream = bucket.openUploadStream(filename, {
|
||||
metadata,
|
||||
@@ -173,24 +158,18 @@ export async function getFileById({
|
||||
|
||||
export async function delFileByFileIdList({
|
||||
bucketName,
|
||||
fileIdList,
|
||||
retry = 3
|
||||
fileIdList
|
||||
}: {
|
||||
bucketName: `${BucketNameEnum}`;
|
||||
fileIdList: string[];
|
||||
retry?: number;
|
||||
}): Promise<any> {
|
||||
try {
|
||||
return retryFn(async () => {
|
||||
const bucket = getGridBucket(bucketName);
|
||||
|
||||
for await (const fileId of fileIdList) {
|
||||
await bucket.delete(new Types.ObjectId(fileId));
|
||||
}
|
||||
} catch (error) {
|
||||
if (retry > 0) {
|
||||
return delFileByFileIdList({ bucketName, fileIdList, retry: retry - 1 });
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function getDownloadStream({
|
||||
|
||||
@@ -105,3 +105,20 @@ export const stream2Encoding = async (stream: NodeJS.ReadableStream) => {
|
||||
stream: copyStream
|
||||
};
|
||||
};
|
||||
|
||||
// 单块大小:尽可能大,但不超过 14MB,不小于512KB
|
||||
export const computeGridFsChunSize = (fileSize: number) => {
|
||||
// 计算理想块大小:文件大小 ÷ 目标块数(10)。 并且每个块需要小于 14MB
|
||||
const idealChunkSize = Math.min(Math.ceil(fileSize / 10), 14 * 1024 * 1024);
|
||||
|
||||
// 确保块大小至少为512KB
|
||||
const minChunkSize = 512 * 1024; // 512KB
|
||||
|
||||
// 取理想块大小和最小块大小中的较大值
|
||||
let chunkSize = Math.max(idealChunkSize, minChunkSize);
|
||||
|
||||
// 将块大小向上取整到最接近的64KB的倍数,使其更整齐
|
||||
chunkSize = Math.ceil(chunkSize / (64 * 1024)) * (64 * 1024);
|
||||
|
||||
return chunkSize;
|
||||
};
|
||||
|
||||
@@ -22,7 +22,7 @@ export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
|
||||
maxSize *= 1024 * 1024;
|
||||
|
||||
class UploadModel {
|
||||
uploader = multer({
|
||||
uploaderSingle = multer({
|
||||
limits: {
|
||||
fieldSize: maxSize
|
||||
},
|
||||
@@ -41,8 +41,7 @@ export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
|
||||
}
|
||||
})
|
||||
}).single('file');
|
||||
|
||||
async doUpload<T = any>(
|
||||
async getUploadFile<T = any>(
|
||||
req: NextApiRequest,
|
||||
res: NextApiResponse,
|
||||
originBucketName?: `${BucketNameEnum}`
|
||||
@@ -54,7 +53,7 @@ export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
|
||||
bucketName?: `${BucketNameEnum}`;
|
||||
}>((resolve, reject) => {
|
||||
// @ts-ignore
|
||||
this.uploader(req, res, (error) => {
|
||||
this.uploaderSingle(req, res, (error) => {
|
||||
if (error) {
|
||||
return reject(error);
|
||||
}
|
||||
@@ -94,6 +93,58 @@ export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
uploaderMultiple = multer({
|
||||
limits: {
|
||||
fieldSize: maxSize
|
||||
},
|
||||
preservePath: true,
|
||||
storage: multer.diskStorage({
|
||||
// destination: (_req, _file, cb) => {
|
||||
// cb(null, tmpFileDirPath);
|
||||
// },
|
||||
filename: (req, file, cb) => {
|
||||
if (!file?.originalname) {
|
||||
cb(new Error('File not found'), '');
|
||||
} else {
|
||||
const { ext } = path.parse(decodeURIComponent(file.originalname));
|
||||
cb(null, `${getNanoid()}${ext}`);
|
||||
}
|
||||
}
|
||||
})
|
||||
}).array('file', global.feConfigs?.uploadFileMaxSize);
|
||||
async getUploadFiles<T = any>(req: NextApiRequest, res: NextApiResponse) {
|
||||
return new Promise<{
|
||||
files: FileType[];
|
||||
data: T;
|
||||
}>((resolve, reject) => {
|
||||
// @ts-ignore
|
||||
this.uploaderMultiple(req, res, (error) => {
|
||||
if (error) {
|
||||
console.log(error);
|
||||
return reject(error);
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
const files = req.files as FileType[];
|
||||
|
||||
resolve({
|
||||
files: files.map((file) => ({
|
||||
...file,
|
||||
originalname: decodeURIComponent(file.originalname)
|
||||
})),
|
||||
data: (() => {
|
||||
if (!req.body?.data) return {};
|
||||
try {
|
||||
return JSON.parse(req.body.data);
|
||||
} catch (error) {
|
||||
return {};
|
||||
}
|
||||
})()
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return new UploadModel();
|
||||
|
||||
@@ -4,7 +4,8 @@ import { MongoFrequencyLimit } from './schema';
|
||||
export const authFrequencyLimit = async ({
|
||||
eventId,
|
||||
maxAmount,
|
||||
expiredTime
|
||||
expiredTime,
|
||||
num = 1
|
||||
}: AuthFrequencyLimitProps) => {
|
||||
try {
|
||||
// 对应 eventId 的 account+1, 不存在的话,则创建一个
|
||||
@@ -14,7 +15,7 @@ export const authFrequencyLimit = async ({
|
||||
expiredTime: { $gte: new Date() }
|
||||
},
|
||||
{
|
||||
$inc: { amount: 1 },
|
||||
$inc: { amount: num },
|
||||
// If not exist, set the expiredTime
|
||||
$setOnInsert: { expiredTime }
|
||||
},
|
||||
|
||||
@@ -6,7 +6,9 @@ export enum TimerIdEnum {
|
||||
updateStandardPlan = 'updateStandardPlan',
|
||||
scheduleTriggerApp = 'scheduleTriggerApp',
|
||||
notification = 'notification',
|
||||
clearExpiredRawTextBuffer = 'clearExpiredRawTextBuffer'
|
||||
|
||||
clearExpiredRawTextBuffer = 'clearExpiredRawTextBuffer',
|
||||
clearExpiredDatasetImage = 'clearExpiredDatasetImage'
|
||||
}
|
||||
|
||||
export enum LockNotificationEnum {
|
||||
|
||||
@@ -20,6 +20,10 @@ export const getVlmModel = (model?: string) => {
|
||||
?.find((item) => item.model === model || item.name === model);
|
||||
};
|
||||
|
||||
export const getVlmModelList = () => {
|
||||
return Array.from(global.llmModelMap.values())?.filter((item) => item.vision) || [];
|
||||
};
|
||||
|
||||
export const getDefaultEmbeddingModel = () => global?.systemDefaultModel.embedding!;
|
||||
export const getEmbeddingModel = (model?: string) => {
|
||||
if (!model) return getDefaultEmbeddingModel();
|
||||
|
||||
@@ -3,12 +3,11 @@ import type {
|
||||
ApiFileReadContentResponse,
|
||||
APIFileReadResponse,
|
||||
ApiDatasetDetailResponse,
|
||||
APIFileServer,
|
||||
APIFileItem
|
||||
} from '@fastgpt/global/core/dataset/apiDataset';
|
||||
APIFileServer
|
||||
} from '@fastgpt/global/core/dataset/apiDataset/type';
|
||||
import axios, { type Method } from 'axios';
|
||||
import { addLog } from '../../../common/system/log';
|
||||
import { readFileRawTextByUrl } from '../read';
|
||||
import { addLog } from '../../../../common/system/log';
|
||||
import { readFileRawTextByUrl } from '../../read';
|
||||
import { type ParentIdType } from '@fastgpt/global/common/parentFolder/type';
|
||||
import { type RequireOnlyOne } from '@fastgpt/global/common/type/utils';
|
||||
|
||||
@@ -3,10 +3,10 @@ import type {
|
||||
ApiFileReadContentResponse,
|
||||
ApiDatasetDetailResponse,
|
||||
FeishuServer
|
||||
} from '@fastgpt/global/core/dataset/apiDataset';
|
||||
} from '@fastgpt/global/core/dataset/apiDataset/type';
|
||||
import { type ParentIdType } from '@fastgpt/global/common/parentFolder/type';
|
||||
import axios, { type Method } from 'axios';
|
||||
import { addLog } from '../../../common/system/log';
|
||||
import { addLog } from '../../../../common/system/log';
|
||||
|
||||
type ResponseDataType = {
|
||||
success: boolean;
|
||||
@@ -1,18 +1,10 @@
|
||||
import type {
|
||||
APIFileServer,
|
||||
YuqueServer,
|
||||
FeishuServer
|
||||
} from '@fastgpt/global/core/dataset/apiDataset';
|
||||
import { useApiDatasetRequest } from './api';
|
||||
import { useYuqueDatasetRequest } from '../yuqueDataset/api';
|
||||
import { useFeishuDatasetRequest } from '../feishuDataset/api';
|
||||
import { useApiDatasetRequest } from './custom/api';
|
||||
import { useYuqueDatasetRequest } from './yuqueDataset/api';
|
||||
import { useFeishuDatasetRequest } from './feishuDataset/api';
|
||||
import type { ApiDatasetServerType } from '@fastgpt/global/core/dataset/apiDataset/type';
|
||||
|
||||
export const getApiDatasetRequest = async (data: {
|
||||
apiServer?: APIFileServer;
|
||||
yuqueServer?: YuqueServer;
|
||||
feishuServer?: FeishuServer;
|
||||
}) => {
|
||||
const { apiServer, yuqueServer, feishuServer } = data;
|
||||
export const getApiDatasetRequest = async (apiDatasetServer?: ApiDatasetServerType) => {
|
||||
const { apiServer, yuqueServer, feishuServer } = apiDatasetServer || {};
|
||||
|
||||
if (apiServer) {
|
||||
return useApiDatasetRequest({ apiServer });
|
||||
|
||||
@@ -3,9 +3,9 @@ import type {
|
||||
ApiFileReadContentResponse,
|
||||
YuqueServer,
|
||||
ApiDatasetDetailResponse
|
||||
} from '@fastgpt/global/core/dataset/apiDataset';
|
||||
} from '@fastgpt/global/core/dataset/apiDataset/type';
|
||||
import axios, { type Method } from 'axios';
|
||||
import { addLog } from '../../../common/system/log';
|
||||
import { addLog } from '../../../../common/system/log';
|
||||
import { type ParentIdType } from '@fastgpt/global/common/parentFolder/type';
|
||||
|
||||
type ResponseDataType = {
|
||||
@@ -105,7 +105,6 @@ export const useYuqueDatasetRequest = ({ yuqueServer }: { yuqueServer: YuqueServ
|
||||
if (!parentId) {
|
||||
if (yuqueServer.basePath) parentId = yuqueServer.basePath;
|
||||
}
|
||||
|
||||
let files: APIFileItem[] = [];
|
||||
|
||||
if (!parentId) {
|
||||
@@ -5,9 +5,10 @@ import {
|
||||
} from '@fastgpt/global/core/dataset/constants';
|
||||
import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
|
||||
import { MongoDatasetCollection } from './schema';
|
||||
import {
|
||||
type DatasetCollectionSchemaType,
|
||||
type DatasetSchemaType
|
||||
import type {
|
||||
DatasetCollectionSchemaType,
|
||||
DatasetDataFieldType,
|
||||
DatasetSchemaType
|
||||
} from '@fastgpt/global/core/dataset/type';
|
||||
import { MongoDatasetTraining } from '../training/schema';
|
||||
import { MongoDatasetData } from '../data/schema';
|
||||
@@ -15,7 +16,7 @@ import { delImgByRelatedId } from '../../../common/file/image/controller';
|
||||
import { deleteDatasetDataVector } from '../../../common/vectorDB/controller';
|
||||
import { delFileByFileIdList } from '../../../common/file/gridfs/controller';
|
||||
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
|
||||
import { type ClientSession } from '../../../common/mongo';
|
||||
import type { ClientSession } from '../../../common/mongo';
|
||||
import { createOrGetCollectionTags } from './utils';
|
||||
import { rawText2Chunks } from '../read';
|
||||
import { checkDatasetLimit } from '../../../support/permission/teamLimit';
|
||||
@@ -38,20 +39,25 @@ import {
|
||||
getLLMMaxChunkSize
|
||||
} from '@fastgpt/global/core/dataset/training/utils';
|
||||
import { DatasetDataIndexTypeEnum } from '@fastgpt/global/core/dataset/data/constants';
|
||||
import { deleteDatasetImage } from '../image/controller';
|
||||
import { clearCollectionImages, removeDatasetImageExpiredTime } from '../image/utils';
|
||||
|
||||
export const createCollectionAndInsertData = async ({
|
||||
dataset,
|
||||
rawText,
|
||||
relatedId,
|
||||
imageIds,
|
||||
createCollectionParams,
|
||||
backupParse = false,
|
||||
billId,
|
||||
session
|
||||
}: {
|
||||
dataset: DatasetSchemaType;
|
||||
rawText: string;
|
||||
rawText?: string;
|
||||
relatedId?: string;
|
||||
imageIds?: string[];
|
||||
createCollectionParams: CreateOneCollectionParams;
|
||||
|
||||
backupParse?: boolean;
|
||||
|
||||
billId?: string;
|
||||
@@ -69,13 +75,13 @@ export const createCollectionAndInsertData = async ({
|
||||
// Set default params
|
||||
const trainingType =
|
||||
createCollectionParams.trainingType || DatasetCollectionDataProcessModeEnum.chunk;
|
||||
const chunkSize = computeChunkSize({
|
||||
...createCollectionParams,
|
||||
trainingType,
|
||||
llmModel: getLLMModel(dataset.agentModel)
|
||||
});
|
||||
const chunkSplitter = computeChunkSplitter(createCollectionParams);
|
||||
const paragraphChunkDeep = computeParagraphChunkDeep(createCollectionParams);
|
||||
const trainingMode = getTrainingModeByCollection({
|
||||
trainingType: trainingType,
|
||||
autoIndexes: createCollectionParams.autoIndexes,
|
||||
imageIndex: createCollectionParams.imageIndex
|
||||
});
|
||||
|
||||
if (
|
||||
trainingType === DatasetCollectionDataProcessModeEnum.qa ||
|
||||
@@ -90,35 +96,60 @@ export const createCollectionAndInsertData = async ({
|
||||
delete createCollectionParams.qaPrompt;
|
||||
}
|
||||
|
||||
// 1. split chunks
|
||||
const chunks = rawText2Chunks({
|
||||
rawText,
|
||||
chunkTriggerType: createCollectionParams.chunkTriggerType,
|
||||
chunkTriggerMinSize: createCollectionParams.chunkTriggerMinSize,
|
||||
chunkSize,
|
||||
paragraphChunkDeep,
|
||||
paragraphChunkMinSize: createCollectionParams.paragraphChunkMinSize,
|
||||
maxSize: getLLMMaxChunkSize(getLLMModel(dataset.agentModel)),
|
||||
overlapRatio: trainingType === DatasetCollectionDataProcessModeEnum.chunk ? 0.2 : 0,
|
||||
customReg: chunkSplitter ? [chunkSplitter] : [],
|
||||
backupParse
|
||||
});
|
||||
// 1. split chunks or create image chunks
|
||||
const {
|
||||
chunks,
|
||||
chunkSize
|
||||
}: {
|
||||
chunks: Array<{
|
||||
q?: string;
|
||||
a?: string; // answer or custom content
|
||||
imageId?: string;
|
||||
indexes?: string[];
|
||||
}>;
|
||||
chunkSize?: number;
|
||||
} = (() => {
|
||||
if (rawText) {
|
||||
const chunkSize = computeChunkSize({
|
||||
...createCollectionParams,
|
||||
trainingType,
|
||||
llmModel: getLLMModel(dataset.agentModel)
|
||||
});
|
||||
// Process text chunks
|
||||
const chunks = rawText2Chunks({
|
||||
rawText,
|
||||
chunkTriggerType: createCollectionParams.chunkTriggerType,
|
||||
chunkTriggerMinSize: createCollectionParams.chunkTriggerMinSize,
|
||||
chunkSize,
|
||||
paragraphChunkDeep,
|
||||
paragraphChunkMinSize: createCollectionParams.paragraphChunkMinSize,
|
||||
maxSize: getLLMMaxChunkSize(getLLMModel(dataset.agentModel)),
|
||||
overlapRatio: trainingType === DatasetCollectionDataProcessModeEnum.chunk ? 0.2 : 0,
|
||||
customReg: chunkSplitter ? [chunkSplitter] : [],
|
||||
backupParse
|
||||
});
|
||||
return { chunks, chunkSize };
|
||||
}
|
||||
|
||||
if (imageIds) {
|
||||
// Process image chunks
|
||||
const chunks = imageIds.map((imageId: string) => ({
|
||||
imageId,
|
||||
indexes: []
|
||||
}));
|
||||
return { chunks };
|
||||
}
|
||||
throw new Error('Either rawText or imageIdList must be provided');
|
||||
})();
|
||||
|
||||
// 2. auth limit
|
||||
await checkDatasetLimit({
|
||||
teamId,
|
||||
insertLen: predictDataLimitLength(
|
||||
getTrainingModeByCollection({
|
||||
trainingType: trainingType,
|
||||
autoIndexes: createCollectionParams.autoIndexes,
|
||||
imageIndex: createCollectionParams.imageIndex
|
||||
}),
|
||||
chunks
|
||||
)
|
||||
insertLen: predictDataLimitLength(trainingMode, chunks)
|
||||
});
|
||||
|
||||
const fn = async (session: ClientSession) => {
|
||||
// 3. create collection
|
||||
// 3. Create collection
|
||||
const { _id: collectionId } = await createOneCollection({
|
||||
...createCollectionParams,
|
||||
trainingType,
|
||||
@@ -126,8 +157,8 @@ export const createCollectionAndInsertData = async ({
|
||||
chunkSize,
|
||||
chunkSplitter,
|
||||
|
||||
hashRawText: hashStr(rawText),
|
||||
rawTextLength: rawText.length,
|
||||
hashRawText: rawText ? hashStr(rawText) : undefined,
|
||||
rawTextLength: rawText?.length,
|
||||
nextSyncTime: (() => {
|
||||
// ignore auto collections sync for website datasets
|
||||
if (!dataset.autoSync && dataset.type === DatasetTypeEnum.websiteDataset) return undefined;
|
||||
@@ -169,11 +200,7 @@ export const createCollectionAndInsertData = async ({
|
||||
vectorModel: dataset.vectorModel,
|
||||
vlmModel: dataset.vlmModel,
|
||||
indexSize: createCollectionParams.indexSize,
|
||||
mode: getTrainingModeByCollection({
|
||||
trainingType: trainingType,
|
||||
autoIndexes: createCollectionParams.autoIndexes,
|
||||
imageIndex: createCollectionParams.imageIndex
|
||||
}),
|
||||
mode: trainingMode,
|
||||
prompt: createCollectionParams.qaPrompt,
|
||||
billId: traingBillId,
|
||||
data: chunks.map((item, index) => ({
|
||||
@@ -187,7 +214,12 @@ export const createCollectionAndInsertData = async ({
|
||||
session
|
||||
});
|
||||
|
||||
// 6. remove related image ttl
|
||||
// 6. Remove images ttl index
|
||||
await removeDatasetImageExpiredTime({
|
||||
ids: imageIds,
|
||||
collectionId,
|
||||
session
|
||||
});
|
||||
if (relatedId) {
|
||||
await MongoImage.updateMany(
|
||||
{
|
||||
@@ -207,7 +239,7 @@ export const createCollectionAndInsertData = async ({
|
||||
}
|
||||
|
||||
return {
|
||||
collectionId,
|
||||
collectionId: String(collectionId),
|
||||
insertResults
|
||||
};
|
||||
};
|
||||
@@ -288,17 +320,20 @@ export const delCollectionRelatedSource = async ({
|
||||
.map((item) => item?.metadata?.relatedImgId || '')
|
||||
.filter(Boolean);
|
||||
|
||||
// Delete files
|
||||
await delFileByFileIdList({
|
||||
bucketName: BucketNameEnum.dataset,
|
||||
fileIdList
|
||||
});
|
||||
// Delete images
|
||||
await delImgByRelatedId({
|
||||
teamId,
|
||||
relateIds: relatedImageIds,
|
||||
session
|
||||
});
|
||||
// Delete files and images in parallel
|
||||
await Promise.all([
|
||||
// Delete files
|
||||
delFileByFileIdList({
|
||||
bucketName: BucketNameEnum.dataset,
|
||||
fileIdList
|
||||
}),
|
||||
// Delete images
|
||||
delImgByRelatedId({
|
||||
teamId,
|
||||
relateIds: relatedImageIds,
|
||||
session
|
||||
})
|
||||
]);
|
||||
};
|
||||
/**
|
||||
* delete collection and it related data
|
||||
@@ -343,16 +378,16 @@ export async function delCollection({
|
||||
datasetId: { $in: datasetIds },
|
||||
collectionId: { $in: collectionIds }
|
||||
}),
|
||||
// Delete dataset_images
|
||||
clearCollectionImages(collectionIds),
|
||||
// Delete images if needed
|
||||
...(delImg
|
||||
? [
|
||||
delImgByRelatedId({
|
||||
teamId,
|
||||
relateIds: collections
|
||||
.map((item) => item?.metadata?.relatedImgId || '')
|
||||
.filter(Boolean)
|
||||
})
|
||||
]
|
||||
? collections
|
||||
.map((item) => item?.metadata?.relatedImgId || '')
|
||||
.filter(Boolean)
|
||||
.map((imageId) => deleteDatasetImage(imageId))
|
||||
: []),
|
||||
// Delete files if needed
|
||||
...(delFile
|
||||
? [
|
||||
delFileByFileIdList({
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import { MongoDatasetCollection } from './schema';
|
||||
import { type ClientSession } from '../../../common/mongo';
|
||||
import type { ClientSession } from '../../../common/mongo';
|
||||
import { MongoDatasetCollectionTags } from '../tag/schema';
|
||||
import { readFromSecondary } from '../../../common/mongo/utils';
|
||||
import {
|
||||
type CollectionWithDatasetType,
|
||||
type DatasetCollectionSchemaType
|
||||
} from '@fastgpt/global/core/dataset/type';
|
||||
import type { CollectionWithDatasetType } from '@fastgpt/global/core/dataset/type';
|
||||
import { DatasetCollectionSchemaType } from '@fastgpt/global/core/dataset/type';
|
||||
import {
|
||||
DatasetCollectionDataProcessModeEnum,
|
||||
DatasetCollectionSyncResultEnum,
|
||||
@@ -159,9 +157,7 @@ export const syncCollection = async (collection: CollectionWithDatasetType) => {
|
||||
return {
|
||||
type: DatasetSourceReadTypeEnum.apiFile,
|
||||
sourceId,
|
||||
apiServer: dataset.apiServer,
|
||||
feishuServer: dataset.feishuServer,
|
||||
yuqueServer: dataset.yuqueServer
|
||||
apiDatasetServer: dataset.apiDatasetServer
|
||||
};
|
||||
})();
|
||||
|
||||
@@ -233,18 +229,37 @@ export const syncCollection = async (collection: CollectionWithDatasetType) => {
|
||||
QA: 独立进程
|
||||
Chunk: Image Index -> Auto index -> chunk index
|
||||
*/
|
||||
export const getTrainingModeByCollection = (collection: {
|
||||
trainingType: DatasetCollectionSchemaType['trainingType'];
|
||||
autoIndexes?: DatasetCollectionSchemaType['autoIndexes'];
|
||||
imageIndex?: DatasetCollectionSchemaType['imageIndex'];
|
||||
export const getTrainingModeByCollection = ({
|
||||
trainingType,
|
||||
autoIndexes,
|
||||
imageIndex
|
||||
}: {
|
||||
trainingType: DatasetCollectionDataProcessModeEnum;
|
||||
autoIndexes?: boolean;
|
||||
imageIndex?: boolean;
|
||||
}) => {
|
||||
if (collection.trainingType === DatasetCollectionDataProcessModeEnum.qa) {
|
||||
if (
|
||||
trainingType === DatasetCollectionDataProcessModeEnum.imageParse &&
|
||||
global.feConfigs?.isPlus
|
||||
) {
|
||||
return TrainingModeEnum.imageParse;
|
||||
}
|
||||
|
||||
if (trainingType === DatasetCollectionDataProcessModeEnum.qa) {
|
||||
return TrainingModeEnum.qa;
|
||||
}
|
||||
if (collection.imageIndex && global.feConfigs?.isPlus) {
|
||||
if (
|
||||
trainingType === DatasetCollectionDataProcessModeEnum.chunk &&
|
||||
imageIndex &&
|
||||
global.feConfigs?.isPlus
|
||||
) {
|
||||
return TrainingModeEnum.image;
|
||||
}
|
||||
if (collection.autoIndexes && global.feConfigs?.isPlus) {
|
||||
if (
|
||||
trainingType === DatasetCollectionDataProcessModeEnum.chunk &&
|
||||
autoIndexes &&
|
||||
global.feConfigs?.isPlus
|
||||
) {
|
||||
return TrainingModeEnum.auto;
|
||||
}
|
||||
return TrainingModeEnum.chunk;
|
||||
|
||||
@@ -9,6 +9,7 @@ import { deleteDatasetDataVector } from '../../common/vectorDB/controller';
|
||||
import { MongoDatasetDataText } from './data/dataTextSchema';
|
||||
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
|
||||
import { retryFn } from '@fastgpt/global/common/system/utils';
|
||||
import { clearDatasetImages } from './image/utils';
|
||||
|
||||
/* ============= dataset ========== */
|
||||
/* find all datasetId by top datasetId */
|
||||
@@ -102,8 +103,10 @@ export async function delDatasetRelevantData({
|
||||
}),
|
||||
//delete dataset_datas
|
||||
MongoDatasetData.deleteMany({ teamId, datasetId: { $in: datasetIds } }),
|
||||
// Delete Image and file
|
||||
// Delete collection image and file
|
||||
delCollectionRelatedSource({ collections }),
|
||||
// Delete dataset Image
|
||||
clearDatasetImages(datasetIds),
|
||||
// Delete vector data
|
||||
deleteDatasetDataVector({ teamId, datasetIds })
|
||||
]);
|
||||
|
||||
56
packages/service/core/dataset/data/controller.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import { getDatasetImagePreviewUrl } from '../image/utils';
|
||||
import type { DatasetCiteItemType, DatasetDataSchemaType } from '@fastgpt/global/core/dataset/type';
|
||||
|
||||
export const formatDatasetDataValue = ({
|
||||
q,
|
||||
a,
|
||||
imageId,
|
||||
teamId,
|
||||
datasetId
|
||||
}: {
|
||||
q: string;
|
||||
a?: string;
|
||||
imageId?: string;
|
||||
teamId: string;
|
||||
datasetId: string;
|
||||
}): {
|
||||
q: string;
|
||||
a?: string;
|
||||
imagePreivewUrl?: string;
|
||||
} => {
|
||||
if (!imageId) {
|
||||
return {
|
||||
q,
|
||||
a
|
||||
};
|
||||
}
|
||||
|
||||
const previewUrl = getDatasetImagePreviewUrl({
|
||||
imageId,
|
||||
teamId,
|
||||
datasetId,
|
||||
expiredMinutes: 60 * 24 * 7 // 7 days
|
||||
});
|
||||
|
||||
return {
|
||||
q: ``,
|
||||
a,
|
||||
imagePreivewUrl: previewUrl
|
||||
};
|
||||
};
|
||||
|
||||
export const getFormatDatasetCiteList = (list: DatasetDataSchemaType[]) => {
|
||||
return list.map<DatasetCiteItemType>((item) => ({
|
||||
_id: item._id,
|
||||
...formatDatasetDataValue({
|
||||
teamId: item.teamId,
|
||||
datasetId: item.datasetId,
|
||||
q: item.q,
|
||||
a: item.a,
|
||||
imageId: item.imageId
|
||||
}),
|
||||
history: item.history,
|
||||
updateTime: item.updateTime,
|
||||
index: item.chunkIndex
|
||||
}));
|
||||
};
|
||||
@@ -37,8 +37,7 @@ const DatasetDataSchema = new Schema({
|
||||
required: true
|
||||
},
|
||||
a: {
|
||||
type: String,
|
||||
default: ''
|
||||
type: String
|
||||
},
|
||||
history: {
|
||||
type: [
|
||||
@@ -74,6 +73,9 @@ const DatasetDataSchema = new Schema({
|
||||
default: []
|
||||
},
|
||||
|
||||
imageId: {
|
||||
type: String
|
||||
},
|
||||
updateTime: {
|
||||
type: Date,
|
||||
default: () => new Date()
|
||||
|
||||
166
packages/service/core/dataset/image/controller.ts
Normal file
@@ -0,0 +1,166 @@
|
||||
import { addMinutes } from 'date-fns';
|
||||
import { bucketName, MongoDatasetImageSchema } from './schema';
|
||||
import { connectionMongo, Types } from '../../../common/mongo';
|
||||
import fs from 'fs';
|
||||
import type { FileType } from '../../../common/file/multer';
|
||||
import fsp from 'fs/promises';
|
||||
import { computeGridFsChunSize } from '../../../common/file/gridfs/utils';
|
||||
import { setCron } from '../../../common/system/cron';
|
||||
import { checkTimerLock } from '../../../common/system/timerLock/utils';
|
||||
import { TimerIdEnum } from '../../../common/system/timerLock/constants';
|
||||
import { addLog } from '../../../common/system/log';
|
||||
|
||||
const getGridBucket = () => {
|
||||
return new connectionMongo.mongo.GridFSBucket(connectionMongo.connection.db!, {
|
||||
bucketName: bucketName
|
||||
});
|
||||
};
|
||||
|
||||
export const createDatasetImage = async ({
|
||||
teamId,
|
||||
datasetId,
|
||||
file,
|
||||
expiredTime = addMinutes(new Date(), 30)
|
||||
}: {
|
||||
teamId: string;
|
||||
datasetId: string;
|
||||
file: FileType;
|
||||
expiredTime?: Date;
|
||||
}): Promise<{ imageId: string; previewUrl: string }> => {
|
||||
const path = file.path;
|
||||
const gridBucket = getGridBucket();
|
||||
const metadata = {
|
||||
teamId: String(teamId),
|
||||
datasetId: String(datasetId),
|
||||
expiredTime
|
||||
};
|
||||
|
||||
const stats = await fsp.stat(path);
|
||||
if (!stats.isFile()) return Promise.reject(`${path} is not a file`);
|
||||
|
||||
const readStream = fs.createReadStream(path, {
|
||||
highWaterMark: 256 * 1024
|
||||
});
|
||||
const chunkSizeBytes = computeGridFsChunSize(stats.size);
|
||||
|
||||
const stream = gridBucket.openUploadStream(file.originalname, {
|
||||
metadata,
|
||||
contentType: file.mimetype,
|
||||
chunkSizeBytes
|
||||
});
|
||||
|
||||
// save to gridfs
|
||||
await new Promise((resolve, reject) => {
|
||||
readStream
|
||||
.pipe(stream as any)
|
||||
.on('finish', resolve)
|
||||
.on('error', reject);
|
||||
});
|
||||
|
||||
return {
|
||||
imageId: String(stream.id),
|
||||
previewUrl: ''
|
||||
};
|
||||
};
|
||||
|
||||
export const getDatasetImageReadData = async (imageId: string) => {
|
||||
// Get file metadata to get contentType
|
||||
const fileInfo = await MongoDatasetImageSchema.findOne({
|
||||
_id: new Types.ObjectId(imageId)
|
||||
}).lean();
|
||||
if (!fileInfo) {
|
||||
return Promise.reject('Image not found');
|
||||
}
|
||||
|
||||
const gridBucket = getGridBucket();
|
||||
return {
|
||||
stream: gridBucket.openDownloadStream(new Types.ObjectId(imageId)),
|
||||
fileInfo
|
||||
};
|
||||
};
|
||||
export const getDatasetImageBase64 = async (imageId: string) => {
|
||||
// Get file metadata to get contentType
|
||||
const fileInfo = await MongoDatasetImageSchema.findOne({
|
||||
_id: new Types.ObjectId(imageId)
|
||||
}).lean();
|
||||
if (!fileInfo) {
|
||||
return Promise.reject('Image not found');
|
||||
}
|
||||
|
||||
// Get image stream from GridFS
|
||||
const { stream } = await getDatasetImageReadData(imageId);
|
||||
|
||||
// Convert stream to buffer
|
||||
const chunks: Buffer[] = [];
|
||||
|
||||
return new Promise<string>((resolve, reject) => {
|
||||
stream.on('data', (chunk: Buffer) => {
|
||||
chunks.push(chunk);
|
||||
});
|
||||
|
||||
stream.on('end', () => {
|
||||
// Combine all chunks into a single buffer
|
||||
const buffer = Buffer.concat(chunks);
|
||||
// Convert buffer to base64 string
|
||||
const base64 = buffer.toString('base64');
|
||||
const dataUrl = `data:${fileInfo.contentType || 'image/jpeg'};base64,${base64}`;
|
||||
resolve(dataUrl);
|
||||
});
|
||||
|
||||
stream.on('error', reject);
|
||||
});
|
||||
};
|
||||
|
||||
export const deleteDatasetImage = async (imageId: string) => {
|
||||
const gridBucket = getGridBucket();
|
||||
|
||||
try {
|
||||
await gridBucket.delete(new Types.ObjectId(imageId));
|
||||
} catch (error: any) {
|
||||
const msg = error?.message;
|
||||
if (msg.includes('File not found')) {
|
||||
addLog.warn('Delete dataset image error', error);
|
||||
return;
|
||||
} else {
|
||||
return Promise.reject(error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export const clearExpiredDatasetImageCron = async () => {
|
||||
const gridBucket = getGridBucket();
|
||||
const clearExpiredDatasetImages = async () => {
|
||||
addLog.debug('Clear expired dataset image start');
|
||||
|
||||
const data = await MongoDatasetImageSchema.find(
|
||||
{
|
||||
'metadata.expiredTime': { $lt: new Date() }
|
||||
},
|
||||
'_id'
|
||||
).lean();
|
||||
|
||||
for (const item of data) {
|
||||
try {
|
||||
await gridBucket.delete(item._id);
|
||||
} catch (error) {
|
||||
addLog.error('Delete expired dataset image error', error);
|
||||
}
|
||||
}
|
||||
addLog.debug('Clear expired dataset image end');
|
||||
};
|
||||
|
||||
setCron('*/10 * * * *', async () => {
|
||||
if (
|
||||
await checkTimerLock({
|
||||
timerId: TimerIdEnum.clearExpiredDatasetImage,
|
||||
lockMinuted: 9
|
||||
})
|
||||
) {
|
||||
try {
|
||||
await clearExpiredDatasetImages();
|
||||
} catch (error) {
|
||||
addLog.error('clearExpiredDatasetImageCron error', error);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
36
packages/service/core/dataset/image/schema.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import type { Types } from '../../../common/mongo';
|
||||
import { getMongoModel, Schema } from '../../../common/mongo';
|
||||
|
||||
export const bucketName = 'dataset_image';
|
||||
|
||||
const MongoDatasetImage = new Schema({
|
||||
length: { type: Number, required: true },
|
||||
chunkSize: { type: Number, required: true },
|
||||
uploadDate: { type: Date, required: true },
|
||||
filename: { type: String, required: true },
|
||||
contentType: { type: String, required: true },
|
||||
metadata: {
|
||||
teamId: { type: String, required: true },
|
||||
datasetId: { type: String, required: true },
|
||||
collectionId: { type: String },
|
||||
expiredTime: { type: Date, required: true }
|
||||
}
|
||||
});
|
||||
MongoDatasetImage.index({ 'metadata.datasetId': 'hashed' });
|
||||
MongoDatasetImage.index({ 'metadata.collectionId': 'hashed' });
|
||||
MongoDatasetImage.index({ 'metadata.expiredTime': -1 });
|
||||
|
||||
export const MongoDatasetImageSchema = getMongoModel<{
|
||||
_id: Types.ObjectId;
|
||||
length: number;
|
||||
chunkSize: number;
|
||||
uploadDate: Date;
|
||||
filename: string;
|
||||
contentType: string;
|
||||
metadata: {
|
||||
teamId: string;
|
||||
datasetId: string;
|
||||
collectionId: string;
|
||||
expiredTime: Date;
|
||||
};
|
||||
}>(`${bucketName}.files`, MongoDatasetImage);
|
||||
101
packages/service/core/dataset/image/utils.ts
Normal file
@@ -0,0 +1,101 @@
|
||||
import { ERROR_ENUM } from '@fastgpt/global/common/error/errorCode';
|
||||
import { Types, type ClientSession } from '../../../common/mongo';
|
||||
import { deleteDatasetImage } from './controller';
|
||||
import { MongoDatasetImageSchema } from './schema';
|
||||
import { addMinutes } from 'date-fns';
|
||||
import jwt from 'jsonwebtoken';
|
||||
|
||||
export const removeDatasetImageExpiredTime = async ({
|
||||
ids = [],
|
||||
collectionId,
|
||||
session
|
||||
}: {
|
||||
ids?: string[];
|
||||
collectionId: string;
|
||||
session?: ClientSession;
|
||||
}) => {
|
||||
if (ids.length === 0) return;
|
||||
return MongoDatasetImageSchema.updateMany(
|
||||
{
|
||||
_id: {
|
||||
$in: ids
|
||||
.filter((id) => Types.ObjectId.isValid(id))
|
||||
.map((id) => (typeof id === 'string' ? new Types.ObjectId(id) : id))
|
||||
}
|
||||
},
|
||||
{
|
||||
$unset: { 'metadata.expiredTime': '' },
|
||||
$set: {
|
||||
'metadata.collectionId': String(collectionId)
|
||||
}
|
||||
},
|
||||
{ session }
|
||||
);
|
||||
};
|
||||
|
||||
export const getDatasetImagePreviewUrl = ({
|
||||
imageId,
|
||||
teamId,
|
||||
datasetId,
|
||||
expiredMinutes
|
||||
}: {
|
||||
imageId: string;
|
||||
teamId: string;
|
||||
datasetId: string;
|
||||
expiredMinutes: number;
|
||||
}) => {
|
||||
const expiredTime = Math.floor(addMinutes(new Date(), expiredMinutes).getTime() / 1000);
|
||||
|
||||
const key = (process.env.FILE_TOKEN_KEY as string) ?? 'filetoken';
|
||||
const token = jwt.sign(
|
||||
{
|
||||
teamId: String(teamId),
|
||||
datasetId: String(datasetId),
|
||||
exp: expiredTime
|
||||
},
|
||||
key
|
||||
);
|
||||
|
||||
return `/api/core/dataset/image/${imageId}?token=${token}`;
|
||||
};
|
||||
export const authDatasetImagePreviewUrl = (token?: string) =>
|
||||
new Promise<{
|
||||
teamId: string;
|
||||
datasetId: string;
|
||||
}>((resolve, reject) => {
|
||||
if (!token) {
|
||||
return reject(ERROR_ENUM.unAuthFile);
|
||||
}
|
||||
const key = (process.env.FILE_TOKEN_KEY as string) ?? 'filetoken';
|
||||
|
||||
jwt.verify(token, key, (err, decoded: any) => {
|
||||
if (err || !decoded?.teamId || !decoded?.datasetId) {
|
||||
reject(ERROR_ENUM.unAuthFile);
|
||||
return;
|
||||
}
|
||||
resolve({
|
||||
teamId: decoded.teamId,
|
||||
datasetId: decoded.datasetId
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
export const clearDatasetImages = async (datasetIds: string[]) => {
|
||||
const images = await MongoDatasetImageSchema.find(
|
||||
{
|
||||
'metadata.datasetId': { $in: datasetIds.map((item) => String(item)) }
|
||||
},
|
||||
'_id'
|
||||
).lean();
|
||||
await Promise.all(images.map((image) => deleteDatasetImage(String(image._id))));
|
||||
};
|
||||
|
||||
export const clearCollectionImages = async (collectionIds: string[]) => {
|
||||
const images = await MongoDatasetImageSchema.find(
|
||||
{
|
||||
'metadata.collectionId': { $in: collectionIds.map((item) => String(item)) }
|
||||
},
|
||||
'_id'
|
||||
).lean();
|
||||
await Promise.all(images.map((image) => deleteDatasetImage(String(image._id))));
|
||||
};
|
||||
@@ -9,13 +9,9 @@ import { type TextSplitProps, splitText2Chunks } from '@fastgpt/global/common/st
|
||||
import axios from 'axios';
|
||||
import { readRawContentByFileBuffer } from '../../common/file/read/utils';
|
||||
import { parseFileExtensionFromUrl } from '@fastgpt/global/common/string/tools';
|
||||
import {
|
||||
type APIFileServer,
|
||||
type FeishuServer,
|
||||
type YuqueServer
|
||||
} from '@fastgpt/global/core/dataset/apiDataset';
|
||||
import { getApiDatasetRequest } from './apiDataset';
|
||||
import Papa from 'papaparse';
|
||||
import type { ApiDatasetServerType } from '@fastgpt/global/core/dataset/apiDataset/type';
|
||||
|
||||
export const readFileRawTextByUrl = async ({
|
||||
teamId,
|
||||
@@ -69,9 +65,7 @@ export const readDatasetSourceRawText = async ({
|
||||
sourceId,
|
||||
selector,
|
||||
externalFileId,
|
||||
apiServer,
|
||||
feishuServer,
|
||||
yuqueServer,
|
||||
apiDatasetServer,
|
||||
customPdfParse,
|
||||
getFormatText
|
||||
}: {
|
||||
@@ -84,9 +78,7 @@ export const readDatasetSourceRawText = async ({
|
||||
|
||||
selector?: string; // link selector
|
||||
externalFileId?: string; // external file dataset
|
||||
apiServer?: APIFileServer; // api dataset
|
||||
feishuServer?: FeishuServer; // feishu dataset
|
||||
yuqueServer?: YuqueServer; // yuque dataset
|
||||
apiDatasetServer?: ApiDatasetServerType; // api dataset
|
||||
}): Promise<{
|
||||
title?: string;
|
||||
rawText: string;
|
||||
@@ -128,9 +120,7 @@ export const readDatasetSourceRawText = async ({
|
||||
};
|
||||
} else if (type === DatasetSourceReadTypeEnum.apiFile) {
|
||||
const { title, rawText } = await readApiServerFileContent({
|
||||
apiServer,
|
||||
feishuServer,
|
||||
yuqueServer,
|
||||
apiDatasetServer,
|
||||
apiFileId: sourceId,
|
||||
teamId,
|
||||
tmbId
|
||||
@@ -147,17 +137,13 @@ export const readDatasetSourceRawText = async ({
|
||||
};
|
||||
|
||||
export const readApiServerFileContent = async ({
|
||||
apiServer,
|
||||
feishuServer,
|
||||
yuqueServer,
|
||||
apiDatasetServer,
|
||||
apiFileId,
|
||||
teamId,
|
||||
tmbId,
|
||||
customPdfParse
|
||||
}: {
|
||||
apiServer?: APIFileServer;
|
||||
feishuServer?: FeishuServer;
|
||||
yuqueServer?: YuqueServer;
|
||||
apiDatasetServer?: ApiDatasetServerType;
|
||||
apiFileId: string;
|
||||
teamId: string;
|
||||
tmbId: string;
|
||||
@@ -166,13 +152,7 @@ export const readApiServerFileContent = async ({
|
||||
title?: string;
|
||||
rawText: string;
|
||||
}> => {
|
||||
return (
|
||||
await getApiDatasetRequest({
|
||||
apiServer,
|
||||
yuqueServer,
|
||||
feishuServer
|
||||
})
|
||||
).getFileContent({
|
||||
return (await getApiDatasetRequest(apiDatasetServer)).getFileContent({
|
||||
teamId,
|
||||
tmbId,
|
||||
apiFileId,
|
||||
@@ -186,9 +166,11 @@ export const rawText2Chunks = ({
|
||||
chunkTriggerMinSize = 1000,
|
||||
backupParse,
|
||||
chunkSize = 512,
|
||||
imageIdList,
|
||||
...splitProps
|
||||
}: {
|
||||
rawText: string;
|
||||
imageIdList?: string[];
|
||||
|
||||
chunkTriggerType?: ChunkTriggerConfigTypeEnum;
|
||||
chunkTriggerMinSize?: number; // maxSize from agent model, not store
|
||||
@@ -199,6 +181,7 @@ export const rawText2Chunks = ({
|
||||
q: string;
|
||||
a: string;
|
||||
indexes?: string[];
|
||||
imageIdList?: string[];
|
||||
}[] => {
|
||||
const parseDatasetBackup2Chunks = (rawText: string) => {
|
||||
const csvArr = Papa.parse(rawText).data as string[][];
|
||||
@@ -209,7 +192,8 @@ export const rawText2Chunks = ({
|
||||
.map((item) => ({
|
||||
q: item[0] || '',
|
||||
a: item[1] || '',
|
||||
indexes: item.slice(2)
|
||||
indexes: item.slice(2),
|
||||
imageIdList
|
||||
}))
|
||||
.filter((item) => item.q || item.a);
|
||||
|
||||
@@ -231,7 +215,8 @@ export const rawText2Chunks = ({
|
||||
return [
|
||||
{
|
||||
q: rawText,
|
||||
a: ''
|
||||
a: '',
|
||||
imageIdList
|
||||
}
|
||||
];
|
||||
}
|
||||
@@ -240,7 +225,7 @@ export const rawText2Chunks = ({
|
||||
if (chunkTriggerType !== ChunkTriggerConfigTypeEnum.forceChunk) {
|
||||
const textLength = rawText.trim().length;
|
||||
if (textLength < chunkTriggerMinSize) {
|
||||
return [{ q: rawText, a: '' }];
|
||||
return [{ q: rawText, a: '', imageIdList }];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -253,6 +238,7 @@ export const rawText2Chunks = ({
|
||||
return chunks.map((item) => ({
|
||||
q: item,
|
||||
a: '',
|
||||
indexes: []
|
||||
indexes: [],
|
||||
imageIdList
|
||||
}));
|
||||
};
|
||||
|
||||
@@ -127,14 +127,16 @@ const DatasetSchema = new Schema({
|
||||
type: Boolean,
|
||||
default: true
|
||||
},
|
||||
apiServer: Object,
|
||||
feishuServer: Object,
|
||||
yuqueServer: Object,
|
||||
|
||||
apiDatasetServer: Object,
|
||||
|
||||
// abandoned
|
||||
autoSync: Boolean,
|
||||
externalReadUrl: String,
|
||||
defaultPermission: Number
|
||||
defaultPermission: Number,
|
||||
apiServer: Object,
|
||||
feishuServer: Object,
|
||||
yuqueServer: Object
|
||||
});
|
||||
|
||||
try {
|
||||
|
||||
@@ -28,6 +28,7 @@ import type { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
|
||||
import { datasetSearchQueryExtension } from './utils';
|
||||
import type { RerankModelItemType } from '@fastgpt/global/core/ai/model.d';
|
||||
import { addLog } from '../../../common/system/log';
|
||||
import { formatDatasetDataValue } from '../data/controller';
|
||||
|
||||
export type SearchDatasetDataProps = {
|
||||
histories: ChatItemType[];
|
||||
@@ -175,6 +176,12 @@ export async function searchDatasetData(
|
||||
collectionFilterMatch
|
||||
} = props;
|
||||
|
||||
// Constants data
|
||||
const datasetDataSelectField =
|
||||
'_id datasetId collectionId updateTime q a imageId chunkIndex indexes';
|
||||
const datsaetCollectionSelectField =
|
||||
'_id name fileId rawLink apiFileId externalFileId externalFileUrl';
|
||||
|
||||
/* init params */
|
||||
searchMode = DatasetSearchModeMap[searchMode] ? searchMode : DatasetSearchModeEnum.embedding;
|
||||
usingReRank = usingReRank && !!getDefaultRerankModel();
|
||||
@@ -463,14 +470,14 @@ export async function searchDatasetData(
|
||||
collectionId: { $in: collectionIdList },
|
||||
'indexes.dataId': { $in: results.map((item) => item.id?.trim()) }
|
||||
},
|
||||
'_id datasetId collectionId updateTime q a chunkIndex indexes',
|
||||
datasetDataSelectField,
|
||||
{ ...readFromSecondary }
|
||||
).lean(),
|
||||
MongoDatasetCollection.find(
|
||||
{
|
||||
_id: { $in: collectionIdList }
|
||||
},
|
||||
'_id name fileId rawLink apiFileId externalFileId externalFileUrl',
|
||||
datsaetCollectionSelectField,
|
||||
{ ...readFromSecondary }
|
||||
).lean()
|
||||
]);
|
||||
@@ -494,8 +501,13 @@ export async function searchDatasetData(
|
||||
const result: SearchDataResponseItemType = {
|
||||
id: String(data._id),
|
||||
updateTime: data.updateTime,
|
||||
q: data.q,
|
||||
a: data.a,
|
||||
...formatDatasetDataValue({
|
||||
teamId,
|
||||
datasetId: data.datasetId,
|
||||
q: data.q,
|
||||
a: data.a,
|
||||
imageId: data.imageId
|
||||
}),
|
||||
chunkIndex: data.chunkIndex,
|
||||
datasetId: String(data.datasetId),
|
||||
collectionId: String(data.collectionId),
|
||||
@@ -597,14 +609,14 @@ export async function searchDatasetData(
|
||||
{
|
||||
_id: { $in: searchResults.map((item) => item.dataId) }
|
||||
},
|
||||
'_id datasetId collectionId updateTime q a chunkIndex indexes',
|
||||
datasetDataSelectField,
|
||||
{ ...readFromSecondary }
|
||||
).lean(),
|
||||
MongoDatasetCollection.find(
|
||||
{
|
||||
_id: { $in: searchResults.map((item) => item.collectionId) }
|
||||
},
|
||||
'_id name fileId rawLink apiFileId externalFileId externalFileUrl',
|
||||
datsaetCollectionSelectField,
|
||||
{ ...readFromSecondary }
|
||||
).lean()
|
||||
]);
|
||||
@@ -630,8 +642,13 @@ export async function searchDatasetData(
|
||||
datasetId: String(data.datasetId),
|
||||
collectionId: String(data.collectionId),
|
||||
updateTime: data.updateTime,
|
||||
q: data.q,
|
||||
a: data.a,
|
||||
...formatDatasetDataValue({
|
||||
teamId,
|
||||
datasetId: data.datasetId,
|
||||
q: data.q,
|
||||
a: data.a,
|
||||
imageId: data.imageId
|
||||
}),
|
||||
chunkIndex: data.chunkIndex,
|
||||
indexes: data.indexes,
|
||||
...getCollectionSourceData(collection),
|
||||
|
||||
@@ -12,10 +12,7 @@ import { getCollectionWithDataset } from '../controller';
|
||||
import { mongoSessionRun } from '../../../common/mongo/sessionRun';
|
||||
import { type PushDataToTrainingQueueProps } from '@fastgpt/global/core/dataset/training/type';
|
||||
import { i18nT } from '../../../../web/i18n/utils';
|
||||
import {
|
||||
getLLMDefaultChunkSize,
|
||||
getLLMMaxChunkSize
|
||||
} from '../../../../global/core/dataset/training/utils';
|
||||
import { getLLMMaxChunkSize } from '../../../../global/core/dataset/training/utils';
|
||||
|
||||
export const lockTrainingDataByTeamId = async (teamId: string): Promise<any> => {
|
||||
try {
|
||||
@@ -65,7 +62,7 @@ export async function pushDataListToTrainingQueue({
|
||||
const getImageChunkMode = (data: PushDatasetDataChunkProps, mode: TrainingModeEnum) => {
|
||||
if (mode !== TrainingModeEnum.image) return mode;
|
||||
// 检查内容中,是否包含  的图片格式
|
||||
const text = data.q + data.a || '';
|
||||
const text = (data.q || '') + (data.a || '');
|
||||
const regex = /!\[\]\((.*?)\)/g;
|
||||
const match = text.match(regex);
|
||||
if (match) {
|
||||
@@ -82,9 +79,6 @@ export async function pushDataListToTrainingQueue({
|
||||
if (!agentModelData) {
|
||||
return Promise.reject(i18nT('common:error_llm_not_config'));
|
||||
}
|
||||
if (mode === TrainingModeEnum.chunk || mode === TrainingModeEnum.auto) {
|
||||
prompt = undefined;
|
||||
}
|
||||
|
||||
const { model, maxToken, weight } = await (async () => {
|
||||
if (mode === TrainingModeEnum.chunk) {
|
||||
@@ -101,7 +95,7 @@ export async function pushDataListToTrainingQueue({
|
||||
weight: 0
|
||||
};
|
||||
}
|
||||
if (mode === TrainingModeEnum.image) {
|
||||
if (mode === TrainingModeEnum.image || mode === TrainingModeEnum.imageParse) {
|
||||
const vllmModelData = getVlmModel(vlmModel);
|
||||
if (!vllmModelData) {
|
||||
return Promise.reject(i18nT('common:error_vlm_not_config'));
|
||||
@@ -117,11 +111,9 @@ export async function pushDataListToTrainingQueue({
|
||||
})();
|
||||
|
||||
// filter repeat or equal content
|
||||
const set = new Set();
|
||||
const filterResult: Record<string, PushDatasetDataChunkProps[]> = {
|
||||
success: [],
|
||||
overToken: [],
|
||||
repeat: [],
|
||||
error: []
|
||||
};
|
||||
|
||||
@@ -140,7 +132,7 @@ export async function pushDataListToTrainingQueue({
|
||||
.filter(Boolean);
|
||||
|
||||
// filter repeat content
|
||||
if (!item.q) {
|
||||
if (!item.imageId && !item.q) {
|
||||
filterResult.error.push(item);
|
||||
return;
|
||||
}
|
||||
@@ -153,32 +145,26 @@ export async function pushDataListToTrainingQueue({
|
||||
return;
|
||||
}
|
||||
|
||||
if (set.has(text)) {
|
||||
filterResult.repeat.push(item);
|
||||
} else {
|
||||
filterResult.success.push(item);
|
||||
set.add(text);
|
||||
}
|
||||
filterResult.success.push(item);
|
||||
});
|
||||
|
||||
// insert data to db
|
||||
const insertLen = filterResult.success.length;
|
||||
const failedDocuments: PushDatasetDataChunkProps[] = [];
|
||||
|
||||
// 使用 insertMany 批量插入
|
||||
const batchSize = 200;
|
||||
const batchSize = 500;
|
||||
const insertData = async (startIndex: number, session: ClientSession) => {
|
||||
const list = filterResult.success.slice(startIndex, startIndex + batchSize);
|
||||
|
||||
if (list.length === 0) return;
|
||||
|
||||
try {
|
||||
await MongoDatasetTraining.insertMany(
|
||||
const result = await MongoDatasetTraining.insertMany(
|
||||
list.map((item) => ({
|
||||
teamId,
|
||||
tmbId,
|
||||
datasetId,
|
||||
collectionId,
|
||||
datasetId: datasetId,
|
||||
collectionId: collectionId,
|
||||
billId,
|
||||
mode: getImageChunkMode(item, mode),
|
||||
prompt,
|
||||
@@ -189,25 +175,25 @@ export async function pushDataListToTrainingQueue({
|
||||
indexSize,
|
||||
weight: weight ?? 0,
|
||||
indexes: item.indexes,
|
||||
retryCount: 5
|
||||
retryCount: 5,
|
||||
...(item.imageId ? { imageId: item.imageId } : {})
|
||||
})),
|
||||
{
|
||||
session,
|
||||
ordered: true
|
||||
ordered: false,
|
||||
rawResult: true,
|
||||
includeResultMetadata: false // 进一步减少返回数据
|
||||
}
|
||||
);
|
||||
|
||||
if (result.insertedCount !== list.length) {
|
||||
return Promise.reject(`Insert data error, ${JSON.stringify(result)}`);
|
||||
}
|
||||
} catch (error: any) {
|
||||
addLog.error(`Insert error`, error);
|
||||
// 如果有错误,将失败的文档添加到失败列表中
|
||||
error.writeErrors?.forEach((writeError: any) => {
|
||||
failedDocuments.push(data[writeError.index]);
|
||||
});
|
||||
console.log('failed', failedDocuments);
|
||||
return Promise.reject(error);
|
||||
}
|
||||
|
||||
// 对于失败的文档,尝试单独插入
|
||||
await MongoDatasetTraining.create(failedDocuments, { session });
|
||||
|
||||
return insertData(startIndex + batchSize, session);
|
||||
};
|
||||
|
||||
@@ -222,7 +208,6 @@ export async function pushDataListToTrainingQueue({
|
||||
delete filterResult.success;
|
||||
|
||||
return {
|
||||
insertLen,
|
||||
...filterResult
|
||||
insertLen
|
||||
};
|
||||
}
|
||||
|
||||
@@ -99,6 +99,9 @@ const TrainingDataSchema = new Schema({
|
||||
],
|
||||
default: []
|
||||
},
|
||||
imageId: {
|
||||
type: String
|
||||
},
|
||||
|
||||
errorMsg: String
|
||||
});
|
||||
|
||||
@@ -358,7 +358,7 @@ async function filterDatasetQuote({
|
||||
return replaceVariable(quoteTemplate, {
|
||||
id: item.id,
|
||||
q: item.q,
|
||||
a: item.a,
|
||||
a: item.a || '',
|
||||
updateTime: formatTime2YMDHM(item.updateTime),
|
||||
source: item.sourceName,
|
||||
sourceId: String(item.sourceId || ''),
|
||||
|
||||
@@ -2,6 +2,7 @@ import { OperationLogEventEnum } from '@fastgpt/global/support/operationLog/cons
|
||||
import { i18nT } from '../../../web/i18n/utils';
|
||||
|
||||
export const operationLogMap = {
|
||||
//Team
|
||||
[OperationLogEventEnum.LOGIN]: {
|
||||
content: i18nT('account_team:log_login'),
|
||||
typeLabel: i18nT('account_team:login'),
|
||||
@@ -66,6 +67,309 @@ export const operationLogMap = {
|
||||
content: i18nT('account_team:log_assign_permission'),
|
||||
typeLabel: i18nT('account_team:assign_permission'),
|
||||
params: {} as { name?: string; objectName: string; permission: string }
|
||||
},
|
||||
//APP
|
||||
[OperationLogEventEnum.CREATE_APP]: {
|
||||
content: i18nT('account_team:log_create_app'),
|
||||
typeLabel: i18nT('account_team:create_app'),
|
||||
params: {} as { name?: string; appName: string; appType: string }
|
||||
},
|
||||
[OperationLogEventEnum.UPDATE_APP_INFO]: {
|
||||
content: i18nT('account_team:log_update_app_info'),
|
||||
typeLabel: i18nT('account_team:update_app_info'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
appName: string;
|
||||
newItemNames: string[];
|
||||
newItemValues: string[];
|
||||
appType: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.MOVE_APP]: {
|
||||
content: i18nT('account_team:log_move_app'),
|
||||
typeLabel: i18nT('account_team:move_app'),
|
||||
params: {} as { name?: string; appName: string; targetFolderName: string; appType: string }
|
||||
},
|
||||
[OperationLogEventEnum.DELETE_APP]: {
|
||||
content: i18nT('account_team:log_delete_app'),
|
||||
typeLabel: i18nT('account_team:delete_app'),
|
||||
params: {} as { name?: string; appName: string; appType: string }
|
||||
},
|
||||
[OperationLogEventEnum.UPDATE_APP_COLLABORATOR]: {
|
||||
content: i18nT('account_team:log_update_app_collaborator'),
|
||||
typeLabel: i18nT('account_team:update_app_collaborator'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
appName: string;
|
||||
appType: string;
|
||||
tmbList: string[];
|
||||
groupList: string[];
|
||||
orgList: string[];
|
||||
permission: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.DELETE_APP_COLLABORATOR]: {
|
||||
content: i18nT('account_team:log_delete_app_collaborator'),
|
||||
typeLabel: i18nT('account_team:delete_app_collaborator'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
appName: string;
|
||||
appType: string;
|
||||
itemName: string;
|
||||
itemValueName: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.TRANSFER_APP_OWNERSHIP]: {
|
||||
content: i18nT('account_team:log_transfer_app_ownership'),
|
||||
typeLabel: i18nT('account_team:transfer_app_ownership'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
appName: string;
|
||||
appType: string;
|
||||
oldOwnerName: string;
|
||||
newOwnerName: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.CREATE_APP_COPY]: {
|
||||
content: i18nT('account_team:log_create_app_copy'),
|
||||
typeLabel: i18nT('account_team:create_app_copy'),
|
||||
params: {} as { name?: string; appName: string; appType: string }
|
||||
},
|
||||
[OperationLogEventEnum.CREATE_APP_FOLDER]: {
|
||||
content: i18nT('account_team:log_create_app_folder'),
|
||||
typeLabel: i18nT('account_team:create_app_folder'),
|
||||
params: {} as { name?: string; folderName: string }
|
||||
},
|
||||
[OperationLogEventEnum.UPDATE_PUBLISH_APP]: {
|
||||
content: i18nT('account_team:log_update_publish_app'),
|
||||
typeLabel: i18nT('account_team:update_publish_app'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
operationName: string;
|
||||
appName: string;
|
||||
appId: string;
|
||||
appType: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.CREATE_APP_PUBLISH_CHANNEL]: {
|
||||
content: i18nT('account_team:log_create_app_publish_channel'),
|
||||
typeLabel: i18nT('account_team:create_app_publish_channel'),
|
||||
params: {} as { name?: string; appName: string; channelName: string; appType: string }
|
||||
},
|
||||
[OperationLogEventEnum.UPDATE_APP_PUBLISH_CHANNEL]: {
|
||||
content: i18nT('account_team:log_update_app_publish_channel'),
|
||||
typeLabel: i18nT('account_team:update_app_publish_channel'),
|
||||
params: {} as { name?: string; appName: string; channelName: string; appType: string }
|
||||
},
|
||||
[OperationLogEventEnum.DELETE_APP_PUBLISH_CHANNEL]: {
|
||||
content: i18nT('account_team:log_delete_app_publish_channel'),
|
||||
typeLabel: i18nT('account_team:delete_app_publish_channel'),
|
||||
params: {} as { name?: string; appName: string; channelName: string; appType: string }
|
||||
},
|
||||
[OperationLogEventEnum.EXPORT_APP_CHAT_LOG]: {
|
||||
content: i18nT('account_team:log_export_app_chat_log'),
|
||||
typeLabel: i18nT('account_team:export_app_chat_log'),
|
||||
params: {} as { name?: string; appName: string; appType: string }
|
||||
},
|
||||
//Dataset
|
||||
[OperationLogEventEnum.CREATE_DATASET]: {
|
||||
content: i18nT('account_team:log_create_dataset'),
|
||||
typeLabel: i18nT('account_team:create_dataset'),
|
||||
params: {} as { name?: string; datasetName: string; datasetType: string }
|
||||
},
|
||||
[OperationLogEventEnum.UPDATE_DATASET]: {
|
||||
content: i18nT('account_team:log_update_dataset'),
|
||||
typeLabel: i18nT('account_team:update_dataset'),
|
||||
params: {} as { name?: string; datasetName: string; datasetType: string }
|
||||
},
|
||||
[OperationLogEventEnum.DELETE_DATASET]: {
|
||||
content: i18nT('account_team:log_delete_dataset'),
|
||||
typeLabel: i18nT('account_team:delete_dataset'),
|
||||
params: {} as { name?: string; datasetName: string; datasetType: string }
|
||||
},
|
||||
[OperationLogEventEnum.MOVE_DATASET]: {
|
||||
content: i18nT('account_team:log_move_dataset'),
|
||||
typeLabel: i18nT('account_team:move_dataset'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
datasetName: string;
|
||||
targetFolderName: string;
|
||||
datasetType: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.UPDATE_DATASET_COLLABORATOR]: {
|
||||
content: i18nT('account_team:log_update_dataset_collaborator'),
|
||||
typeLabel: i18nT('account_team:update_dataset_collaborator'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
datasetName: string;
|
||||
datasetType: string;
|
||||
tmbList: string[];
|
||||
groupList: string[];
|
||||
orgList: string[];
|
||||
permission: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.DELETE_DATASET_COLLABORATOR]: {
|
||||
content: i18nT('account_team:log_delete_dataset_collaborator'),
|
||||
typeLabel: i18nT('account_team:delete_dataset_collaborator'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
datasetName: string;
|
||||
datasetType: string;
|
||||
itemName: string;
|
||||
itemValueName: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.TRANSFER_DATASET_OWNERSHIP]: {
|
||||
content: i18nT('account_team:log_transfer_dataset_ownership'),
|
||||
typeLabel: i18nT('account_team:transfer_dataset_ownership'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
datasetName: string;
|
||||
datasetType: string;
|
||||
oldOwnerName: string;
|
||||
newOwnerName: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.EXPORT_DATASET]: {
|
||||
content: i18nT('account_team:log_export_dataset'),
|
||||
typeLabel: i18nT('account_team:export_dataset'),
|
||||
params: {} as { name?: string; datasetName: string; datasetType: string }
|
||||
},
|
||||
[OperationLogEventEnum.CREATE_DATASET_FOLDER]: {
|
||||
content: i18nT('account_team:log_create_dataset_folder'),
|
||||
typeLabel: i18nT('account_team:create_dataset_folder'),
|
||||
params: {} as { name?: string; folderName: string }
|
||||
},
|
||||
//Collection
|
||||
[OperationLogEventEnum.CREATE_COLLECTION]: {
|
||||
content: i18nT('account_team:log_create_collection'),
|
||||
typeLabel: i18nT('account_team:create_collection'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
collectionName: string;
|
||||
datasetName: string;
|
||||
datasetType: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.UPDATE_COLLECTION]: {
|
||||
content: i18nT('account_team:log_update_collection'),
|
||||
typeLabel: i18nT('account_team:update_collection'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
collectionName: string;
|
||||
datasetName: string;
|
||||
datasetType: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.DELETE_COLLECTION]: {
|
||||
content: i18nT('account_team:log_delete_collection'),
|
||||
typeLabel: i18nT('account_team:delete_collection'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
collectionName: string;
|
||||
datasetName: string;
|
||||
datasetType: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.RETRAIN_COLLECTION]: {
|
||||
content: i18nT('account_team:log_retrain_collection'),
|
||||
typeLabel: i18nT('account_team:retrain_collection'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
collectionName: string;
|
||||
datasetName: string;
|
||||
datasetType: string;
|
||||
}
|
||||
},
|
||||
//Data
|
||||
[OperationLogEventEnum.CREATE_DATA]: {
|
||||
content: i18nT('account_team:log_create_data'),
|
||||
typeLabel: i18nT('account_team:create_data'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
collectionName: string;
|
||||
datasetName: string;
|
||||
datasetType: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.UPDATE_DATA]: {
|
||||
content: i18nT('account_team:log_update_data'),
|
||||
typeLabel: i18nT('account_team:update_data'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
collectionName: string;
|
||||
datasetName: string;
|
||||
datasetType: string;
|
||||
}
|
||||
},
|
||||
[OperationLogEventEnum.DELETE_DATA]: {
|
||||
content: i18nT('account_team:log_delete_data'),
|
||||
typeLabel: i18nT('account_team:delete_data'),
|
||||
params: {} as {
|
||||
name?: string;
|
||||
collectionName: string;
|
||||
datasetName: string;
|
||||
datasetType: string;
|
||||
}
|
||||
},
|
||||
//SearchTest
|
||||
[OperationLogEventEnum.SEARCH_TEST]: {
|
||||
content: i18nT('account_team:log_search_test'),
|
||||
typeLabel: i18nT('account_team:search_test'),
|
||||
params: {} as { name?: string; datasetName: string; datasetType: string }
|
||||
},
|
||||
//Account
|
||||
[OperationLogEventEnum.CHANGE_PASSWORD]: {
|
||||
content: i18nT('account_team:log_change_password'),
|
||||
typeLabel: i18nT('account_team:change_password'),
|
||||
params: {} as { name?: string }
|
||||
},
|
||||
[OperationLogEventEnum.CHANGE_NOTIFICATION_SETTINGS]: {
|
||||
content: i18nT('account_team:log_change_notification_settings'),
|
||||
typeLabel: i18nT('account_team:change_notification_settings'),
|
||||
params: {} as { name?: string }
|
||||
},
|
||||
[OperationLogEventEnum.CHANGE_MEMBER_NAME_ACCOUNT]: {
|
||||
content: i18nT('account_team:log_change_member_name_self'),
|
||||
typeLabel: i18nT('account_team:change_member_name_self'),
|
||||
params: {} as { name?: string; oldName: string; newName: string }
|
||||
},
|
||||
[OperationLogEventEnum.PURCHASE_PLAN]: {
|
||||
content: i18nT('account_team:log_purchase_plan'),
|
||||
typeLabel: i18nT('account_team:purchase_plan'),
|
||||
params: {} as { name?: string }
|
||||
},
|
||||
[OperationLogEventEnum.EXPORT_BILL_RECORDS]: {
|
||||
content: i18nT('account_team:log_export_bill_records'),
|
||||
typeLabel: i18nT('account_team:export_bill_records'),
|
||||
params: {} as { name?: string }
|
||||
},
|
||||
[OperationLogEventEnum.CREATE_INVOICE]: {
|
||||
content: i18nT('account_team:log_create_invoice'),
|
||||
typeLabel: i18nT('account_team:create_invoice'),
|
||||
params: {} as { name?: string }
|
||||
},
|
||||
[OperationLogEventEnum.SET_INVOICE_HEADER]: {
|
||||
content: i18nT('account_team:log_set_invoice_header'),
|
||||
typeLabel: i18nT('account_team:set_invoice_header'),
|
||||
params: {} as { name?: string }
|
||||
},
|
||||
[OperationLogEventEnum.CREATE_API_KEY]: {
|
||||
content: i18nT('account_team:log_create_api_key'),
|
||||
typeLabel: i18nT('account_team:create_api_key'),
|
||||
params: {} as { name?: string; keyName: string }
|
||||
},
|
||||
[OperationLogEventEnum.UPDATE_API_KEY]: {
|
||||
content: i18nT('account_team:log_update_api_key'),
|
||||
typeLabel: i18nT('account_team:update_api_key'),
|
||||
params: {} as { name?: string; keyName: string }
|
||||
},
|
||||
[OperationLogEventEnum.DELETE_API_KEY]: {
|
||||
content: i18nT('account_team:log_delete_api_key'),
|
||||
typeLabel: i18nT('account_team:delete_api_key'),
|
||||
params: {} as { name?: string; keyName: string }
|
||||
}
|
||||
} as const;
|
||||
|
||||
|
||||
36
packages/service/support/operationLog/util.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
|
||||
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
|
||||
import { i18nT } from '../../../web/i18n/utils';
|
||||
|
||||
export function getI18nAppType(type: AppTypeEnum): string {
|
||||
if (type === AppTypeEnum.folder) return i18nT('account_team:type.Folder');
|
||||
if (type === AppTypeEnum.simple) return i18nT('account_team:type.Simple bot');
|
||||
if (type === AppTypeEnum.workflow) return i18nT('account_team:type.Workflow bot');
|
||||
if (type === AppTypeEnum.plugin) return i18nT('account_team:type.Plugin');
|
||||
if (type === AppTypeEnum.httpPlugin) return i18nT('account_team:type.Http plugin');
|
||||
if (type === AppTypeEnum.toolSet) return i18nT('account_team:type.Tool set');
|
||||
if (type === AppTypeEnum.tool) return i18nT('account_team:type.Tool');
|
||||
return i18nT('common:UnKnow');
|
||||
}
|
||||
|
||||
export function getI18nCollaboratorItemType(
|
||||
tmbId: string | undefined,
|
||||
groupId: string | undefined,
|
||||
orgId: string | undefined
|
||||
): string {
|
||||
if (tmbId) return i18nT('account_team:member');
|
||||
if (groupId) return i18nT('account_team:group');
|
||||
if (orgId) return i18nT('account_team:department');
|
||||
return i18nT('common:UnKnow');
|
||||
}
|
||||
|
||||
export function getI18nDatasetType(type: DatasetTypeEnum | string): string {
|
||||
if (type === DatasetTypeEnum.folder) return i18nT('account_team:dataset.folder_dataset');
|
||||
if (type === DatasetTypeEnum.dataset) return i18nT('account_team:dataset.common_dataset');
|
||||
if (type === DatasetTypeEnum.websiteDataset) return i18nT('account_team:dataset.website_dataset');
|
||||
if (type === DatasetTypeEnum.externalFile) return i18nT('account_team:dataset.external_file');
|
||||
if (type === DatasetTypeEnum.apiDataset) return i18nT('account_team:dataset.api_file');
|
||||
if (type === DatasetTypeEnum.feishu) return i18nT('account_team:dataset.feishu_dataset');
|
||||
if (type === DatasetTypeEnum.yuque) return i18nT('account_team:dataset.yuque_dataset');
|
||||
return i18nT('common:UnKnow');
|
||||
}
|
||||
@@ -16,6 +16,7 @@ import { type AuthModeType, type AuthResponseType } from '../type';
|
||||
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
|
||||
import { type ParentIdType } from '@fastgpt/global/common/parentFolder/type';
|
||||
import { DatasetDefaultPermissionVal } from '@fastgpt/global/support/permission/dataset/constant';
|
||||
import { getDatasetImagePreviewUrl } from '../../../core/dataset/image/utils';
|
||||
|
||||
export const authDatasetByTmbId = async ({
|
||||
tmbId,
|
||||
@@ -267,6 +268,15 @@ export async function authDatasetData({
|
||||
updateTime: datasetData.updateTime,
|
||||
q: datasetData.q,
|
||||
a: datasetData.a,
|
||||
imageId: datasetData.imageId,
|
||||
imagePreivewUrl: datasetData.imageId
|
||||
? getDatasetImagePreviewUrl({
|
||||
imageId: datasetData.imageId,
|
||||
teamId: datasetData.teamId,
|
||||
datasetId: datasetData.datasetId,
|
||||
expiredMinutes: 30
|
||||
})
|
||||
: undefined,
|
||||
chunkIndex: datasetData.chunkIndex,
|
||||
indexes: datasetData.indexes,
|
||||
datasetId: String(datasetData.datasetId),
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { getWorkerController, WorkerNameEnum } from './utils';
|
||||
|
||||
export const preLoadWorker = async () => {
|
||||
const max = Number(global.systemEnv?.tokenWorkers || 30);
|
||||
const max = Math.min(Number(global.systemEnv?.tokenWorkers || 30), 100);
|
||||
const workerController = getWorkerController({
|
||||
name: WorkerNameEnum.countGptMessagesTokens,
|
||||
maxReservedThreads: max
|
||||
|
||||
@@ -220,9 +220,11 @@ export const iconPaths = {
|
||||
import('./icons/core/dataset/feishuDatasetOutline.svg'),
|
||||
'core/dataset/fileCollection': () => import('./icons/core/dataset/fileCollection.svg'),
|
||||
'core/dataset/fullTextRecall': () => import('./icons/core/dataset/fullTextRecall.svg'),
|
||||
'core/dataset/imageFill': () => import('./icons/core/dataset/imageFill.svg'),
|
||||
'core/dataset/manualCollection': () => import('./icons/core/dataset/manualCollection.svg'),
|
||||
'core/dataset/mixedRecall': () => import('./icons/core/dataset/mixedRecall.svg'),
|
||||
'core/dataset/modeEmbedding': () => import('./icons/core/dataset/modeEmbedding.svg'),
|
||||
'core/dataset/otherDataset': () => import('./icons/core/dataset/otherDataset.svg'),
|
||||
'core/dataset/questionExtension': () => import('./icons/core/dataset/questionExtension.svg'),
|
||||
'core/dataset/rerank': () => import('./icons/core/dataset/rerank.svg'),
|
||||
'core/dataset/searchfilter': () => import('./icons/core/dataset/searchfilter.svg'),
|
||||
@@ -230,7 +232,6 @@ export const iconPaths = {
|
||||
'core/dataset/tableCollection': () => import('./icons/core/dataset/tableCollection.svg'),
|
||||
'core/dataset/tag': () => import('./icons/core/dataset/tag.svg'),
|
||||
'core/dataset/websiteDataset': () => import('./icons/core/dataset/websiteDataset.svg'),
|
||||
'core/dataset/otherDataset': () => import('./icons/core/dataset/otherDataset.svg'),
|
||||
'core/dataset/websiteDatasetColor': () => import('./icons/core/dataset/websiteDatasetColor.svg'),
|
||||
'core/dataset/websiteDatasetOutline': () =>
|
||||
import('./icons/core/dataset/websiteDatasetOutline.svg'),
|
||||
@@ -287,6 +288,7 @@ export const iconPaths = {
|
||||
'core/workflow/template/aiChat': () => import('./icons/core/workflow/template/aiChat.svg'),
|
||||
'core/workflow/template/baseChart': () => import('./icons/core/workflow/template/baseChart.svg'),
|
||||
'core/workflow/template/bing': () => import('./icons/core/workflow/template/bing.svg'),
|
||||
'core/workflow/template/bocha': () => import('./icons/core/workflow/template/bocha.svg'),
|
||||
'core/workflow/template/codeRun': () => import('./icons/core/workflow/template/codeRun.svg'),
|
||||
'core/workflow/template/customFeedback': () =>
|
||||
import('./icons/core/workflow/template/customFeedback.svg'),
|
||||
@@ -378,10 +380,12 @@ export const iconPaths = {
|
||||
fullScreen: () => import('./icons/fullScreen.svg'),
|
||||
help: () => import('./icons/help.svg'),
|
||||
history: () => import('./icons/history.svg'),
|
||||
image: () => import('./icons/image.svg'),
|
||||
infoRounded: () => import('./icons/infoRounded.svg'),
|
||||
kbTest: () => import('./icons/kbTest.svg'),
|
||||
key: () => import('./icons/key.svg'),
|
||||
keyPrimary: () => import('./icons/keyPrimary.svg'),
|
||||
loading: () => import('./icons/loading.svg'),
|
||||
menu: () => import('./icons/menu.svg'),
|
||||
minus: () => import('./icons/minus.svg'),
|
||||
'modal/AddClb': () => import('./icons/modal/AddClb.svg'),
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 21 20" >
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M2.24348 4.15292C1.9165 4.79466 1.9165 5.63474 1.9165 7.31489V12.6852C1.9165 14.3654 1.9165 15.2054 2.24348 15.8472C2.5311 16.4117 2.99005 16.8706 3.55453 17.1582C4.19627 17.4852 5.03635 17.4852 6.7165 17.4852H13.7832C15.4633 17.4852 16.3034 17.4852 16.9451 17.1582C17.5096 16.8706 17.9686 16.4117 18.2562 15.8472C18.5832 15.2054 18.5832 14.3654 18.5832 12.6852V7.31489C18.5832 5.63473 18.5832 4.79466 18.2562 4.15292C17.9686 3.58843 17.5096 3.12949 16.9451 2.84187C16.3034 2.51489 15.4633 2.51489 13.7832 2.51489H6.7165C5.03635 2.51489 4.19627 2.51489 3.55453 2.84187C2.99005 3.12949 2.5311 3.58843 2.24348 4.15292ZM7.88951 6.75656C7.88951 7.67703 7.14331 8.42322 6.22284 8.42322C5.30236 8.42322 4.55617 7.67703 4.55617 6.75656C4.55617 5.83608 5.30236 5.08989 6.22284 5.08989C7.14331 5.08989 7.88951 5.83608 7.88951 6.75656ZM12.8631 8.65525C12.5376 8.32981 12.01 8.32981 11.6845 8.65525L5.92965 14.4101C5.40468 14.9351 5.77648 15.8327 6.5189 15.8327L15.5062 15.8327C16.4267 15.8327 17.1729 15.0865 17.1729 14.1661V13.3103C17.1729 13.0892 17.0851 12.8773 16.9288 12.721L12.8631 8.65525Z" fill="#3370FF"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.2 KiB |
@@ -0,0 +1,5 @@
|
||||
<svg width="113" height="97" viewBox="0 0 113 97" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M0 31.7259C1.80046 29.9255 3.82784 28.3872 5.96621 27.1988C8.10469 26.0103 10.3126 25.1947 12.4634 24.7992C14.6143 24.4037 16.6664 24.4361 18.5022 24.8938C20.2678 25.334 21.7994 26.1604 23.0183 27.3272L23.021 27.3245L47.189 51.4924L33.4778 65.2037L0 31.7259Z" fill="#C4DEFE"/>
|
||||
<path d="M9.15662 11.5625C11.3617 10.2893 13.7181 9.32825 16.0912 8.73374C18.4645 8.13923 20.8082 7.92284 22.9882 8.09751C25.1681 8.27217 27.1419 8.83457 28.7966 9.75182C30.3881 10.6341 31.6537 11.8287 32.529 13.2712L32.5316 13.2697L32.6082 13.4025C32.6162 13.4162 32.6251 13.4297 32.633 13.4435L49.886 43.3286L33.0941 53.0234L9.15662 11.5625Z" fill="#A6CBFF"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M31.1377 0C33.6839 4.40811e-05 36.2052 0.345872 38.5576 1.01758C40.9099 1.68929 43.0472 2.67394 44.8477 3.91504C46.6482 5.15627 48.0773 6.63021 49.0518 8.25195C49.9888 9.81168 50.4867 11.4792 50.5234 13.166H50.5273V21.4072C56.6623 17.6586 63.874 15.498 71.5898 15.498C93.9304 15.4984 112.042 33.6087 112.042 55.9492C112.042 78.29 93.9305 96.401 71.5898 96.4014C49.3907 96.4014 31.3704 78.5193 31.1426 56.374H31.1377V0ZM71.9473 35.0439C60.1187 35.0441 50.5295 44.6334 50.5293 56.4619C50.5293 63.5338 53.9569 69.8057 59.2412 73.7061C66.4989 79.0625 76.5515 75.3841 85.3955 77.1592C92.613 78.608 97.2369 82.6827 98.3652 83.7686C97.3562 82.731 93.791 78.7138 92.2715 72.3291C89.8011 61.9479 94.8744 49.6043 87.5771 41.8184C83.6695 37.6493 78.1122 35.0441 71.9473 35.0439Z" fill="#006EFF"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.6 KiB |
4
packages/web/components/common/Icon/icons/image.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 17 16" >
|
||||
<path d="M5.50794 6.8195C6.06022 6.8195 6.50794 6.37178 6.50794 5.8195C6.50794 5.26721 6.06022 4.8195 5.50794 4.8195C4.95565 4.8195 4.50794 5.26721 4.50794 5.8195C4.50794 6.37178 4.95565 6.8195 5.50794 6.8195Z" />
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M1.55029 5.85187C1.55029 4.50775 1.55029 3.83568 1.81188 3.32229C2.04197 2.87071 2.40913 2.50355 2.86072 2.27346C3.3741 2.01187 4.04617 2.01187 5.39029 2.01187H11.0436C12.3878 2.01187 13.0598 2.01187 13.5732 2.27346C14.0248 2.50355 14.3919 2.87071 14.622 3.32229C14.8836 3.83568 14.8836 4.50775 14.8836 5.85187V10.1481C14.8836 11.4922 14.8836 12.1643 14.622 12.6777C14.3919 13.1293 14.0248 13.4964 13.5732 13.7265C13.0598 13.9881 12.3878 13.9881 11.0436 13.9881H5.39029C4.04617 13.9881 3.3741 13.9881 2.86072 13.7265C2.40913 13.4964 2.04197 13.1293 1.81188 12.6777C1.55029 12.1643 1.55029 11.4922 1.55029 10.1481V5.85187ZM5.39029 3.3452H11.0436C11.7377 3.3452 12.1781 3.34624 12.5114 3.37347C12.8291 3.39944 12.9305 3.44241 12.9679 3.46146C13.1686 3.56373 13.3318 3.72691 13.434 3.92761C13.4531 3.96502 13.4961 4.06638 13.522 4.38413C13.5493 4.71745 13.5503 5.15781 13.5503 5.85187V10.1481C13.5503 10.1562 13.5503 10.1641 13.5503 10.1721L10.3165 6.93829C10.0561 6.67794 9.634 6.67794 9.37365 6.93829L3.70938 12.6026C3.5547 12.5791 3.49333 12.5524 3.46604 12.5385C3.26533 12.4363 3.10215 12.2731 2.99989 12.0724C2.98083 12.035 2.93786 11.9336 2.9119 11.6159C2.88466 11.2825 2.88363 10.8422 2.88363 10.1481V5.85187C2.88363 5.15781 2.88466 4.71745 2.9119 4.38413C2.93786 4.06638 2.98083 3.96502 2.99989 3.92761C3.10215 3.72691 3.26533 3.56373 3.46604 3.46146C3.50344 3.44241 3.6048 3.39944 3.92255 3.37347C4.25587 3.34624 4.69623 3.3452 5.39029 3.3452ZM9.84506 8.3525L5.54277 12.6548H11.0436C11.7377 12.6548 12.1781 12.6538 12.5114 12.6265C12.8291 12.6006 12.9305 12.5576 12.9679 12.5385C13.1686 12.4363 13.3318 12.2731 13.434 12.0724C13.4422 12.0563 13.4549 12.0283 13.4687 11.9762L9.84506 8.3525Z" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.0 KiB |
4
packages/web/components/common/Icon/icons/loading.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="48" height="48" viewBox="0 0 48 48" >
|
||||
<path d="M47.3337 24C47.3337 36.8866 36.887 47.3333 24.0003 47.3333C11.1137 47.3333 0.666992 36.8866 0.666992 24C0.666992 11.1133 11.1137 0.666626 24.0003 0.666626C36.887 0.666626 47.3337 11.1133 47.3337 24ZM5.33366 24C5.33366 34.3093 13.691 42.6666 24.0003 42.6666C34.3096 42.6666 42.667 34.3093 42.667 24C42.667 13.6906 34.3096 5.33329 24.0003 5.33329C13.691 5.33329 5.33366 13.6906 5.33366 24Z" />
|
||||
<path d="M24.0003 2.99996C24.0003 1.71129 25.0476 0.654541 26.3298 0.783194C29.1026 1.06141 31.8097 1.83481 34.3204 3.07293C37.5303 4.6559 40.3331 6.95608 42.5119 9.79553C44.6907 12.635 46.1871 15.9376 46.8853 19.4479C47.4314 22.1934 47.4778 25.0084 47.0289 27.7588C46.8213 29.0306 45.5295 29.7687 44.2848 29.4352C43.04 29.1016 42.3169 27.8222 42.4926 26.5456C42.7752 24.4926 42.7147 22.4014 42.3083 20.3583C41.7497 17.5501 40.5526 14.908 38.8096 12.6364C37.0666 10.3649 34.8243 8.52471 32.2564 7.25833C30.3881 6.33698 28.3838 5.73731 26.3276 5.47894C25.049 5.31827 24.0003 4.28862 24.0003 2.99996Z" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.1 KiB |
331
packages/web/components/common/MyMenu/Multiple.tsx
Normal file
@@ -0,0 +1,331 @@
|
||||
import React, { useMemo, useRef, useState } from 'react';
|
||||
import {
|
||||
Box,
|
||||
Flex,
|
||||
type MenuItemProps,
|
||||
type PlacementWithLogical,
|
||||
type AvatarProps,
|
||||
type BoxProps,
|
||||
type DividerProps
|
||||
} from '@chakra-ui/react';
|
||||
import MyDivider from '../MyDivider';
|
||||
import type { IconNameType } from '../Icon/type';
|
||||
import { useSystem } from '../../../hooks/useSystem';
|
||||
import Avatar from '../Avatar';
|
||||
import MyPopover from '../MyPopover';
|
||||
|
||||
export type MenuItemType = 'primary' | 'danger' | 'gray' | 'grayBg';
|
||||
|
||||
export type MenuSizeType = 'sm' | 'md' | 'xs' | 'mini';
|
||||
|
||||
export type MenuItemData = {
|
||||
label?: string;
|
||||
children: Array<{
|
||||
isActive?: boolean;
|
||||
type?: MenuItemType;
|
||||
icon?: IconNameType | string;
|
||||
label: string | React.ReactNode;
|
||||
description?: string;
|
||||
onClick?: () => any;
|
||||
menuItemStyles?: MenuItemProps;
|
||||
menuList?: MenuItemData[];
|
||||
}>;
|
||||
};
|
||||
|
||||
export type Props = {
|
||||
label?: string;
|
||||
width?: number | string;
|
||||
offset?: [number, number];
|
||||
Trigger: React.ReactNode;
|
||||
trigger?: 'hover' | 'click';
|
||||
size?: MenuSizeType;
|
||||
placement?: PlacementWithLogical;
|
||||
hasArrow?: boolean;
|
||||
onClose?: () => void;
|
||||
menuList: MenuItemData[];
|
||||
};
|
||||
|
||||
const typeMapStyle: Record<MenuItemType, { styles: MenuItemProps; iconColor?: string }> = {
|
||||
primary: {
|
||||
styles: {
|
||||
_hover: {
|
||||
backgroundColor: 'primary.50',
|
||||
color: 'primary.600'
|
||||
},
|
||||
_focus: {
|
||||
backgroundColor: 'primary.50',
|
||||
color: 'primary.600'
|
||||
},
|
||||
_active: {
|
||||
backgroundColor: 'primary.50',
|
||||
color: 'primary.600'
|
||||
}
|
||||
},
|
||||
iconColor: 'myGray.600'
|
||||
},
|
||||
gray: {
|
||||
styles: {
|
||||
_hover: {
|
||||
backgroundColor: 'myGray.05',
|
||||
color: 'primary.600'
|
||||
},
|
||||
_focus: {
|
||||
backgroundColor: 'myGray.05',
|
||||
color: 'primary.600'
|
||||
},
|
||||
_active: {
|
||||
backgroundColor: 'myGray.05',
|
||||
color: 'primary.600'
|
||||
}
|
||||
},
|
||||
iconColor: 'myGray.400'
|
||||
},
|
||||
grayBg: {
|
||||
styles: {
|
||||
_hover: {
|
||||
backgroundColor: 'myGray.05',
|
||||
color: 'primary.600'
|
||||
},
|
||||
_focus: {
|
||||
backgroundColor: 'myGray.05',
|
||||
color: 'primary.600'
|
||||
},
|
||||
_active: {
|
||||
backgroundColor: 'myGray.05',
|
||||
color: 'primary.600'
|
||||
}
|
||||
},
|
||||
iconColor: 'myGray.600'
|
||||
},
|
||||
danger: {
|
||||
styles: {
|
||||
color: 'red.600',
|
||||
_hover: {
|
||||
background: 'red.1'
|
||||
},
|
||||
_focus: {
|
||||
background: 'red.1'
|
||||
},
|
||||
_active: {
|
||||
background: 'red.1'
|
||||
}
|
||||
},
|
||||
iconColor: 'red.600'
|
||||
}
|
||||
};
|
||||
const sizeMapStyle: Record<
|
||||
MenuSizeType,
|
||||
{
|
||||
iconStyle: AvatarProps;
|
||||
labelStyle: BoxProps;
|
||||
dividerStyle: DividerProps;
|
||||
menuItemStyle: MenuItemProps;
|
||||
}
|
||||
> = {
|
||||
mini: {
|
||||
iconStyle: {
|
||||
w: '14px'
|
||||
},
|
||||
labelStyle: {
|
||||
fontSize: 'mini'
|
||||
},
|
||||
dividerStyle: {
|
||||
my: 0.5
|
||||
},
|
||||
menuItemStyle: {
|
||||
py: 1.5,
|
||||
px: 2
|
||||
}
|
||||
},
|
||||
xs: {
|
||||
iconStyle: {
|
||||
w: '14px'
|
||||
},
|
||||
labelStyle: {
|
||||
fontSize: 'sm'
|
||||
},
|
||||
dividerStyle: {
|
||||
my: 0.5
|
||||
},
|
||||
menuItemStyle: {
|
||||
py: 1.5,
|
||||
px: 2
|
||||
}
|
||||
},
|
||||
sm: {
|
||||
iconStyle: {
|
||||
w: '1rem'
|
||||
},
|
||||
labelStyle: {
|
||||
fontSize: 'sm'
|
||||
},
|
||||
dividerStyle: {
|
||||
my: 1
|
||||
},
|
||||
menuItemStyle: {
|
||||
py: 2,
|
||||
px: 3,
|
||||
_notLast: {
|
||||
mb: 0.5
|
||||
}
|
||||
}
|
||||
},
|
||||
md: {
|
||||
iconStyle: {
|
||||
w: '2rem',
|
||||
borderRadius: '6px'
|
||||
},
|
||||
labelStyle: {
|
||||
fontSize: 'sm'
|
||||
},
|
||||
dividerStyle: {
|
||||
my: 1
|
||||
},
|
||||
menuItemStyle: {
|
||||
py: 2,
|
||||
px: 3,
|
||||
_notLast: {
|
||||
mb: 0.5
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const MenuItem = ({
|
||||
item,
|
||||
size,
|
||||
onClose
|
||||
}: {
|
||||
item: MenuItemData['children'][number];
|
||||
size: MenuSizeType;
|
||||
onClose: () => void;
|
||||
}) => {
|
||||
return (
|
||||
<Box
|
||||
px={3}
|
||||
py={2}
|
||||
cursor="pointer"
|
||||
borderRadius="md"
|
||||
_hover={{
|
||||
bg: 'primary.50',
|
||||
color: 'primary.600'
|
||||
}}
|
||||
onClick={(e) => {
|
||||
if (item.onClick) {
|
||||
item.onClick();
|
||||
}
|
||||
if (!item.menuList) {
|
||||
onClose();
|
||||
}
|
||||
}}
|
||||
>
|
||||
<Flex alignItems="center" w="100%">
|
||||
{!!item.icon && (
|
||||
<Avatar
|
||||
src={item.icon as any}
|
||||
mr={2}
|
||||
{...sizeMapStyle[size].iconStyle}
|
||||
color={item.isActive ? 'inherit' : typeMapStyle[item.type || 'primary'].iconColor}
|
||||
/>
|
||||
)}
|
||||
<Box flex="1">
|
||||
<Box
|
||||
color={item.description ? 'myGray.900' : 'inherit'}
|
||||
{...sizeMapStyle[size].labelStyle}
|
||||
>
|
||||
{item.label}
|
||||
</Box>
|
||||
{item.description && (
|
||||
<Box color={'myGray.500'} fontSize={'mini'}>
|
||||
{item.description}
|
||||
</Box>
|
||||
)}
|
||||
</Box>
|
||||
</Flex>
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
|
||||
const MultipleMenu = (props: Props) => {
|
||||
const {
|
||||
width = 'auto',
|
||||
trigger = 'hover',
|
||||
size = 'sm',
|
||||
offset,
|
||||
Trigger,
|
||||
menuList,
|
||||
hasArrow = false,
|
||||
placement = 'bottom-start'
|
||||
} = props;
|
||||
|
||||
const { isPc } = useSystem();
|
||||
const formatTrigger = !isPc ? 'click' : trigger;
|
||||
|
||||
return (
|
||||
<MyPopover
|
||||
placement={placement}
|
||||
offset={offset}
|
||||
hasArrow={hasArrow}
|
||||
trigger={formatTrigger}
|
||||
w={width}
|
||||
zIndex={999}
|
||||
closeOnBlur={false}
|
||||
autoFocus={false}
|
||||
Trigger={Trigger}
|
||||
>
|
||||
{({ onClose }) => {
|
||||
const onCloseFn = () => {
|
||||
onClose();
|
||||
props?.onClose?.();
|
||||
};
|
||||
|
||||
return (
|
||||
<Box
|
||||
bg="white"
|
||||
maxW="300px"
|
||||
p="6px"
|
||||
border={'1px solid #fff'}
|
||||
boxShadow={'3'}
|
||||
borderRadius={'md'}
|
||||
>
|
||||
{menuList.map((group, i) => (
|
||||
<Box key={i}>
|
||||
{i !== 0 && <MyDivider h={'1.5px'} {...sizeMapStyle[size].dividerStyle} />}
|
||||
{group.label && (
|
||||
<Box fontSize="sm" px={3} py={1} color="myGray.500">
|
||||
{group.label}
|
||||
</Box>
|
||||
)}
|
||||
{group.children.map((item, index) => {
|
||||
return (
|
||||
<Box key={index}>
|
||||
{item.menuList ? (
|
||||
<MultipleMenu
|
||||
{...props}
|
||||
placement={'left'}
|
||||
trigger={'hover'}
|
||||
menuList={item.menuList}
|
||||
onClose={onCloseFn}
|
||||
Trigger={
|
||||
<Box>
|
||||
<MenuItem item={item} size={size} onClose={onCloseFn} />
|
||||
</Box>
|
||||
}
|
||||
hasArrow
|
||||
/>
|
||||
) : (
|
||||
<MenuItem item={item} size={size} onClose={onCloseFn} />
|
||||
)}
|
||||
</Box>
|
||||
);
|
||||
})}
|
||||
</Box>
|
||||
))}
|
||||
</Box>
|
||||
);
|
||||
}}
|
||||
</MyPopover>
|
||||
);
|
||||
};
|
||||
|
||||
export default React.memo(MultipleMenu);
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useMemo, useRef, useState } from 'react';
|
||||
import React, { useCallback, useMemo, useRef, useState } from 'react';
|
||||
import {
|
||||
Menu,
|
||||
MenuList,
|
||||
@@ -18,9 +18,20 @@ import { useSystem } from '../../../hooks/useSystem';
|
||||
import Avatar from '../Avatar';
|
||||
|
||||
export type MenuItemType = 'primary' | 'danger' | 'gray' | 'grayBg';
|
||||
|
||||
export type MenuSizeType = 'sm' | 'md' | 'xs' | 'mini';
|
||||
|
||||
export type MenuItemData = {
|
||||
label?: string;
|
||||
children: Array<{
|
||||
isActive?: boolean;
|
||||
type?: MenuItemType;
|
||||
icon?: IconNameType | string;
|
||||
label: string | React.ReactNode;
|
||||
description?: string;
|
||||
onClick?: () => any;
|
||||
menuItemStyles?: MenuItemProps;
|
||||
}>;
|
||||
};
|
||||
export type Props = {
|
||||
width?: number | string;
|
||||
offset?: [number, number];
|
||||
@@ -29,18 +40,7 @@ export type Props = {
|
||||
size?: MenuSizeType;
|
||||
|
||||
placement?: PlacementWithLogical;
|
||||
menuList: {
|
||||
label?: string;
|
||||
children: {
|
||||
isActive?: boolean;
|
||||
type?: MenuItemType;
|
||||
icon?: IconNameType | string;
|
||||
label: string | React.ReactNode;
|
||||
description?: string;
|
||||
onClick?: () => any;
|
||||
menuItemStyles?: MenuItemProps;
|
||||
}[];
|
||||
}[];
|
||||
menuList: MenuItemData[];
|
||||
};
|
||||
|
||||
const typeMapStyle: Record<MenuItemType, { styles: MenuItemProps; iconColor?: string }> = {
|
||||
|
||||
@@ -43,11 +43,11 @@ const MyPopover = ({
|
||||
initialFocusRef={firstFieldRef}
|
||||
onOpen={() => {
|
||||
onOpen();
|
||||
onOpenFunc && onOpenFunc();
|
||||
onOpenFunc?.();
|
||||
}}
|
||||
onClose={() => {
|
||||
onClose();
|
||||
onCloseFunc && onCloseFunc();
|
||||
onCloseFunc?.();
|
||||
}}
|
||||
placement={placement}
|
||||
offset={offset}
|
||||
|
||||
@@ -6,8 +6,12 @@
|
||||
"accept": "accept",
|
||||
"action": "operate",
|
||||
"assign_permission": "Permission change",
|
||||
"audit_log": "audit",
|
||||
"change_department_name": "Department Editor",
|
||||
"change_member_name": "Member name change",
|
||||
"change_member_name_self": "Change member name",
|
||||
"change_notification_settings": "Change the way to receive notifications",
|
||||
"change_password": "change password",
|
||||
"confirm_delete_from_org": "Confirm to move {{username}} out of the department?",
|
||||
"confirm_delete_from_team": "Confirm to move {{username}} out of the team?",
|
||||
"confirm_delete_group": "Confirm to delete group?",
|
||||
@@ -15,22 +19,50 @@
|
||||
"confirm_forbidden": "Confirm forbidden",
|
||||
"confirm_leave_team": "Confirmed to leave the team? \nAfter exiting, all your resources in the team are transferred to the team owner.",
|
||||
"copy_link": "Copy link",
|
||||
"create_api_key": "Create API key",
|
||||
"create_app": "Create an application",
|
||||
"create_app_copy": "Create a copy of the application",
|
||||
"create_app_folder": "Create an application folder",
|
||||
"create_app_publish_channel": "Create a sharing channel",
|
||||
"create_data": "Insert data",
|
||||
"create_dataset": "Create a knowledge base",
|
||||
"create_dataset_folder": "Create a Knowledge Base Folder",
|
||||
"create_department": "Create a sub-department",
|
||||
"create_group": "Create group",
|
||||
"create_invitation_link": "Create Invitation Link",
|
||||
"create_invoice": "Issuing invoices",
|
||||
"create_org": "Create organization",
|
||||
"create_sub_org": "Create sub-organization",
|
||||
"dataset.api_file": "API Import",
|
||||
"dataset.common_dataset": "Dataset",
|
||||
"dataset.external_file": "External File",
|
||||
"dataset.feishu_dataset": "Feishu Spreadsheet",
|
||||
"dataset.folder_dataset": "Folder",
|
||||
"dataset.website_dataset": "Website Sync",
|
||||
"dataset.yuque_dataset": "Yuque Knowledge Base",
|
||||
"delete": "delete",
|
||||
"delete_api_key": "Delete the API key",
|
||||
"delete_app": "Delete the workbench application",
|
||||
"delete_app_collaborator": "App permissions delete",
|
||||
"delete_app_publish_channel": "Delete the publishing channel",
|
||||
"delete_collection": "Delete a collection",
|
||||
"delete_data": "Delete data",
|
||||
"delete_dataset": "Delete the knowledge base",
|
||||
"delete_dataset_collaborator": "Knowledge Base Permission Deletion",
|
||||
"delete_department": "Delete sub-department",
|
||||
"delete_from_org": "Move out of department",
|
||||
"delete_from_team": "Move out of the team",
|
||||
"delete_group": "Delete a group",
|
||||
"delete_org": "Delete organization",
|
||||
"department": "department",
|
||||
"edit_info": "Edit information",
|
||||
"edit_member": "Edit user",
|
||||
"edit_member_tip": "Name",
|
||||
"edit_org_info": "Edit organization information",
|
||||
"expires": "Expiration time",
|
||||
"export_app_chat_log": "Export the app chat history",
|
||||
"export_bill_records": "Export billing history",
|
||||
"export_dataset": "Export knowledge base",
|
||||
"export_members": "Export members",
|
||||
"forbid_hint": "After forbidden, this invitation link will become invalid. This action is irreversible. Are you sure you want to deactivate?",
|
||||
"forbid_success": "Forbid success",
|
||||
@@ -56,28 +88,69 @@
|
||||
"log_assign_permission": "[{{name}}] Updated the permissions of [{{objectName}}]: [Application creation: [{{appCreate}}], Knowledge Base: [{{datasetCreate}}], API Key: [{{apiKeyCreate}}], Management: [{{manage}}]]",
|
||||
"log_change_department": "【{{name}}】Updated department【{{departmentName}}】",
|
||||
"log_change_member_name": "【{{name}}】Rename member [{{memberName}}] to 【{{newName}}】",
|
||||
"log_change_member_name_self": "【{{name}}】Change your member name to 【{{newName}}】",
|
||||
"log_change_notification_settings": "【{{name}}】A change notification receiving method operation was carried out",
|
||||
"log_change_password": "【{{name}}】The password change operation was performed",
|
||||
"log_create_api_key": "【{{name}}】Create an API key named [{{keyName}}]",
|
||||
"log_create_app": "【{{name}}】Created [{{appType}}] named [{{appName}}]",
|
||||
"log_create_app_copy": "【{{name}}] Created a copy of [{{appType}}] named [{{appName}}]",
|
||||
"log_create_app_folder": "【{{name}}】Create a folder named [{{folderName}}]",
|
||||
"log_create_app_publish_channel": "[{{name}}] Created a channel named [{{channelName}}] for [{{appType}}] called [{{appName}}].",
|
||||
"log_create_collection": "[{{name}}] Create a collection named [{{collectionName}}] in [{{datasetType}}] called [{{datasetName}}].",
|
||||
"log_create_data": "[{{name}}] Insert data into a collection named [{{datasetName}}] in [{{datasetType}}] called [{{datasetName}}] into a collection named [{{collectionName}}]",
|
||||
"log_create_dataset": "【{{name}}】Created 【{{datasetType}}】 named 【{{datasetName}}】",
|
||||
"log_create_dataset_folder": "【{{name}}】Created a folder named {{folderName}}】",
|
||||
"log_create_department": "【{{name}}】Department【{{departmentName}}】",
|
||||
"log_create_group": "【{{name}}】Created group [{{groupName}}]",
|
||||
"log_create_invitation_link": "【{{name}}】Created invitation link【{{link}}】",
|
||||
"log_create_invoice": "【{{name}}】Invoice operation was carried out",
|
||||
"log_delete_api_key": "【{{name}}】Deleted the API key named [{{keyName}}]",
|
||||
"log_delete_app": "【{{name}}】Delete the [{{appType}}] named [{{appName}}]",
|
||||
"log_delete_app_collaborator": "【{{name}}】Delete the [itemName] permission named [itemValueName] in [{{appType}}] named [{{appName}}] delete the [itemName] permission named [{{appName}}] named [{{appName}}] named [{{appName}}] deleted the [{{itemName}}] permission named [{{itemValueName}}] named [{{appType}}] named [{{appName}}].",
|
||||
"log_delete_app_publish_channel": "[{{name}}] [{{appType}}] named [{{appName}}] deleted the channel named [{{channelName}}]",
|
||||
"log_delete_collection": "[{{name}}] Deleted a collection named [{{collectionName}}] in [{{datasetType}}] named [{{datasetName}}].",
|
||||
"log_delete_data": "[{{name}}] Delete data in a collection named [{{datasetName}}] in a collection named [{{datasetName}}]",
|
||||
"log_delete_dataset": "【{{name}}】Deleted 【{{datasetType}}】 named [{{datasetName}}]",
|
||||
"log_delete_dataset_collaborator": "【{{name}}】Updated the collaborators of 【{{appType}}】 named 【{{appName}}】 to: Organization: 【{{orgList}}】, Group: 【{{groupList}}】, Member 【{{tmbList}}】; updated the permissions to: Read permission: 【{{readPermission}}】, Write permission: 【{{writePermission}}】, Administrator permission: 【{{managePermission}}】",
|
||||
"log_delete_department": "{{name}} deleted department {{departmentName}}",
|
||||
"log_delete_group": "{{name}} deleted group {{groupName}}",
|
||||
"log_details": "Details",
|
||||
"log_export_app_chat_log": "【{{name}}】Export a chat history called [{{appName}}] called [{{appType}}]",
|
||||
"log_export_bill_records": "【{{name}}】Export the billing record",
|
||||
"log_export_dataset": "[{{name}}] Export [{{datasetType}}] called [{{datasetName}}]",
|
||||
"log_join_team": "【{{name}}】Join the team through the invitation link 【{{link}}】",
|
||||
"log_kick_out_team": "{{name}} removed member {{memberName}}",
|
||||
"log_login": "【{{name}}】Logined in the system",
|
||||
"log_move_app": "【{{name}}】Move [{{appType}}] named [{{appName}}] to [{{targetFolderName}}]",
|
||||
"log_move_dataset": "【{{name}}】Move [{{datasetType}}] named [{{datasetName}}] to [{{targetFolderName}}]",
|
||||
"log_recover_team_member": "【{{name}}】Restored member【{{memberName}}】",
|
||||
"log_relocate_department": "【{{name}}】Displayed department【{{departmentName}}】",
|
||||
"log_retrain_collection": "[{{name}}] Retrained the collection named [{{collectionName}}] in [{{datasetType}}] called [{{datasetName}}].",
|
||||
"log_search_test": "【{{name}}】Perform a search test operation on [{{datasetType}}] named [{{datasetName}}]",
|
||||
"log_set_invoice_header": "【{{name}}】The invoice header operation was set up",
|
||||
"log_time": "Operation time",
|
||||
"log_transfer_app_ownership": "【{{name}}] Transfer ownership of [{{appType}}] named [{{appName}}] from [{oldOwnerName}}] to [{{newOwnerName}}]",
|
||||
"log_transfer_dataset_ownership": "[{{name}}] Transfer ownership of [{{datasetType}}] named [{{datasetName}}] from [{oldOwnerName}}] to [{{newOwnerName}}]",
|
||||
"log_type": "Operation Type",
|
||||
"log_update_api_key": "【{{name}}】Updated the API key named [{{keyName}}]",
|
||||
"log_update_app_collaborator": "[{{name}}] Updated the collaborator named [{{appName}}] to: Organization: [{{orgList}}], Group: [{{groupList}}], Member [{{tmbList}}]; permissions updated to: Read permission: [{{readPermission}}], Write permission: [{{writePermission}}], Administrator permission: [{{managePermission}}]",
|
||||
"log_update_app_info": "[{{name}}] updated [{{appType}}] named [{{appName}}]: [{{newItemNames}}] to [{{newItemValues}}]",
|
||||
"log_update_app_publish_channel": "[{{name}}] Updated a channel named [{{channelName}}] for [{{appType}}] called [{{appName}}].",
|
||||
"log_update_collection": "[{{name}}] Updated a collection named [{{collectionName}}] in [{{datasetType}}] called [{{datasetName}}].",
|
||||
"log_update_data": "【{{name}}】Update data in a collection named 【{{datasetName}}】[{{datasetType}}] with [{{datasetType}}] with [{{collectionName}}]",
|
||||
"log_update_dataset": "【{{name}}】Updated [{{datasetType}}] named [{{datasetName}}]",
|
||||
"log_update_dataset_collaborator": "[{{name}}] Updated the collaborator named [{{datasetName}}] to: Organization: [{{orgList}}], Group: [{{groupList}}], Member [{{tmbList}}]; permissions updated to: [{{readPermission}}], [{{writePermission}}], [{{managePermission}}]",
|
||||
"log_update_publish_app": "【{{name}}】【{{operationName}}】【{{appType}}】 named [{{appName}}】",
|
||||
"log_user": "Operator",
|
||||
"login": "Log in",
|
||||
"manage_member": "Managing members",
|
||||
"member": "member",
|
||||
"member_group": "Belonging to member group",
|
||||
"move_app": "App location movement",
|
||||
"move_dataset": "Mobile Knowledge Base",
|
||||
"move_member": "Move member",
|
||||
"move_org": "Move organization",
|
||||
"notification_recieve": "Team notification reception",
|
||||
"operation_log": "log",
|
||||
"org": "organization",
|
||||
"org_description": "Organization description",
|
||||
"org_name": "Organization name",
|
||||
@@ -92,6 +165,7 @@
|
||||
"permission_manage": "Admin",
|
||||
"permission_manage_tip": "Can manage members, create groups, manage all groups, and assign permissions to groups and members",
|
||||
"please_bind_contact": "Please bind the contact information",
|
||||
"purchase_plan": "Upgrade package",
|
||||
"recover_team_member": "Member Recovery",
|
||||
"relocate_department": "Department Mobile",
|
||||
"remark": "remark",
|
||||
@@ -99,17 +173,40 @@
|
||||
"restore_tip": "Confirm to join the team {{username}}? \nOnly the availability and related permissions of this member account are restored, and the resources under the account cannot be restored.",
|
||||
"restore_tip_title": "Recovery confirmation",
|
||||
"retain_admin_permissions": "Keep administrator rights",
|
||||
"retrain_collection": "Retrain the set",
|
||||
"save_and_publish": "save and publish",
|
||||
"search_log": "Search log",
|
||||
"search_member": "Search for members",
|
||||
"search_member_group_name": "Search member/group name",
|
||||
"search_org": "Search Department",
|
||||
"search_test": "Search Test",
|
||||
"set_invoice_header": "Set up invoice header",
|
||||
"set_name_avatar": "Team avatar",
|
||||
"sync_immediately": "Synchronize now",
|
||||
"sync_member_failed": "Synchronization of members failed",
|
||||
"sync_member_success": "Synchronize members successfully",
|
||||
"total_team_members": "{{amount}} members in total",
|
||||
"transfer_ownership": "transfer owner",
|
||||
"total_team_members": "Total {{amount}} members",
|
||||
"transfer_app_ownership": "Transfer app ownership",
|
||||
"transfer_dataset_ownership": "Transfer dataset ownership",
|
||||
"transfer_ownership": "Transfer ownership",
|
||||
"type.Folder": "Folder",
|
||||
"type.Http plugin": "HTTP Plugin",
|
||||
"type.Plugin": "Plugin",
|
||||
"type.Simple bot": "Simple App",
|
||||
"type.Tool": "Tool",
|
||||
"type.Tool set": "Toolset",
|
||||
"type.Workflow bot": "Workflow",
|
||||
"unlimited": "Unlimited",
|
||||
"update": "update",
|
||||
"update_api_key": "Update API key",
|
||||
"update_app_collaborator": "Apply permission changes",
|
||||
"update_app_info": "Application information modification",
|
||||
"update_app_publish_channel": "Update the release channel",
|
||||
"update_collection": "Update the collection",
|
||||
"update_data": "Update data",
|
||||
"update_dataset": "Update the knowledge base",
|
||||
"update_dataset_collaborator": "Knowledge Base Permission Changes",
|
||||
"update_publish_app": "Application update",
|
||||
"used_times_limit": "Limit",
|
||||
"user_name": "username",
|
||||
"user_team_invite_member": "Invite members",
|
||||
|
||||
@@ -197,6 +197,9 @@
|
||||
"type.MCP tools": "MCP Toolset",
|
||||
"type.MCP_tools_url": "MCP Address",
|
||||
"type.Plugin": "Plugin",
|
||||
"type.Folder": "Folder",
|
||||
"type.Tool set": "Toolset",
|
||||
"type.Tool": "Tool",
|
||||
"type.Simple bot": "Simple App",
|
||||
"type.Workflow bot": "Workflow",
|
||||
"type.error.Workflow data is empty": "No workflow data was obtained",
|
||||
@@ -238,4 +241,4 @@
|
||||
"workflow.user_file_input_desc": "Links to documents and images uploaded by users.",
|
||||
"workflow.user_select": "User Select",
|
||||
"workflow.user_select_tip": "This module can configure multiple options for selection during the dialogue. Different options can lead to different workflow branches."
|
||||
}
|
||||
}
|
||||
@@ -71,13 +71,13 @@
|
||||
"response_embedding_model_tokens": "Vector Model Tokens",
|
||||
"response_hybrid_weight": "Embedding : Full text = {{emb}} : {{text}}",
|
||||
"response_rerank_tokens": "Rearrange Model Tokens",
|
||||
"search_results": "Search results",
|
||||
"select": "Select",
|
||||
"select_file": "Upload File",
|
||||
"select_file_img": "Upload file / image",
|
||||
"select_img": "Upload Image",
|
||||
"source_cronJob": "Scheduled execution",
|
||||
"stream_output": "Stream Output",
|
||||
"to_dataset": "Go to the Knowledge Base",
|
||||
"unsupported_file_type": "Unsupported file types",
|
||||
"upload": "Upload",
|
||||
"variable_invisable_in_share": "Custom variables are not visible in login-free links",
|
||||
|
||||
@@ -180,7 +180,7 @@
|
||||
"code_error.user_error.balance_not_enough": "Insufficient Account Balance",
|
||||
"code_error.user_error.bin_visitor_guest": "You Are Currently a Guest, Unauthorized to Operate",
|
||||
"code_error.user_error.un_auth_user": "User Not Found",
|
||||
"comfirm_import": "comfirm_import",
|
||||
"comfirm_import": "Confirm import",
|
||||
"comfirm_leave_page": "Confirm to Leave This Page?",
|
||||
"comfirn_create": "Confirm Creation",
|
||||
"commercial_function_tip": "Please Upgrade to the Commercial Version to Use This Feature: https://doc.fastgpt.cn/docs/commercial/intro/",
|
||||
@@ -215,6 +215,7 @@
|
||||
"core.app.Interval timer run": "Scheduled Execution",
|
||||
"core.app.Interval timer tip": "Can Execute App on Schedule",
|
||||
"core.app.Make a brief introduction of your app": "Give Your AI App an Introduction",
|
||||
"core.app.name": "name",
|
||||
"core.app.Name and avatar": "Avatar & Name",
|
||||
"core.app.Publish": "Publish",
|
||||
"core.app.Publish Confirm": "Confirm to Publish App? This Will Immediately Update the App Status on All Publishing Channels.",
|
||||
@@ -402,7 +403,6 @@
|
||||
"core.chat.response.module model": "Model",
|
||||
"core.chat.response.module name": "Model Name",
|
||||
"core.chat.response.module query": "Question/Search Term",
|
||||
"core.chat.response.module quoteList": "Quote Content",
|
||||
"core.chat.response.module similarity": "Similarity",
|
||||
"core.chat.response.module temperature": "Temperature",
|
||||
"core.chat.response.module time": "Run Time",
|
||||
@@ -433,7 +433,6 @@
|
||||
"core.dataset.Text collection": "Text Dataset",
|
||||
"core.dataset.apiFile": "API File",
|
||||
"core.dataset.collection.Click top config website": "Click to Configure Website",
|
||||
"core.dataset.collection.Collection name": "Dataset Name",
|
||||
"core.dataset.collection.Collection raw text": "Dataset Content",
|
||||
"core.dataset.collection.Empty Tip": "The Dataset is Empty",
|
||||
"core.dataset.collection.QA Prompt": "QA Split Prompt",
|
||||
@@ -450,7 +449,6 @@
|
||||
"core.dataset.collection.metadata.metadata": "Metadata",
|
||||
"core.dataset.collection.metadata.read source": "View Original Content",
|
||||
"core.dataset.collection.metadata.source": "Data Source",
|
||||
"core.dataset.collection.metadata.source name": "Source Name",
|
||||
"core.dataset.collection.metadata.source size": "Source Size",
|
||||
"core.dataset.collection.status.active": "Ready",
|
||||
"core.dataset.collection.status.error": "Error",
|
||||
@@ -742,7 +740,7 @@
|
||||
"core.workflow.value": "Value",
|
||||
"core.workflow.variable": "Variable",
|
||||
"create": "Create",
|
||||
"create_failed": "Creation Failed",
|
||||
"create_failed": "Create failed",
|
||||
"create_success": "Created Successfully",
|
||||
"create_time": "Creation Time",
|
||||
"cron_job_run_app": "Scheduled Task",
|
||||
@@ -787,7 +785,6 @@
|
||||
"dataset.dataset_name": "Dataset Name",
|
||||
"dataset.deleteFolderTips": "Confirm to Delete This Folder and All Its Contained Datasets? Data Cannot Be Recovered After Deletion, Please Confirm!",
|
||||
"dataset.test.noResult": "No Search Results",
|
||||
"dataset_data_import_q_placeholder": "Up to {{maxToken}} words.",
|
||||
"dataset_data_input_a": "Answer",
|
||||
"dataset_data_input_chunk": "Chunk",
|
||||
"dataset_data_input_chunk_content": "Chunk",
|
||||
@@ -801,7 +798,6 @@
|
||||
"delete_success": "Deleted Successfully",
|
||||
"delete_warning": "Deletion Warning",
|
||||
"embedding_model_not_config": "No index model is detected",
|
||||
"error.Create failed": "Create failed",
|
||||
"error.code_error": "Verification code error",
|
||||
"error.fileNotFound": "File not found~",
|
||||
"error.inheritPermissionError": "Inherit permission Error",
|
||||
@@ -1207,6 +1203,7 @@
|
||||
"templateTags.Writing": "Writing",
|
||||
"template_market": "Template Market",
|
||||
"textarea_variable_picker_tip": "Enter \"/\" to select a variable",
|
||||
"to_dataset": "To dataset",
|
||||
"ui.textarea.Magnifying": "Magnifying",
|
||||
"un_used": "Unused",
|
||||
"unauth_token": "The certificate has expired, please log in again",
|
||||
@@ -1305,4 +1302,4 @@
|
||||
"zoomin_tip_mac": "Zoom Out ⌘ -",
|
||||
"zoomout_tip": "Zoom In ctrl +",
|
||||
"zoomout_tip_mac": "Zoom In ⌘ +"
|
||||
}
|
||||
}
|
||||
@@ -28,16 +28,21 @@
|
||||
"collection.training_type": "Chunk type",
|
||||
"collection_data_count": "Data amount",
|
||||
"collection_metadata_custom_pdf_parse": "PDF enhancement analysis",
|
||||
"collection_name": "Collection name",
|
||||
"collection_not_support_retraining": "This collection type does not support retuning parameters",
|
||||
"collection_not_support_sync": "This collection does not support synchronization",
|
||||
"collection_sync": "Sync data",
|
||||
"collection_sync_confirm_tip": "Confirm to start synchronizing data? \nThe system will pull the latest data for comparison. If the contents are different, a new collection will be created and the old collection will be deleted. Please confirm!",
|
||||
"collection_tags": "Collection Tags",
|
||||
"common.dataset.data.Input Error Tip": "[Image Dataset] Process error:",
|
||||
"common.error.unKnow": "Unknown error",
|
||||
"common_dataset": "General Dataset",
|
||||
"common_dataset_desc": "Building a knowledge base by importing files, web page links, or manual entry",
|
||||
"condition": "condition",
|
||||
"config_sync_schedule": "Configure scheduled synchronization",
|
||||
"confirm_import_images": "Total {{num}} | Confirm create",
|
||||
"confirm_to_rebuild_embedding_tip": "Are you sure you want to switch the index for the Dataset?\nSwitching the index is a significant operation that requires re-indexing all data in your Dataset, which may take a long time. Please ensure your account has sufficient remaining points.\n\nAdditionally, you need to update the applications that use this Dataset to avoid conflicts with other indexed model Datasets.",
|
||||
"core.dataset.Image collection": "Image dataset",
|
||||
"core.dataset.import.Adjust parameters": "Adjust parameters",
|
||||
"custom_data_process_params": "Custom",
|
||||
"custom_data_process_params_desc": "Customize data processing rules",
|
||||
@@ -90,6 +95,7 @@
|
||||
"image_auto_parse": "Automatic image indexing",
|
||||
"image_auto_parse_tips": "Call VLM to automatically label the pictures in the document and generate additional search indexes",
|
||||
"image_training_queue": "Queue of image processing",
|
||||
"images_creating": "Creating",
|
||||
"immediate_sync": "Immediate Synchronization",
|
||||
"import.Auto mode Estimated Price Tips": "The text understanding model needs to be called, which requires more points: {{price}} points/1K tokens",
|
||||
"import.Embedding Estimated Price Tips": "Only use the index model and consume a small amount of AI points: {{price}} points/1K tokens",
|
||||
@@ -104,6 +110,8 @@
|
||||
"index_size": "Index size",
|
||||
"index_size_tips": "When vectorized, the system will automatically further segment the blocks according to this size.",
|
||||
"input_required_field_to_select_baseurl": "Please enter the required information first",
|
||||
"insert_images": "Added pictures",
|
||||
"insert_images_success": "The new picture is successfully added, and you need to wait for the training to be completed before it will be displayed.",
|
||||
"is_open_schedule": "Enable scheduled synchronization",
|
||||
"keep_image": "Keep the picture",
|
||||
"loading": "Loading...",
|
||||
@@ -135,6 +143,7 @@
|
||||
"process.Image_Index": "Image index generation",
|
||||
"process.Is_Ready": "Ready",
|
||||
"process.Is_Ready_Count": "{{count}} Group is ready",
|
||||
"process.Parse_Image": "Image analysis",
|
||||
"process.Parsing": "Parsing",
|
||||
"process.Vectorizing": "Index vectorization",
|
||||
"process.Waiting": "Queue",
|
||||
@@ -179,13 +188,19 @@
|
||||
"training.Error": "{{count}} Group exception",
|
||||
"training.Normal": "Normal",
|
||||
"training_mode": "Chunk mode",
|
||||
"training_queue_tip": "Training queue status",
|
||||
"training_ready": "{{count}} Group",
|
||||
"uploading_progress": "Uploading: {{num}}%",
|
||||
"vector_model_max_tokens_tip": "Each chunk of data has a maximum length of 3000 tokens",
|
||||
"vector_training_queue": "Vector training queue",
|
||||
"vllm_model": "Image understanding model",
|
||||
"vlm_model_required_tooltip": "A Vision Language Model is required to create image collections",
|
||||
"vlm_model_required_warning": "Image datasets require a Vision Language Model (VLM) to be configured. Please add a model that supports image understanding in the model configuration first.",
|
||||
"waiting_for_training": "Waiting for training",
|
||||
"website_dataset": "Website Sync",
|
||||
"website_dataset_desc": "Build knowledge base by crawling web page data in batches",
|
||||
"website_info": "Website Information",
|
||||
"yuque_dataset": "Yuque Dataset",
|
||||
"yuque_dataset_config": "Yuque Dataset Config",
|
||||
"yuque_dataset_desc": "Can build a dataset using Yuque documents by configuring permissions, without secondary storage"
|
||||
"yuque_dataset": "Yuque Knowledge Base",
|
||||
"yuque_dataset_config": "Configure Yuque Knowledge Base",
|
||||
"yuque_dataset_desc": "Build knowledge base using Yuque documents by configuring document permissions, documents will not be stored twice"
|
||||
}
|
||||
|
||||
@@ -1,9 +1,32 @@
|
||||
{
|
||||
"Action": "Please select the image to upload",
|
||||
"All images import failed": "All pictures failed to import",
|
||||
"Dataset_ID_not_found": "The dataset ID does not exist",
|
||||
"Failed_to_get_token": "Failed to obtain the token",
|
||||
"Image_ID_copied": "Copy ID",
|
||||
"Image_Preview": "Picture preview",
|
||||
"Image_dataset_requires_VLM_model_to_be_configured": "The image dataset needs to be configured with the image understanding model (VLM) to be used. Please add a model that supports image understanding in the model configuration first.",
|
||||
"Image_does_not_belong_to_current_team": "The picture does not belong to the current team",
|
||||
"Image_file_does_not_exist": "The picture does not exist",
|
||||
"Loading_image": "Loading the picture...",
|
||||
"Loading_image failed": "Preview loading failed",
|
||||
"Only_support_uploading_one_image": "Only support uploading one image",
|
||||
"Please select the image to upload": "Please select the image to upload",
|
||||
"Please select the image to upload select the image to upload": "",
|
||||
"Please wait for all files to upload": "Please wait for all files to be uploaded to complete",
|
||||
"bucket_chat": "Conversation Files",
|
||||
"bucket_file": "Dataset Documents",
|
||||
"click_to_view_raw_source": "Click to View Original Source",
|
||||
"common.dataset_data_input_image_support_format": "Support .jpg, .jpeg, .png, .gif, .webp formats",
|
||||
"delete_image": "Delete pictures",
|
||||
"file_name": "Filename",
|
||||
"file_size": "Filesize",
|
||||
"image": "picture",
|
||||
"image_collection": "Picture collection",
|
||||
"image_description": "Image description",
|
||||
"image_description_tip": "Please enter the description of the picture",
|
||||
"please_upload_image_first": "Please upload the picture first",
|
||||
"reached_max_file_count": "Maximum file count reached",
|
||||
"release_the_mouse_to_upload_the_file": "Release Mouse to Upload File",
|
||||
"select_and_drag_file_tip": "Click or Drag Files Here to Upload",
|
||||
"select_file_amount_limit": "You can select up to {{max}} files",
|
||||
@@ -12,7 +35,9 @@
|
||||
"support_file_type": "Supports {{fileType}} file types",
|
||||
"support_max_count": "Supports up to {{maxCount}} files",
|
||||
"support_max_size": "Maximum file size is {{maxSize}}",
|
||||
"total_files": "Total {{selectFiles.length}} files",
|
||||
"upload_error_description": "Only multiple files or a single folder can be uploaded at a time",
|
||||
"upload_failed": "Upload Failed",
|
||||
"reached_max_file_count": "Maximum file count reached",
|
||||
"upload_error_description": "Only multiple files or a single folder can be uploaded at a time"
|
||||
}
|
||||
"upload_file_error": "Please upload pictures",
|
||||
"uploading": "Uploading..."
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
{
|
||||
"account_team.delete_dataset": "删除知识库",
|
||||
"active_model": "可用模型",
|
||||
"add_default_model": "添加预设模型",
|
||||
"api_key": "API 密钥",
|
||||
|
||||
@@ -6,8 +6,12 @@
|
||||
"accept": "接受",
|
||||
"action": "操作",
|
||||
"assign_permission": "权限变更",
|
||||
"audit_log": "审计",
|
||||
"change_department_name": "部门编辑",
|
||||
"change_member_name": "成员改名",
|
||||
"change_member_name_self": "变更成员名",
|
||||
"change_notification_settings": "变更通知接收途径",
|
||||
"change_password": "更改密码",
|
||||
"confirm_delete_from_org": "确认将 {{username}} 移出部门?",
|
||||
"confirm_delete_from_team": "确认将 {{username}} 移出团队?",
|
||||
"confirm_delete_group": "确认删除群组?",
|
||||
@@ -15,22 +19,51 @@
|
||||
"confirm_forbidden": "确认停用",
|
||||
"confirm_leave_team": "确认离开该团队? \n退出后,您在该团队所有的资源均转让给团队所有者。",
|
||||
"copy_link": "复制链接",
|
||||
"create_api_key": "创建api密钥",
|
||||
"create_app": "创建应用",
|
||||
"create_app_copy": "创建应用副本",
|
||||
"create_app_folder": "创建应用文件夹",
|
||||
"create_app_publish_channel": "创建分享渠道",
|
||||
"create_collection": "创建集合",
|
||||
"create_data": "插入数据",
|
||||
"create_dataset": "创建知识库",
|
||||
"create_dataset_folder": "创建知识库文件夹",
|
||||
"create_department": "创建子部门",
|
||||
"create_group": "创建群组",
|
||||
"create_invitation_link": "创建邀请链接",
|
||||
"create_invoice": "开发票",
|
||||
"create_org": "创建部门",
|
||||
"create_sub_org": "创建子部门",
|
||||
"dataset.api_file": "API导入",
|
||||
"dataset.common_dataset": "知识库",
|
||||
"dataset.external_file": "外部文件",
|
||||
"dataset.feishu_dataset": "飞书多维表格",
|
||||
"dataset.folder_dataset": "文件夹",
|
||||
"dataset.website_dataset": "网站同步",
|
||||
"dataset.yuque_dataset": "语雀知识库",
|
||||
"delete": "删除",
|
||||
"delete_api_key": "删除api密钥",
|
||||
"delete_app": "删除工作台应用",
|
||||
"delete_app_collaborator": "应用权限删除",
|
||||
"delete_app_publish_channel": "删除发布渠道",
|
||||
"delete_collection": "删除集合",
|
||||
"delete_data": "删除数据",
|
||||
"delete_dataset": "删除知识库",
|
||||
"delete_dataset_collaborator": "知识库权限删除",
|
||||
"delete_department": "删除子部门",
|
||||
"delete_from_org": "移出部门",
|
||||
"delete_from_team": "移出团队",
|
||||
"delete_group": "删除群组",
|
||||
"delete_org": "删除部门",
|
||||
"department": "部门",
|
||||
"edit_info": "编辑信息",
|
||||
"edit_member": "编辑用户",
|
||||
"edit_member_tip": "成员名",
|
||||
"edit_org_info": "编辑部门信息",
|
||||
"expires": "过期时间",
|
||||
"export_app_chat_log": "导出应用聊天记录",
|
||||
"export_bill_records": "导出账单记录",
|
||||
"export_dataset": "导出知识库",
|
||||
"export_members": "导出成员",
|
||||
"forbid_hint": "停用后,该邀请链接将失效。 该操作不可撤销,是否确认停用?",
|
||||
"forbid_success": "停用成功",
|
||||
@@ -56,28 +89,70 @@
|
||||
"log_assign_permission": "【{{name}}】更新了【{{objectName}}】的权限:[应用创建:【{{appCreate}}】, 知识库:【{{datasetCreate}}】, API密钥:【{{apiKeyCreate}}】, 管理:【{{manage}}】]",
|
||||
"log_change_department": "【{{name}}】更新了部门【{{departmentName}}】",
|
||||
"log_change_member_name": "【{{name}}】将成员【{{memberName}}】重命名为【{{newName}}】",
|
||||
"log_change_member_name_self": "【{{name}}】把自己的成员名从【{{oldName}}】变更为【{{newName}}】",
|
||||
"log_change_notification_settings": "【{{name}}】进行了变更通知接收途径操作",
|
||||
"log_change_password": "【{{name}}】进行了变更密码操作",
|
||||
"log_create_api_key": "【{{name}}】创建了名为【{{keyName}}】的api密钥",
|
||||
"log_create_app": "【{{name}}】创建了名为【{{appName}}】的【{{appType}}】",
|
||||
"log_create_app_copy": "【{{name}}】给名为【{{appName}}】的【{{appType}}】创建了一个副本",
|
||||
"log_create_app_folder": "【{{name}}】创建了名为【{{folderName}}】的文件夹",
|
||||
"log_create_app_publish_channel": "【{{name}}】给名为【{{appName}}】的【{{appType}}】创建了名为【{{channelName}}】的渠道",
|
||||
"log_create_collection": "【{{name}}】在名为【{{datasetName}}】的【{{datasetType}}】创建了名为【{{collectionName}}】的集合",
|
||||
"log_create_data": "【{{name}}】在名为【{{datasetName}}】的【{{datasetType}}】往名为【{{collectionName}}】的集合插入数据",
|
||||
"log_create_dataset": "【{{name}}】创建了名为【{{datasetName}}】的【{{datasetType}}】",
|
||||
"log_create_dataset_folder": "【{{name}}】创建了名为{{folderName}}】的文件夹",
|
||||
"log_create_department": "【{{name}}】创建了部门【{{departmentName}}】",
|
||||
"log_create_group": "【{{name}}】创建了群组【{{groupName}}】",
|
||||
"log_create_invitation_link": "【{{name}}】创建了邀请链接【{{link}}】",
|
||||
"log_create_invoice": "【{{name}}】进行了开发票操作",
|
||||
"log_delete_api_key": "【{{name}}】删除了名为【{{keyName}}】的api密钥",
|
||||
"log_delete_app": "【{{name}}】将名为【{{appName}}】的【{{appType}}】删除",
|
||||
"log_delete_app_collaborator": "【{{name}}】将名为【{{appName}}】的【{{appType}}】中名为【{{itemValueName}}】的【{{itemName}}】权限删除",
|
||||
"log_delete_app_publish_channel": "【{{name}}】名为【{{appName}}】的【{{appType}}】删除了名为【{{channelName}}】的渠道",
|
||||
"log_delete_collection": "【{{name}}】在名为【{{datasetName}}】的【{{datasetType}}】删除了名为【{{collectionName}}】的集合",
|
||||
"log_delete_data": "【{{name}}】在名为【{{datasetName}}】的【{{datasetType}}】在名为【{{collectionName}}】的集合删除数据",
|
||||
"log_delete_dataset": "【{{name}}】删除了名为【{{datasetName}}】的【{{datasetType}}】",
|
||||
"log_delete_dataset_collaborator": "【{{name}}】将名为【{{datasetName}}】的【{{datasetType}}】中名为【itemValueName】的【itemName】权限删除",
|
||||
"log_delete_department": "【{{name}}】删除了部门【{{departmentName}}】",
|
||||
"log_delete_group": "【{{name}}】删除了群组【{{groupName}}】",
|
||||
"log_details": "详情",
|
||||
"log_export_app_chat_log": "【{{name}}】导出了名为【{{appName}}】的【{{appType}}】的聊天记录",
|
||||
"log_export_bill_records": "【{{name}}】导出了账单记录",
|
||||
"log_export_dataset": "【{{name}}】导出了名为【{{datasetName}}】的【{{datasetType}}】",
|
||||
"log_join_team": "【{{name}}】通过邀请链接【{{link}}】加入团队",
|
||||
"log_kick_out_team": "【{{name}}】移除了成员【{{memberName}}】",
|
||||
"log_login": "【{{name}}】登录了系统",
|
||||
"log_move_app": "【{{name}}】将名为【{{appName}}】的【{{appType}}】移动到【{{targetFolderName}}】",
|
||||
"log_move_dataset": "【{{name}}】将名为【{{datasetName}}】的【{{datasetType}}】移动到【{{targetFolderName}}】",
|
||||
"log_purchase_plan": "【{{name}}】购买了套餐",
|
||||
"log_recover_team_member": "【{{name}}】恢复了成员【{{memberName}}】",
|
||||
"log_relocate_department": "【{{name}}】移动了部门【{{departmentName}}】",
|
||||
"log_retrain_collection": "【{{name}}】在名为【{{datasetName}}】的【{{datasetType}}】重新训练了名为【{{collectionName}}】的集合",
|
||||
"log_search_test": "【{{name}}】在名为【{{datasetName}}】的【{{datasetType}}】执行搜索测试操作",
|
||||
"log_set_invoice_header": "【{{name}}】进行了设置发票抬头操作",
|
||||
"log_time": "操作时间",
|
||||
"log_transfer_app_ownership": "【{{name}}】将名为【{{appName}}】的【{{appType}}】的所有权从【{{oldOwnerName}}】转移到【{{newOwnerName}}】",
|
||||
"log_transfer_dataset_ownership": "【{{name}}】将名为【{{datasetName}}】的【{{datasetType}}】的所有权从【{{oldOwnerName}}】转移到【{{newOwnerName}}】",
|
||||
"log_type": "操作类型",
|
||||
"log_update_api_key": "【{{name}}】更新了名为【{{keyName}}】的api密钥",
|
||||
"log_update_app_collaborator": "【{{name}}】将名为【{{appName}}】的【{{appType}}】的合作者更新为:组织:【{{orgList}}】,群组:【{{groupList}}】,成员【{{tmbList}}】;权限更新为:读权限:【{{readPermission}}】,写权限:【{{writePermission}}】,管理员权限:【{{managePermission}}】",
|
||||
"log_update_app_info": "【{{name}}】更新了名为【{{appName}}】的【{{appType}}】:【{{newItemNames}}】为【{{newItemValues}}】",
|
||||
"log_update_app_publish_channel": "【{{name}}】给名为【{{appName}}】的【{{appType}}】更新了名为【{{channelName}}】的渠道",
|
||||
"log_update_collection": "【{{name}}】在名为【{{datasetName}}】的【{{datasetType}}】更新了名为【{{collectionName}}】的集合",
|
||||
"log_update_data": "【{{name}}】在名为【{{datasetName}}】的【{{datasetType}}】在名为【{{collectionName}}】的集合更新数据",
|
||||
"log_update_dataset": "【{{name}}】更新了名为【{{datasetName}}】的【{{datasetType}}】",
|
||||
"log_update_dataset_collaborator": "【{{name}}】将名为【{{datasetName}}】的【{{datasetType}}】的合作者更新为:组织:【{{orgList}}】,群组:【{{groupList}}】,成员【{{tmbList}}】;权限更新为:【{{readPermission}}】,【{{writePermission}}】,【{{managePermission}}】",
|
||||
"log_update_publish_app": "【{{name}}】【{{operationName}}】名为【{{appName}}】的【{{appType}}】",
|
||||
"log_user": "操作人员",
|
||||
"login": "登录",
|
||||
"manage_member": "管理成员",
|
||||
"member": "成员",
|
||||
"member_group": "所属群组",
|
||||
"move_app": "应用位置移动",
|
||||
"move_dataset": "移动知识库",
|
||||
"move_member": "移动成员",
|
||||
"move_org": "移动部门",
|
||||
"notification_recieve": "团队通知接收",
|
||||
"operation_log": "日志",
|
||||
"org": "部门",
|
||||
"org_description": "介绍",
|
||||
"org_name": "部门名称",
|
||||
@@ -92,6 +167,7 @@
|
||||
"permission_manage": "管理员",
|
||||
"permission_manage_tip": "可以管理成员、创建群组、管理所有群组、为群组和成员分配权限",
|
||||
"please_bind_contact": "请绑定联系方式",
|
||||
"purchase_plan": "升级套餐",
|
||||
"recover_team_member": "成员恢复",
|
||||
"relocate_department": "部门移动",
|
||||
"remark": "备注",
|
||||
@@ -99,17 +175,40 @@
|
||||
"restore_tip": "确认将 {{username}} 加入团队吗?仅恢复该成员账号可用性及相关权限,无法恢复账号下资源。",
|
||||
"restore_tip_title": "恢复确认",
|
||||
"retain_admin_permissions": "保留管理员权限",
|
||||
"retrain_collection": "重新训练集合",
|
||||
"save_and_publish": "保存并发布",
|
||||
"search_log": "搜索日志",
|
||||
"search_member": "搜索成员",
|
||||
"search_member_group_name": "搜索成员/群组名称",
|
||||
"search_org": "搜索部门",
|
||||
"search_test": "搜索测试",
|
||||
"set_invoice_header": "设置发票抬头",
|
||||
"set_name_avatar": "团队头像 & 团队名",
|
||||
"sync_immediately": "立即同步",
|
||||
"sync_member_failed": "同步成员失败",
|
||||
"sync_member_success": "同步成员成功",
|
||||
"total_team_members": "共 {{amount}} 名成员",
|
||||
"transfer_app_ownership": "转移应用所有权",
|
||||
"transfer_dataset_ownership": "转移知识库所有权",
|
||||
"transfer_ownership": "转让所有者",
|
||||
"type.Folder": "文件夹",
|
||||
"type.Http plugin": "HTTP 插件",
|
||||
"type.Plugin": "插件",
|
||||
"type.Simple bot": "简易应用",
|
||||
"type.Tool": "工具",
|
||||
"type.Tool set": "工具集",
|
||||
"type.Workflow bot": "工作流",
|
||||
"unlimited": "无限制",
|
||||
"update": "更新",
|
||||
"update_api_key": "更新api密钥",
|
||||
"update_app_collaborator": "应用权限更改",
|
||||
"update_app_info": "应用信息修改",
|
||||
"update_app_publish_channel": "更新发布渠道",
|
||||
"update_collection": "更新集合",
|
||||
"update_data": "更新数据",
|
||||
"update_dataset": "更新知识库",
|
||||
"update_dataset_collaborator": "知识库权限更改",
|
||||
"update_publish_app": "应用更新",
|
||||
"used_times_limit": "有效人数",
|
||||
"user_name": "用户名",
|
||||
"user_team_invite_member": "邀请成员",
|
||||
|
||||
@@ -189,6 +189,7 @@
|
||||
"type.Create simple bot tip": "通过填表单形式,创建简单的 AI 应用,适合新手",
|
||||
"type.Create workflow bot": "创建工作流",
|
||||
"type.Create workflow tip": "通过低代码的方式,构建逻辑复杂的多轮对话 AI 应用,推荐高级玩家使用",
|
||||
"type.Folder": "文件夹",
|
||||
"type.Http plugin": "HTTP 插件",
|
||||
"type.Import from json": "导入 JSON 配置",
|
||||
"type.Import from json tip": "通过 JSON 配置文件,直接创建应用",
|
||||
@@ -198,6 +199,8 @@
|
||||
"type.MCP_tools_url": "MCP 地址",
|
||||
"type.Plugin": "插件",
|
||||
"type.Simple bot": "简易应用",
|
||||
"type.Tool": "工具",
|
||||
"type.Tool set": "工具集",
|
||||
"type.Workflow bot": "工作流",
|
||||
"type.error.Workflow data is empty": "没有获取到工作流数据",
|
||||
"type.error.workflowresponseempty": "响应内容为空",
|
||||
@@ -238,4 +241,4 @@
|
||||
"workflow.user_file_input_desc": "用户上传的文档和图片链接",
|
||||
"workflow.user_select": "用户选择",
|
||||
"workflow.user_select_tip": "该模块可配置多个选项,以供对话时选择。不同选项可导向不同工作流支线"
|
||||
}
|
||||
}
|
||||
@@ -71,13 +71,13 @@
|
||||
"response_embedding_model_tokens": "向量模型 Tokens",
|
||||
"response_hybrid_weight": "语义检索 : 全文检索 = {{emb}} : {{text}}",
|
||||
"response_rerank_tokens": "重排模型 Tokens",
|
||||
"search_results": "搜索结果",
|
||||
"select": "选择",
|
||||
"select_file": "上传文件",
|
||||
"select_file_img": "上传文件/图片",
|
||||
"select_img": "上传图片",
|
||||
"source_cronJob": "定时执行",
|
||||
"stream_output": "流输出",
|
||||
"to_dataset": "前往知识库",
|
||||
"unsupported_file_type": "不支持的文件类型",
|
||||
"upload": "上传",
|
||||
"variable_invisable_in_share": "自定义变量在免登录链接中不可见",
|
||||
|
||||
@@ -215,6 +215,7 @@
|
||||
"core.app.Interval timer run": "定时执行",
|
||||
"core.app.Interval timer tip": "可定时执行应用",
|
||||
"core.app.Make a brief introduction of your app": "给你的 AI 应用一个介绍",
|
||||
"core.app.name": "名称",
|
||||
"core.app.Name and avatar": "头像 & 名称",
|
||||
"core.app.Publish": "发布",
|
||||
"core.app.Publish Confirm": "确认发布应用?会立即更新所有发布渠道的应用状态。",
|
||||
@@ -402,7 +403,6 @@
|
||||
"core.chat.response.module model": "模型",
|
||||
"core.chat.response.module name": "模型名",
|
||||
"core.chat.response.module query": "问题/检索词",
|
||||
"core.chat.response.module quoteList": "引用内容",
|
||||
"core.chat.response.module similarity": "相似度",
|
||||
"core.chat.response.module temperature": "温度",
|
||||
"core.chat.response.module time": "运行时长",
|
||||
@@ -433,7 +433,6 @@
|
||||
"core.dataset.Text collection": "文本数据集",
|
||||
"core.dataset.apiFile": "API 文件",
|
||||
"core.dataset.collection.Click top config website": "点击配置网站",
|
||||
"core.dataset.collection.Collection name": "数据集名称",
|
||||
"core.dataset.collection.Collection raw text": "数据集内容",
|
||||
"core.dataset.collection.Empty Tip": "数据集空空如也",
|
||||
"core.dataset.collection.QA Prompt": "QA 拆分引导词",
|
||||
@@ -450,7 +449,6 @@
|
||||
"core.dataset.collection.metadata.metadata": "元数据",
|
||||
"core.dataset.collection.metadata.read source": "查看原始内容",
|
||||
"core.dataset.collection.metadata.source": "数据来源",
|
||||
"core.dataset.collection.metadata.source name": "来源名",
|
||||
"core.dataset.collection.metadata.source size": "来源大小",
|
||||
"core.dataset.collection.status.active": "已就绪",
|
||||
"core.dataset.collection.status.error": "训练异常",
|
||||
@@ -742,7 +740,7 @@
|
||||
"core.workflow.value": "值",
|
||||
"core.workflow.variable": "变量",
|
||||
"create": "去创建",
|
||||
"create_failed": "创建异常",
|
||||
"create_failed": "创建失败",
|
||||
"create_success": "创建成功",
|
||||
"create_time": "创建时间",
|
||||
"cron_job_run_app": "定时任务",
|
||||
@@ -787,7 +785,6 @@
|
||||
"dataset.dataset_name": "知识库名称",
|
||||
"dataset.deleteFolderTips": "确认删除该文件夹及其包含的所有知识库?删除后数据无法恢复,请确认!",
|
||||
"dataset.test.noResult": "搜索结果为空",
|
||||
"dataset_data_import_q_placeholder": "最多 {{maxToken}} 字。",
|
||||
"dataset_data_input_a": "答案",
|
||||
"dataset_data_input_chunk": "常规模式",
|
||||
"dataset_data_input_chunk_content": "内容",
|
||||
@@ -801,7 +798,6 @@
|
||||
"delete_success": "删除成功",
|
||||
"delete_warning": "删除警告",
|
||||
"embedding_model_not_config": "检测到没有可用的索引模型",
|
||||
"error.Create failed": "创建失败",
|
||||
"error.code_error": "验证码错误",
|
||||
"error.fileNotFound": "文件找不到了~",
|
||||
"error.inheritPermissionError": "权限继承错误",
|
||||
@@ -1207,6 +1203,7 @@
|
||||
"templateTags.Writing": "文本创作",
|
||||
"template_market": "模板市场",
|
||||
"textarea_variable_picker_tip": "输入\"/\"可选择变量",
|
||||
"to_dataset": "前往知识库",
|
||||
"ui.textarea.Magnifying": "放大",
|
||||
"un_used": "未使用",
|
||||
"unauth_token": "凭证已过期,请重新登录",
|
||||
@@ -1305,4 +1302,4 @@
|
||||
"zoomin_tip_mac": "缩小 ⌘ -",
|
||||
"zoomout_tip": "放大 ctrl +",
|
||||
"zoomout_tip_mac": "放大 ⌘ +"
|
||||
}
|
||||
}
|
||||
@@ -28,16 +28,21 @@
|
||||
"collection.training_type": "处理模式",
|
||||
"collection_data_count": "数据量",
|
||||
"collection_metadata_custom_pdf_parse": "PDF增强解析",
|
||||
"collection_name": "数据集名称",
|
||||
"collection_not_support_retraining": "该集合类型不支持重新调整参数",
|
||||
"collection_not_support_sync": "该集合不支持同步",
|
||||
"collection_sync": "立即同步",
|
||||
"collection_sync_confirm_tip": "确认开始同步数据?系统将会拉取最新数据进行比较,如果内容不相同,则会创建一个新的集合并删除旧的集合,请确认!",
|
||||
"collection_tags": "集合标签",
|
||||
"common.dataset.data.Input Error Tip": "[图片数据集] 处理过程错误:",
|
||||
"common.error.unKnow": "未知错误",
|
||||
"common_dataset": "通用知识库",
|
||||
"common_dataset_desc": "通过导入文件、网页链接或手动录入形式构建知识库",
|
||||
"condition": "条件",
|
||||
"config_sync_schedule": "配置定时同步",
|
||||
"confirm_import_images": "共 {{num}} 张图片 | 确认创建",
|
||||
"confirm_to_rebuild_embedding_tip": "确认为知识库切换索引?\n切换索引是一个非常重量的操作,需要对您知识库内所有数据进行重新索引,时间可能较长,请确保账号内剩余积分充足。\n\n此外,你还需要注意修改选择该知识库的应用,避免它们与其他索引模型知识库混用。",
|
||||
"core.dataset.Image collection": "图片数据集",
|
||||
"core.dataset.import.Adjust parameters": "调整参数",
|
||||
"custom_data_process_params": "自定义",
|
||||
"custom_data_process_params_desc": "自定义设置数据处理规则",
|
||||
@@ -90,6 +95,7 @@
|
||||
"image_auto_parse": "图片自动索引",
|
||||
"image_auto_parse_tips": "调用 VLM 自动标注文档里的图片,并生成额外的检索索引",
|
||||
"image_training_queue": "图片处理排队",
|
||||
"images_creating": "正在创建",
|
||||
"immediate_sync": "立即同步",
|
||||
"import.Auto mode Estimated Price Tips": "需调用文本理解模型,需要消耗较多AI 积分:{{price}} 积分/1K tokens",
|
||||
"import.Embedding Estimated Price Tips": "仅使用索引模型,消耗少量 AI 积分:{{price}} 积分/1K tokens",
|
||||
@@ -104,6 +110,8 @@
|
||||
"index_size": "索引大小",
|
||||
"index_size_tips": "向量化时内容的长度,系统会自动按该大小对分块进行进一步的分割。",
|
||||
"input_required_field_to_select_baseurl": "请先输入必填信息",
|
||||
"insert_images": "新增图片",
|
||||
"insert_images_success": "新增图片成功,需等待训练完成才会展示",
|
||||
"is_open_schedule": "启用定时同步",
|
||||
"keep_image": "保留图片",
|
||||
"loading": "加载中...",
|
||||
@@ -135,6 +143,7 @@
|
||||
"process.Image_Index": "图片索引生成",
|
||||
"process.Is_Ready": "已就绪",
|
||||
"process.Is_Ready_Count": "{{count}} 组已就绪",
|
||||
"process.Parse_Image": "图片解析中",
|
||||
"process.Parsing": "内容解析中",
|
||||
"process.Vectorizing": "索引向量化",
|
||||
"process.Waiting": "排队中",
|
||||
@@ -176,11 +185,14 @@
|
||||
"the_knowledge_base_has_indexes_that_are_being_trained_or_being_rebuilt": "知识库有训练中或正在重建的索引",
|
||||
"total_num_files": "共 {{total}} 个文件",
|
||||
"training.Error": "{{count}} 组异常",
|
||||
"training.Image mode": "图片处理",
|
||||
"training.Normal": "正常",
|
||||
"training_mode": "处理方式",
|
||||
"training_ready": "{{count}} 组",
|
||||
"uploading_progress": "上传中: {{num}}%",
|
||||
"vector_model_max_tokens_tip": "每个分块数据,最大长度为 3000 tokens",
|
||||
"vllm_model": "图片理解模型",
|
||||
"vlm_model_required_warning": "需要图片理解模型",
|
||||
"website_dataset": "Web 站点同步",
|
||||
"website_dataset_desc": "通过爬虫,批量爬取网页数据构建知识库",
|
||||
"website_info": "网站信息",
|
||||
|
||||
@@ -1,9 +1,33 @@
|
||||
{
|
||||
"Action": "请选择要上传的图片",
|
||||
"All images import failed": "所有图片导入失败",
|
||||
"Dataset_ID_not_found": "数据集ID不存在",
|
||||
"Failed_to_get_token": "获取令牌失败",
|
||||
"Image_ID_copied": "已复制ID",
|
||||
"Image_Preview": "图片预览",
|
||||
"Image_dataset_requires_VLM_model_to_be_configured": "图片数据集需要配置图片理解模型(VLM)才能使用,请先在模型配置中添加支持图片理解的模型",
|
||||
"Image_does_not_belong_to_current_team": "图片不属于当前团队",
|
||||
"Image_file_does_not_exist": "图片不存在",
|
||||
"Loading_image": "加载图片中...",
|
||||
"Loading_image failed": "预览加载失败",
|
||||
"Only_support_uploading_one_image": "仅支持上传一张图片",
|
||||
"image_description_tip": "请输入图片的描述内容",
|
||||
"Please select the image to upload": "请选择要上传的图片",
|
||||
"Please wait for all files to upload": "请等待所有文件上传完成",
|
||||
"bucket_chat": "对话文件",
|
||||
"bucket_file": "知识库文件",
|
||||
"click_to_view_raw_source": "点击查看来源",
|
||||
"common.Some images failed to process": "部分图片处理失败",
|
||||
"common.dataset_data_input_image_support_format": "支持 .jpg, .jpeg, .png, .gif, .webp 格式",
|
||||
"count.core.dataset.collection.Create Success": "成功导入 {{count}} 张图片",
|
||||
"delete_image": "删除图片",
|
||||
"file_name": "文件名",
|
||||
"file_size": "文件大小",
|
||||
"image": "图片",
|
||||
"image_collection": "图片集合",
|
||||
"image_description": "图片描述",
|
||||
"please_upload_image_first": "请先上传图片",
|
||||
"reached_max_file_count": "已达到最大文件数量",
|
||||
"release_the_mouse_to_upload_the_file": "松开鼠标上传文件",
|
||||
"select_and_drag_file_tip": "点击或拖动文件到此处上传",
|
||||
"select_file_amount_limit": "最多选择 {{max}} 个文件",
|
||||
@@ -12,7 +36,9 @@
|
||||
"support_file_type": "支持 {{fileType}} 类型文件",
|
||||
"support_max_count": "最多支持 {{maxCount}} 个文件",
|
||||
"support_max_size": "单个文件最大 {{maxSize}}",
|
||||
"total_files": "共{{selectFiles.length}}个文件",
|
||||
"upload_error_description": "单次只支持上传多个文件或者一个文件夹",
|
||||
"upload_failed": "上传异常",
|
||||
"reached_max_file_count": "已达到最大文件数量",
|
||||
"upload_error_description": "单次只支持上传多个文件或者一个文件夹"
|
||||
}
|
||||
"upload_file_error": "请上传图片",
|
||||
"uploading": "正在上传..."
|
||||
}
|
||||
|
||||
@@ -6,8 +6,12 @@
|
||||
"accept": "接受",
|
||||
"action": "操作",
|
||||
"assign_permission": "權限變更",
|
||||
"audit_log": "審計",
|
||||
"change_department_name": "部門編輯",
|
||||
"change_member_name": "成員改名",
|
||||
"change_member_name_self": "變更成員名",
|
||||
"change_notification_settings": "變更通知接收途徑",
|
||||
"change_password": "更改密碼",
|
||||
"confirm_delete_from_org": "確認將 {{username}} 移出部門?",
|
||||
"confirm_delete_from_team": "確認將 {{username}} 移出團隊?",
|
||||
"confirm_delete_group": "確認刪除群組?",
|
||||
@@ -15,22 +19,50 @@
|
||||
"confirm_forbidden": "確認停用",
|
||||
"confirm_leave_team": "確認離開該團隊? \n結束後,您在該團隊所有的資源轉讓給團隊所有者。",
|
||||
"copy_link": "複製連結",
|
||||
"create_api_key": "創建api密鑰",
|
||||
"create_app": "創建應用",
|
||||
"create_app_copy": "創建應用副本",
|
||||
"create_app_folder": "創建應用文件夾",
|
||||
"create_app_publish_channel": "創建分享渠道",
|
||||
"create_data": "插入數據",
|
||||
"create_dataset": "創建知識庫",
|
||||
"create_dataset_folder": "創建知識庫文件夾",
|
||||
"create_department": "創建子部門",
|
||||
"create_group": "建立群組",
|
||||
"create_invitation_link": "建立邀請連結",
|
||||
"create_invoice": "開發票",
|
||||
"create_org": "建立部門",
|
||||
"create_sub_org": "建立子部門",
|
||||
"dataset.api_file": "API 匯入",
|
||||
"dataset.common_dataset": "知識庫",
|
||||
"dataset.external_file": "外部文件",
|
||||
"dataset.feishu_dataset": "飛書多維表格",
|
||||
"dataset.folder_dataset": "資料夾",
|
||||
"dataset.website_dataset": "網站同步",
|
||||
"dataset.yuque_dataset": "語雀知識庫",
|
||||
"delete": "刪除",
|
||||
"delete_api_key": "刪除api密鑰",
|
||||
"delete_app": "刪除工作台應用",
|
||||
"delete_app_collaborator": "應用權限刪除",
|
||||
"delete_app_publish_channel": "刪除發布渠道",
|
||||
"delete_collection": "刪除集合",
|
||||
"delete_data": "刪除數據",
|
||||
"delete_dataset": "刪除知識庫",
|
||||
"delete_dataset_collaborator": "知識庫權限刪除",
|
||||
"delete_department": "刪除子部門",
|
||||
"delete_from_org": "移出部門",
|
||||
"delete_from_team": "移出團隊",
|
||||
"delete_group": "刪除群組",
|
||||
"delete_org": "刪除部門",
|
||||
"department": "部門",
|
||||
"edit_info": "編輯訊息",
|
||||
"edit_member": "編輯使用者",
|
||||
"edit_member_tip": "成員名",
|
||||
"edit_org_info": "編輯部門資訊",
|
||||
"expires": "過期時間",
|
||||
"export_app_chat_log": "導出應用聊天記錄",
|
||||
"export_bill_records": "導出賬單記錄",
|
||||
"export_dataset": "導出知識庫",
|
||||
"export_members": "導出成員",
|
||||
"forbid_hint": "停用後,該邀請連結將失效。該操作不可撤銷,是否確認停用?",
|
||||
"forbid_success": "停用成功",
|
||||
@@ -56,28 +88,69 @@
|
||||
"log_assign_permission": "【{{name}}】更新了【{{objectName}}】的權限:[應用創建:【{{appCreate}}】, 知識庫:【{{datasetCreate}}】, API密鑰:【{{apiKeyCreate}}】, 管理:【{{manage}}】]",
|
||||
"log_change_department": "【{{name}}】更新了部門【{{departmentName}}】",
|
||||
"log_change_member_name": "【{{name}}】將成員【{{memberName}}】重命名為【{{newName}}】",
|
||||
"log_change_member_name_self": "【{{name}}】變更自己的成員名為【{{newName}}】",
|
||||
"log_change_notification_settings": "【{{name}}】進行了變更通知接收途徑操作",
|
||||
"log_change_password": "【{{name}}】進行了變更密碼操作",
|
||||
"log_create_api_key": "【{{name}}】創建了名為【{{keyName}}】的api密鑰",
|
||||
"log_create_app": "【{{name}}】創建了名為【{{appName}}】的【{{appType}}】",
|
||||
"log_create_app_copy": "【{{name}}】給名為【{{appName}}】的【{{appType}}】創建了一個副本",
|
||||
"log_create_app_folder": "【{{name}}】創建了名為【{{folderName}}】的文件夾",
|
||||
"log_create_app_publish_channel": "【{{name}}】給名為【{{appName}}】的【{{appType}}】創建了名為【{{channelName}}】的渠道",
|
||||
"log_create_collection": "【{{name}}】在名為【{{datasetName}}】的【{{datasetType}}】創建了名為【{{collectionName}}】的集合",
|
||||
"log_create_data": "【{{name}}】在名為【{{datasetName}}】的【{{datasetType}}】往名為【{{collectionName}}】的集合插入數據",
|
||||
"log_create_dataset": "【{{name}}】創建了名為【{{datasetName}}】的【{{datasetType}}】",
|
||||
"log_create_dataset_folder": "【{{name}}】創建了名為{{folderName}}】的文件夾",
|
||||
"log_create_department": "【{{name}}】創建了部門【{{departmentName}}】",
|
||||
"log_create_group": "【{{name}}】創建了群組【{{groupName}}】",
|
||||
"log_create_invitation_link": "【{{name}}】創建了邀請鏈接【{{link}}】",
|
||||
"log_create_invoice": "【{{name}}】進行了開發票操作",
|
||||
"log_delete_api_key": "【{{name}}】刪除了名為【{{keyName}}】的api密鑰",
|
||||
"log_delete_app": "【{{name}}】將名為【{{appName}}】的【{{appType}}】刪除",
|
||||
"log_delete_app_collaborator": "【{{name}}】將名為【{{appName}}】的【{{appType}}】中名為【{{itemValueName}}】的【{{itemName}}】權限刪除",
|
||||
"log_delete_app_publish_channel": "【{{name}}】名為【{{appName}}】的【{{appType}}】刪除了名為【{{channelName}}】的渠道",
|
||||
"log_delete_collection": "【{{name}}】在名為【{{datasetName}}】的【{{datasetType}}】刪除了名為【{{collectionName}}】的集合",
|
||||
"log_delete_data": "【{{name}}】在名為【{{datasetName}}】的【{{datasetType}}】在名為【{{collectionName}}】的集合刪除數據",
|
||||
"log_delete_dataset": "【{{name}}】刪除了名為【{{datasetName}}】的【{{datasetType}}】",
|
||||
"log_delete_dataset_collaborator": "【{{name}}】將名為【{{datasetName}}】的【{{datasetType}}】中名為【itemValueName】的【itemName】權限刪除",
|
||||
"log_delete_department": "{{name}} 刪除了部門 {{departmentName}}",
|
||||
"log_delete_group": "{{name}} 刪除了群組 {{groupName}}",
|
||||
"log_details": "詳情",
|
||||
"log_export_app_chat_log": "【{{name}}】導出了名為【{{appName}}】的【{{appType}}】的聊天記錄",
|
||||
"log_export_bill_records": "【{{name}}】導出了賬單記錄",
|
||||
"log_export_dataset": "【{{name}}】導出了名為【{{datasetName}}】的【{{datasetType}}】",
|
||||
"log_join_team": "【{{name}}】通過邀請鏈接【{{link}}】加入團隊",
|
||||
"log_kick_out_team": "{{name}} 移除了成員 {{memberName}}",
|
||||
"log_login": "【{{name}}】登錄了系統",
|
||||
"log_move_app": "【{{name}}】將名為【{{appName}}】的【{{appType}}】移動到【{{targetFolderName}}】",
|
||||
"log_move_dataset": "【{{name}}】將名為【{{datasetName}}】的【{{datasetType}}】移動到【{{targetFolderName}}】",
|
||||
"log_recover_team_member": "【{{name}}】恢復了成員【{{memberName}}】",
|
||||
"log_relocate_department": "【{{name}}】移動了部門【{{departmentName}}】",
|
||||
"log_retrain_collection": "【{{name}}】在名為【{{datasetName}}】的【{{datasetType}}】重新訓練了名為【{{collectionName}}】的集合",
|
||||
"log_search_test": "【{{name}}】在名為【{{datasetName}}】的【{{datasetType}}】執行搜索測試操作",
|
||||
"log_set_invoice_header": "【{{name}}】進行了設置發票抬頭操作",
|
||||
"log_time": "操作時間",
|
||||
"log_transfer_app_ownership": "【{{name}}】將名為【{{appName}}】的【{{appType}}】的所有權從【{{oldOwnerName}}】轉移到【{{newOwnerName}}】",
|
||||
"log_transfer_dataset_ownership": "【{{name}}】將名為【{{datasetName}}】的【{{datasetType}}】的所有權從【{{oldOwnerName}}】轉移到【{{newOwnerName}}】",
|
||||
"log_type": "操作類型",
|
||||
"log_update_api_key": "【{{name}}】更新了名為【{{keyName}}】的api密鑰",
|
||||
"log_update_app_collaborator": "【{{name}}】將名為【{{appName}}】的【{{appType}}】的合作者更新為:組織:【{{orgList}}】,群組:【{{groupList}}】,成員【{{tmbList}}】;權限更新為:讀權限:【{{readPermission}}】,寫權限:【{{writePermission}}】,管理員權限:【{{managePermission}}】",
|
||||
"log_update_app_info": "【{{name}}】更新了名為【{{appName}}】的【{{appType}}】:【{{newItemNames}}】為【{{newItemValues}}】",
|
||||
"log_update_app_publish_channel": "【{{name}}】給名為【{{appName}}】的【{{appType}}】更新了名為【{{channelName}}】的渠道",
|
||||
"log_update_collection": "【{{name}}】在名為【{{datasetName}}】的【{{datasetType}}】更新了名為【{{collectionName}}】的集合",
|
||||
"log_update_data": "【{{name}}】在名為【{{datasetName}}】的【{{datasetType}}】在名為【{{collectionName}}】的集合更新數據",
|
||||
"log_update_dataset": "【{{name}}】更新了名為【{{datasetName}}】的【{{datasetType}}】",
|
||||
"log_update_dataset_collaborator": "【{{name}}】將名為【{{datasetName}}】的【{{datasetType}}】的合作者更新為:組織:【{{orgList}}】,群組:【{{groupList}}】,成員【{{tmbList}}】;權限更新為:【{{readPermission}}】,【{{writePermission}}】,【{{managePermission}}】",
|
||||
"log_update_publish_app": "【{{name}}】【{{operationName}}】名為【{{appName}}】的【{{appType}}】",
|
||||
"log_user": "操作人員",
|
||||
"login": "登入",
|
||||
"manage_member": "管理成員",
|
||||
"member": "成員",
|
||||
"member_group": "所屬成員組",
|
||||
"move_app": "應用位置移動",
|
||||
"move_dataset": "移動知識庫",
|
||||
"move_member": "移動成員",
|
||||
"move_org": "行動部門",
|
||||
"notification_recieve": "團隊通知接收",
|
||||
"operation_log": "紀錄",
|
||||
"org": "組織",
|
||||
"org_description": "介紹",
|
||||
"org_name": "部門名稱",
|
||||
@@ -92,6 +165,7 @@
|
||||
"permission_manage": "管理員",
|
||||
"permission_manage_tip": "可以管理成員、建立群組、管理所有群組、為群組和成員分配權限",
|
||||
"please_bind_contact": "請綁定聯繫方式",
|
||||
"purchase_plan": "升級套餐",
|
||||
"recover_team_member": "成員恢復",
|
||||
"relocate_department": "部門移動",
|
||||
"remark": "備註",
|
||||
@@ -99,17 +173,40 @@
|
||||
"restore_tip": "確認將 {{username}} 加入團隊嗎?\n僅恢復該成員賬號可用性及相關權限,無法恢復賬號下資源。",
|
||||
"restore_tip_title": "恢復確認",
|
||||
"retain_admin_permissions": "保留管理員權限",
|
||||
"retrain_collection": "重新訓練集合",
|
||||
"save_and_publish": "儲存並發布",
|
||||
"search_log": "搜索日誌",
|
||||
"search_member": "搜索成員",
|
||||
"search_member_group_name": "搜尋成員/群組名稱",
|
||||
"search_org": "搜索部門",
|
||||
"search_test": "搜索測試",
|
||||
"set_invoice_header": "設置發票抬頭",
|
||||
"set_name_avatar": "團隊頭像",
|
||||
"sync_immediately": "立即同步",
|
||||
"sync_member_failed": "同步成員失敗",
|
||||
"sync_member_success": "同步成員成功",
|
||||
"total_team_members": "共 {{amount}} 名成員",
|
||||
"transfer_app_ownership": "轉移應用程式所有權",
|
||||
"transfer_dataset_ownership": "轉移知識庫所有權",
|
||||
"transfer_ownership": "轉讓所有者",
|
||||
"type.Folder": "資料夾",
|
||||
"type.Http plugin": "HTTP 外掛",
|
||||
"type.Plugin": "外掛",
|
||||
"type.Simple bot": "簡易應用程式",
|
||||
"type.Tool": "工具",
|
||||
"type.Tool set": "工具集",
|
||||
"type.Workflow bot": "工作流程",
|
||||
"unlimited": "無限制",
|
||||
"update": "更新",
|
||||
"update_api_key": "更新api密鑰",
|
||||
"update_app_collaborator": "應用權限更改",
|
||||
"update_app_info": "應用信息修改",
|
||||
"update_app_publish_channel": "更新發布渠道",
|
||||
"update_collection": "更新集合",
|
||||
"update_data": "更新數據",
|
||||
"update_dataset": "更新知識庫",
|
||||
"update_dataset_collaborator": "知識庫權限更改",
|
||||
"update_publish_app": "應用更新",
|
||||
"used_times_limit": "有效人數",
|
||||
"user_name": "使用者名稱",
|
||||
"user_team_invite_member": "邀請成員",
|
||||
|
||||
@@ -198,6 +198,9 @@
|
||||
"type.MCP_tools_url": "MCP 地址",
|
||||
"type.Plugin": "外掛",
|
||||
"type.Simple bot": "簡易應用程式",
|
||||
"type.Folder": "資料夾",
|
||||
"type.Tool set": "工具集",
|
||||
"type.Tool": "工具",
|
||||
"type.Workflow bot": "工作流程",
|
||||
"type.error.Workflow data is empty": "沒有獲取到工作流數據",
|
||||
"type.error.workflowresponseempty": "響應內容為空",
|
||||
@@ -238,4 +241,4 @@
|
||||
"workflow.user_file_input_desc": "使用者上傳的檔案和圖片連結",
|
||||
"workflow.user_select": "使用者選擇",
|
||||
"workflow.user_select_tip": "這個模組可以設定多個選項,供對話時選擇。不同選項可以導向不同的工作流程支線"
|
||||
}
|
||||
}
|
||||
@@ -71,13 +71,13 @@
|
||||
"response_embedding_model_tokens": "向量模型 Tokens",
|
||||
"response_hybrid_weight": "語義檢索 : 全文檢索 = {{emb}} : {{text}}",
|
||||
"response_rerank_tokens": "重排模型 Tokens",
|
||||
"search_results": "搜索結果",
|
||||
"select": "選取",
|
||||
"select_file": "上傳檔案",
|
||||
"select_file_img": "上傳檔案 / 圖片",
|
||||
"select_img": "上傳圖片",
|
||||
"source_cronJob": "定時執行",
|
||||
"stream_output": "串流輸出",
|
||||
"to_dataset": "前往知識庫",
|
||||
"unsupported_file_type": "不支援的檔案類型",
|
||||
"upload": "上傳",
|
||||
"variable_invisable_in_share": "自定義變數在免登入連結中不可見",
|
||||
|
||||
@@ -215,6 +215,7 @@
|
||||
"core.app.Interval timer run": "排程執行",
|
||||
"core.app.Interval timer tip": "可排程執行應用程式",
|
||||
"core.app.Make a brief introduction of your app": "為您的 AI 應用程式寫一段介紹",
|
||||
"core.app.name": "名稱",
|
||||
"core.app.Name and avatar": "頭像與名稱",
|
||||
"core.app.Publish": "發布",
|
||||
"core.app.Publish Confirm": "確認發布應用程式?這將立即更新所有發布管道的應用程式狀態。",
|
||||
@@ -402,7 +403,6 @@
|
||||
"core.chat.response.module model": "模型",
|
||||
"core.chat.response.module name": "模型名稱",
|
||||
"core.chat.response.module query": "問題/搜尋詞",
|
||||
"core.chat.response.module quoteList": "引用內容",
|
||||
"core.chat.response.module similarity": "相似度",
|
||||
"core.chat.response.module temperature": "溫度",
|
||||
"core.chat.response.module time": "執行時長",
|
||||
@@ -433,7 +433,6 @@
|
||||
"core.dataset.Text collection": "文字資料集",
|
||||
"core.dataset.apiFile": "API 檔案",
|
||||
"core.dataset.collection.Click top config website": "點選設定網站",
|
||||
"core.dataset.collection.Collection name": "資料集名稱",
|
||||
"core.dataset.collection.Collection raw text": "資料集內容",
|
||||
"core.dataset.collection.Empty Tip": "資料集是空的",
|
||||
"core.dataset.collection.QA Prompt": "問答拆分提示詞",
|
||||
@@ -450,7 +449,6 @@
|
||||
"core.dataset.collection.metadata.metadata": "中繼資料",
|
||||
"core.dataset.collection.metadata.read source": "檢視原始內容",
|
||||
"core.dataset.collection.metadata.source": "資料來源",
|
||||
"core.dataset.collection.metadata.source name": "來源名稱",
|
||||
"core.dataset.collection.metadata.source size": "來源大小",
|
||||
"core.dataset.collection.status.active": "已就緒",
|
||||
"core.dataset.collection.status.error": "訓練異常",
|
||||
@@ -554,7 +552,7 @@
|
||||
"core.dataset.training.Agent queue": "問答訓練排隊中",
|
||||
"core.dataset.training.Auto mode": "補充索引",
|
||||
"core.dataset.training.Auto mode Tip": "透過子索引以及呼叫模型產生相關問題與摘要,來增加資料區塊的語意豐富度,更有利於檢索。需要消耗更多的儲存空間並增加 AI 呼叫次數。",
|
||||
"core.dataset.training.Chunk mode": "分塊存儲",
|
||||
"core.dataset.training.Chunk mode": "分塊儲存",
|
||||
"core.dataset.training.Full": "預計 20 分鐘以上",
|
||||
"core.dataset.training.Leisure": "閒置",
|
||||
"core.dataset.training.QA mode": "問答對提取",
|
||||
@@ -787,7 +785,6 @@
|
||||
"dataset.dataset_name": "知識庫名稱",
|
||||
"dataset.deleteFolderTips": "確認刪除此資料夾及其包含的所有知識庫?刪除後資料無法復原,請確認!",
|
||||
"dataset.test.noResult": "搜尋結果為空",
|
||||
"dataset_data_import_q_placeholder": "最多 {{maxToken}} 字。",
|
||||
"dataset_data_input_a": "答案",
|
||||
"dataset_data_input_chunk": "常規模式",
|
||||
"dataset_data_input_chunk_content": "內容",
|
||||
@@ -801,7 +798,6 @@
|
||||
"delete_success": "刪除成功",
|
||||
"delete_warning": "刪除警告",
|
||||
"embedding_model_not_config": "偵測到沒有可用的索引模型",
|
||||
"error.Create failed": "建立失敗",
|
||||
"error.code_error": "驗證碼錯誤",
|
||||
"error.fileNotFound": "找不到檔案",
|
||||
"error.inheritPermissionError": "繼承權限錯誤",
|
||||
@@ -1207,6 +1203,7 @@
|
||||
"templateTags.Writing": "文字創作",
|
||||
"template_market": "模板市場",
|
||||
"textarea_variable_picker_tip": "輸入「/」以選擇變數",
|
||||
"to_dataset": "前往知識庫",
|
||||
"ui.textarea.Magnifying": "放大",
|
||||
"un_used": "未使用",
|
||||
"unauth_token": "憑證已過期,請重新登入",
|
||||
@@ -1305,4 +1302,4 @@
|
||||
"zoomin_tip_mac": "縮小 ⌘ -",
|
||||
"zoomout_tip": "放大 ctrl +",
|
||||
"zoomout_tip_mac": "放大 ⌘ +"
|
||||
}
|
||||
}
|
||||
@@ -26,16 +26,21 @@
|
||||
"collection.training_type": "處理模式",
|
||||
"collection_data_count": "資料量",
|
||||
"collection_metadata_custom_pdf_parse": "PDF 增強解析",
|
||||
"collection_name": "數據集名稱",
|
||||
"collection_not_support_retraining": "此集合類型不支援重新調整參數",
|
||||
"collection_not_support_sync": "該集合不支援同步",
|
||||
"collection_sync": "立即同步",
|
||||
"collection_sync_confirm_tip": "確認開始同步資料?\n系統將會拉取最新資料進行比較,如果內容不相同,則會建立一個新的集合並刪除舊的集合,請確認!",
|
||||
"collection_tags": "集合標籤",
|
||||
"common.dataset.data.Input Error Tip": "[圖片數據集] 處理過程錯誤:",
|
||||
"common.error.unKnow": "未知錯誤",
|
||||
"common_dataset": "通用資料集",
|
||||
"common_dataset_desc": "通過導入文件、網頁鏈接或手動錄入形式構建知識庫",
|
||||
"condition": "條件",
|
||||
"config_sync_schedule": "設定定時同步",
|
||||
"confirm_import_images": "共 {{num}} 張圖片 | 確認創建",
|
||||
"confirm_to_rebuild_embedding_tip": "確定要為資料集切換索引嗎?\n切換索引是一個重要的操作,需要對您資料集內所有資料重新建立索引,可能需要較長時間,請確保帳號內剩餘點數充足。\n\n此外,您還需要注意修改使用此資料集的應用程式,避免與其他索引模型資料集混用。",
|
||||
"core.dataset.Image collection": "圖片數據集",
|
||||
"core.dataset.import.Adjust parameters": "調整參數",
|
||||
"custom_data_process_params": "自訂",
|
||||
"custom_data_process_params_desc": "自訂資料處理規則",
|
||||
@@ -88,6 +93,7 @@
|
||||
"image_auto_parse": "圖片自動索引",
|
||||
"image_auto_parse_tips": "呼叫 VLM 自動標註文件裡的圖片,並生成額外的檢索索引",
|
||||
"image_training_queue": "圖片處理排隊",
|
||||
"images_creating": "正在創建",
|
||||
"immediate_sync": "立即同步",
|
||||
"import.Auto mode Estimated Price Tips": "需呼叫文字理解模型,將消耗較多 AI 點數:{{price}} 點數 / 1K tokens",
|
||||
"import.Embedding Estimated Price Tips": "僅使用索引模型,消耗少量 AI 點數:{{price}} 點數 / 1K tokens",
|
||||
@@ -102,6 +108,8 @@
|
||||
"index_size": "索引大小",
|
||||
"index_size_tips": "向量化時內容的長度,系統會自動按該大小對分塊進行進一步的分割。",
|
||||
"input_required_field_to_select_baseurl": "請先輸入必填信息",
|
||||
"insert_images": "新增圖片",
|
||||
"insert_images_success": "新增圖片成功,需等待訓練完成才會展示",
|
||||
"is_open_schedule": "啟用定時同步",
|
||||
"keep_image": "保留圖片",
|
||||
"loading": "加載中...",
|
||||
@@ -133,6 +141,7 @@
|
||||
"process.Image_Index": "圖片索引生成",
|
||||
"process.Is_Ready": "已就緒",
|
||||
"process.Is_Ready_Count": "{{count}} 組已就緒",
|
||||
"process.Parse_Image": "圖片解析中",
|
||||
"process.Parsing": "內容解析中",
|
||||
"process.Vectorizing": "索引向量化",
|
||||
"process.Waiting": "排隊中",
|
||||
@@ -174,11 +183,13 @@
|
||||
"the_knowledge_base_has_indexes_that_are_being_trained_or_being_rebuilt": "資料集有索引正在訓練或重建中",
|
||||
"total_num_files": "共 {{total}} 個文件",
|
||||
"training.Error": "{{count}} 組異常",
|
||||
"training.Image mode": "圖片處理",
|
||||
"training.Normal": "正常",
|
||||
"training_mode": "分段模式",
|
||||
"training_ready": "{{count}} 組",
|
||||
"vector_model_max_tokens_tip": "每個分塊資料,最大長度為 3000 tokens",
|
||||
"vllm_model": "圖片理解模型",
|
||||
"vlm_model_required_warning": "需要圖片理解模型",
|
||||
"website_dataset": "網站同步",
|
||||
"website_dataset_desc": "通過爬蟲,批量爬取網頁數據構建知識庫",
|
||||
"website_info": "網站資訊",
|
||||
|
||||
@@ -1,9 +1,31 @@
|
||||
{
|
||||
"Action": "請選擇要上傳的圖片",
|
||||
"All images import failed": "所有圖片導入失敗",
|
||||
"Dataset_ID_not_found": "數據集ID不存在",
|
||||
"Failed_to_get_token": "獲取令牌失敗",
|
||||
"Image_ID_copied": "已複製ID",
|
||||
"Image_Preview": "圖片預覽",
|
||||
"Image_dataset_requires_VLM_model_to_be_configured": "圖片數據集需要配置圖片理解模型(VLM)才能使用,請先在模型配置中添加支持圖片理解的模型",
|
||||
"Image_does_not_belong_to_current_team": "圖片不屬於當前團隊",
|
||||
"Image_file_does_not_exist": "圖片不存在",
|
||||
"Loading_image": "加載圖片中...",
|
||||
"Loading_image_failed": "預覽加載失敗",
|
||||
"Only_support_uploading_one_image": "僅支持上傳一張圖片",
|
||||
"image_description_tip": "請輸入圖片的描述內容",
|
||||
"Please select the image to upload": "請選擇要上傳的圖片",
|
||||
"Please select the image to upload select the image to upload": "",
|
||||
"Please wait for all files to upload": "請等待所有文件上傳完成",
|
||||
"bucket_chat": "對話檔案",
|
||||
"bucket_file": "知識庫檔案",
|
||||
"click_to_view_raw_source": "點選檢視原始來源",
|
||||
"dataset_data_input_image_support_format": "支持 .jpg, .jpeg, .png, .gif, .webp 格式",
|
||||
"delete_image": "刪除圖片",
|
||||
"file_name": "檔案名稱",
|
||||
"file_size": "檔案大小",
|
||||
"image": "圖片",
|
||||
"image_collection": "圖片集合",
|
||||
"please_upload_image_first": "請先上傳圖片",
|
||||
"reached_max_file_count": "已達檔案數量上限",
|
||||
"release_the_mouse_to_upload_the_file": "放開滑鼠以上傳檔案",
|
||||
"select_and_drag_file_tip": "點選或拖曳檔案至此處上傳",
|
||||
"select_file_amount_limit": "最多可選擇 {{max}} 個檔案",
|
||||
@@ -12,7 +34,9 @@
|
||||
"support_file_type": "支援 {{fileType}} 格式的檔案",
|
||||
"support_max_count": "最多可支援 {{maxCount}} 個檔案",
|
||||
"support_max_size": "單一檔案大小上限為 {{maxSize}}",
|
||||
"total_files": "共{{selectFiles.length}}個文件",
|
||||
"upload_error_description": "單次僅支援上傳多個檔案或一個資料夾",
|
||||
"upload_failed": "上傳失敗",
|
||||
"reached_max_file_count": "已達檔案數量上限",
|
||||
"upload_error_description": "單次僅支援上傳多個檔案或一個資料夾"
|
||||
}
|
||||
"upload_file_error": "請上傳圖片",
|
||||
"uploading": "正在上傳..."
|
||||
}
|
||||
|
||||