V4.8.15 feature (#3331)

* feat: add customize toolkit (#3205)

* chaoyang

* fix-auth

* add toolkit

* add order

* plugin usage

* fix

* delete console:

* Fix: Fix fullscreen preview top positioning and improve Markdown rendering logic (#3247)

* 完成任务:修复全屏预览顶部固定问题,优化 Markdown 渲染逻辑

* 有问题修改

* 问题再修改

* 修正问题

* fix: plugin standalone display issue (#3254)

* 4.8.15 test (#3246)

* o1 config

* perf: system plugin code

* 调整系统插件代码。增加html 渲染安全配置。 (#3258)

* perf: base64 picker

* perf: list app or dataset

* perf: plugin config code

* 小窗适配等问题 (#3257)

* 小窗适配等问题

* git问题

* 小窗剩余问题

* feat: system plugin auth and lock version (#3265)

* feat: system plugin auth and lock version

* update comment

* 4.8.15 test (#3267)

* tmp log

* perf: login direct

* perf: iframe html code

* remove log

* fix: plugin standalone display (#3277)

* refactor: 页面拆分&i18n拆分 (#3281)

* refactor: account组件拆成独立页面

* script: 新增i18n json文件创建脚本

* refactor: 页面i18n拆分

* i18n: add en&hant

* 4.8.15 test (#3285)

* tmp log

* remove log

* fix: watch avatar refresh

* perf: i18n code

* fix(plugin): use intro instead of userguide (#3290)

* Universal SSO (#3292)

* tmp log

* remove log

* feat: common oauth

* readme

* perf: sso provider

* remove sso code

* perf: refresh plugins

* feat: add api dataset (#3272)

* add api-dataset

* fix api-dataset

* fix api dataset

* fix ts

* perf: create collection code (#3301)

* tmp log

* remove log

* perf: i18n change

* update version doc

* feat: question guide from chatId

* perf: create collection code

* fix: request api

* fix: request api

* fix: tts auth and response type (#3303)

* perf: md splitter

* fix: tts auth and response type

* fix: api file dataset (#3307)

* perf: api dataset init (#3310)

* perf: collection schema

* perf: api dataset init

* refactor: 团队管理独立页面 (#3302)

* ui: 团队管理独立页面

* 代码优化

* fix

* perf: sync collection and ui check (#3314)

* perf: sync collection

* remove script

* perf: update api server

* perf: api dataset parent

* perf: team ui

* perf: team i18n

* update team ui

* perf: ui check

* perf: i18n

* fix: debug variables & cronjob & system plugin callback load (#3315)

* fix: debug variables & cronjob & system plugin callback load

* fix type

* fix

* fix

* fix: plugin dataset quote;perf: system variables init (#3316)

* fix: plugin dataset quote

* perf: system variables init

* perf: node templates ui;fix: dataset import ui (#3318)

* fix: dataset import ui

* perf: node templates ui

* perf: ui refresh

* feat:套餐改名和套餐跳转配置 (#3309)

* fixing:except Sidebar

* 去除了多余的代码

* 修正了套餐说明的代码

* 修正了误删除的show_git代码

* 修正了名字部分等代码

* 修正了问题,遗留了其他和ui讨论不一致的部分

* 4.8.15 test (#3319)

* remove log

* perf: bill ui

* perf: bill ui

* perf: log

* html渲染文档 (#3270)

* html渲染文档

* 文档有点小问题

* feat: doc (#3322)

* 集合重训练 (#3282)

* rebaser

* 一点补充

* 小问题

* 其他问题修正,删除集合保留文件的参数还没找到...

* reTraining

* delete useless

* 删除了一行错误代码

* 集合重训练部分

* fixing

* 删除console代码

* feat: navbar item config (#3326)

* perf: custom navbar code;perf: retraining code;feat: api dataset and dataset api doc (#3329)

* feat: api dataset and dataset api doc

* perf: retraining code

* perf: custom navbar code

* fix: ts (#3330)

* fix: ts

* fix: ts

* retraining ui

* perf: api collection filter

* perf: retraining button

---------

Co-authored-by: heheer <heheer@sealos.io>
Co-authored-by: Jiangween <145003935+Jiangween@users.noreply.github.com>
Co-authored-by: papapatrick <109422393+Patrickill@users.noreply.github.com>
This commit is contained in:
Archer
2024-12-06 10:56:53 +08:00
committed by GitHub
parent b188544386
commit 1aebe5f185
307 changed files with 7383 additions and 3981 deletions

View File

@@ -73,7 +73,7 @@ const CollectionPageContextProvider = ({ children }: { children: ReactNode }) =>
// website config
const { openConfirm: openWebSyncConfirm, ConfirmModal: ConfirmWebSyncModal } = useConfirm({
content: t('common:core.dataset.collection.Start Sync Tip')
content: t('dataset:start_sync_website_tip')
});
const {
isOpen: isOpenWebsiteModal,

View File

@@ -1,4 +1,4 @@
import React, { useCallback, useRef } from 'react';
import React from 'react';
import {
Box,
Flex,
@@ -55,7 +55,6 @@ const Header = ({}: {}) => {
const { parentId = '' } = router.query as { parentId: string };
const { isPc } = useSystem();
const lastSearch = useRef('');
const { searchText, setSearchText, total, getData, pageNum, onOpenWebsiteModal } =
useContextSelector(CollectionPageContext, (v) => v);
@@ -386,6 +385,34 @@ const Header = ({}: {}) => {
]}
/>
)}
{/* apiDataset */}
{datasetDetail?.type === DatasetTypeEnum.apiDataset && (
<Flex
px={3.5}
py={2}
borderRadius={'sm'}
cursor={'pointer'}
bg={'primary.500'}
overflow={'hidden'}
color={'white'}
onClick={() =>
router.replace({
query: {
...router.query,
currentTab: TabEnum.import,
source: ImportDataSourceEnum.apiDataset
}
})
}
>
<Flex h={'20px'} alignItems={'center'}>
<MyIcon name={'common/folderImport'} mr={2} w={'18px'} h={'18px'} color={'white'} />
</Flex>
<Box h={'20px'} fontSize={'sm'} fontWeight={'500'}>
{t('dataset:add_file')}
</Box>
</Flex>
)}
</Box>
)}

View File

@@ -28,7 +28,8 @@ import { useEditTitle } from '@/web/common/hooks/useEditTitle';
import {
DatasetCollectionTypeEnum,
DatasetStatusEnum,
DatasetCollectionSyncResultMap
DatasetCollectionSyncResultMap,
DatasetTypeEnum
} from '@fastgpt/global/core/dataset/constants';
import { getCollectionIcon } from '@fastgpt/global/core/dataset/utils';
import { TabEnum } from '../../index';
@@ -41,7 +42,6 @@ import MyBox from '@fastgpt/web/components/common/MyBox';
import { useContextSelector } from 'use-context-selector';
import { CollectionPageContext } from './Context';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import { useI18n } from '@/web/context/I18n';
import { formatTime2YMDHM } from '@fastgpt/global/common/string/time';
import MyTag from '@fastgpt/web/components/common/Tag/index';
import {
@@ -60,7 +60,6 @@ const CollectionCard = () => {
const router = useRouter();
const { toast } = useToast();
const { t } = useTranslation();
const { datasetT } = useI18n();
const { datasetDetail, loadDatasetDetail } = useContextSelector(DatasetPageContext, (v) => v);
const { feConfigs } = useSystemStore();
@@ -68,9 +67,6 @@ const CollectionCard = () => {
content: t('common:dataset.Confirm to delete the file'),
type: 'delete'
});
const { openConfirm: openSyncConfirm, ConfirmModal: ConfirmSyncModal } = useConfirm({
content: t('common:core.dataset.collection.Start Sync Tip')
});
const { onOpenModal: onOpenEditTitleModal, EditModal: EditTitleModal } = useEditTitle({
title: t('common:Rename')
@@ -89,7 +85,7 @@ const CollectionCard = () => {
const status = (() => {
if (collection.trainingAmount > 0) {
return {
statusText: t('dataset.collections.Collection Embedding', {
statusText: t('common:dataset.collections.Collection Embedding', {
total: collection.trainingAmount
}),
colorSchema: 'gray'
@@ -134,6 +130,9 @@ const CollectionCard = () => {
}
);
const { openConfirm: openSyncConfirm, ConfirmModal: ConfirmSyncModal } = useConfirm({
content: t('dataset:collection_sync_confirm_tip')
});
const { runAsync: onclickStartSync, loading: isSyncing } = useRequest2(postLinkCollectionSync, {
onSuccess(res: DatasetCollectionSyncResultEnum) {
getData(pageNum);
@@ -195,11 +194,11 @@ const CollectionCard = () => {
<Thead draggable={false}>
<Tr>
<Th py={4}>{t('common:common.Name')}</Th>
<Th py={4}>{datasetT('collection.Training type')}</Th>
<Th py={4}>{t('dataset:collection.Training type')}</Th>
<Th py={4}>{t('common:dataset.collections.Data Amount')}</Th>
<Th py={4}>{datasetT('collection.Create update time')}</Th>
<Th py={4}>{t('dataset:collection.Create update time')}</Th>
<Th py={4}>{t('common:common.Status')}</Th>
<Th py={4}>{datasetT('Enable')}</Th>
<Th py={4}>{t('dataset:Enable')}</Th>
<Th py={4} />
</Tr>
</Thead>
@@ -219,14 +218,14 @@ const CollectionCard = () => {
if (collection.type === DatasetCollectionTypeEnum.folder) {
router.push({
query: {
...router.query,
datasetId: datasetDetail._id,
parentId: collection._id
}
});
} else {
router.push({
query: {
...router.query,
datasetId: datasetDetail._id,
collectionId: collection._id,
currentTab: TabEnum.dataCard
}
@@ -311,7 +310,8 @@ const CollectionCard = () => {
menuList={[
{
children: [
...(collection.type === DatasetCollectionTypeEnum.link
...(collection.type === DatasetCollectionTypeEnum.link ||
datasetDetail.type === DatasetTypeEnum.apiDataset
? [
{
label: (
@@ -321,7 +321,7 @@ const CollectionCard = () => {
w={'0.9rem'}
mr={2}
/>
{t('common:core.dataset.collection.Sync')}
{t('dataset:collection_sync')}
</Flex>
),
onClick: () =>

View File

@@ -28,6 +28,11 @@ import MyDivider from '@fastgpt/web/components/common/MyDivider';
import Markdown from '@/components/Markdown';
import { useMemoizedFn } from 'ahooks';
import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import { TabEnum } from './NavBar';
import {
DatasetCollectionTypeEnum,
ImportDataSourceEnum
} from '@fastgpt/global/core/dataset/constants';
const DataCard = () => {
const theme = useTheme();
@@ -137,20 +142,37 @@ const DataCard = () => {
<TagsPopOver currentCollection={collection} />
)}
</Box>
{datasetDetail.type !== 'websiteDataset' && !!collection?.chunkSize && (
<Button
ml={2}
variant={'whitePrimary'}
size={['sm', 'md']}
onClick={() => {
router.push({
query: {
datasetId,
currentTab: TabEnum.import,
source: ImportDataSourceEnum.reTraining,
collectionId
}
});
}}
>
{t('dataset:retain_collection')}
</Button>
)}
{canWrite && (
<Box>
<Button
ml={2}
variant={'whitePrimary'}
size={['sm', 'md']}
onClick={() => {
if (!collection) return;
setEditDataId('');
}}
>
{t('common:dataset.Insert Data')}
</Button>
</Box>
<Button
ml={2}
variant={'whitePrimary'}
size={['sm', 'md']}
isDisabled={!collection}
onClick={() => {
setEditDataId('');
}}
>
{t('common:dataset.Insert Data')}
</Button>
)}
</Flex>
<Box justifyContent={'center'} px={6} pos={'relative'} w={'100%'}>

View File

@@ -86,6 +86,10 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
// step
const modeSteps: Record<ImportDataSourceEnum, { title: string }[]> = {
[ImportDataSourceEnum.reTraining]: [
{ title: t('dataset:core.dataset.import.Adjust parameters') },
{ title: t('common:core.dataset.import.Upload data') }
],
[ImportDataSourceEnum.fileLocal]: [
{
title: t('common:core.dataset.import.Select file')
@@ -140,6 +144,17 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
{
title: t('common:core.dataset.import.Upload data')
}
],
[ImportDataSourceEnum.apiDataset]: [
{
title: t('common:core.dataset.import.Select file')
},
{
title: t('common:core.dataset.import.Data Preprocessing')
},
{
title: t('common:core.dataset.import.Upload data')
}
]
};
const steps = modeSteps[source];

View File

@@ -260,7 +260,7 @@ function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean
</Button>
</Flex>
</Box>
<Box flex={'1 0 0'} w={['auto', '0']} h={['auto', '100%']} overflow={'auto'} pl={[0, 3]}>
<Box flex={'1 0 0'} w={['auto', '0']} h={['auto', '100%']} pl={[0, 3]}>
<Preview showPreviewChunks={showPreviewChunks} />
</Box>

View File

@@ -1,4 +1,4 @@
import React, { useMemo } from 'react';
import React, { useMemo, useRef } from 'react';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import {
Box,
@@ -17,29 +17,34 @@ import {
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
import { useTranslation } from 'next-i18next';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { useRouter } from 'next/router';
import { TabEnum } from '../../../index';
import {
postCreateDatasetApiDatasetCollection,
postCreateDatasetCsvTableCollection,
postCreateDatasetExternalFileCollection,
postCreateDatasetFileCollection,
postCreateDatasetLinkCollection,
postCreateDatasetTextCollection
postCreateDatasetTextCollection,
postReTrainingDatasetFileCollection
} from '@/web/core/dataset/api';
import MyTag from '@fastgpt/web/components/common/Tag/index';
import { useI18n } from '@/web/context/I18n';
import { useContextSelector } from 'use-context-selector';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import { DatasetImportContext, type ImportFormType } from '../Context';
const Upload = () => {
const { t } = useTranslation();
const { fileT } = useI18n();
const { toast } = useToast();
const router = useRouter();
const { collectionId = '' } = router.query as {
collectionId: string;
};
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const retrainNewCollectionId = useRef('');
const { importSource, parentId, sources, setSources, processParamsForm, chunkSize } =
useContextSelector(DatasetImportContext, (v) => v);
@@ -71,8 +76,8 @@ const Upload = () => {
}
}, [waitingFilesCount, totalFilesCount, allFinished, t]);
const { mutate: startUpload, isLoading } = useRequest({
mutationFn: async ({ mode, customSplitChar, qaPrompt, webSelector }: ImportFormType) => {
const { runAsync: startUpload, loading: isLoading } = useRequest2(
async ({ mode, customSplitChar, qaPrompt, webSelector }: ImportFormType) => {
if (sources.length === 0) return;
const filterWaitingSources = sources.filter((item) => item.createStatus === 'waiting');
@@ -100,7 +105,13 @@ const Upload = () => {
name: item.sourceName
};
if (importSource === ImportDataSourceEnum.fileLocal && item.dbFileId) {
if (importSource === ImportDataSourceEnum.reTraining) {
const res = await postReTrainingDatasetFileCollection({
...commonParams,
collectionId
});
retrainNewCollectionId.current = res.collectionId;
} else if (importSource === ImportDataSourceEnum.fileLocal && item.dbFileId) {
await postCreateDatasetFileCollection({
...commonParams,
fileId: item.dbFileId
@@ -131,6 +142,11 @@ const Upload = () => {
externalFileId: item.externalFileId,
filename: item.sourceName
});
} else if (importSource === ImportDataSourceEnum.apiDataset && item.apiFileId) {
await postCreateDatasetApiDatasetCollection({
...commonParams,
apiFileId: item.apiFileId
});
}
setSources((state) =>
@@ -145,40 +161,46 @@ const Upload = () => {
);
}
},
onSuccess() {
if (!sources.some((file) => file.errorMsg !== undefined)) {
toast({
title: t('common:core.dataset.import.Import success'),
status: 'success'
});
}
// close import page
router.replace({
query: {
...router.query,
currentTab: TabEnum.collectionCard
{
onSuccess() {
if (!sources.some((file) => file.errorMsg !== undefined)) {
toast({
title:
importSource === ImportDataSourceEnum.reTraining
? t('dataset:retrain_task_submitted')
: t('common:core.dataset.import.Import success'),
status: 'success'
});
}
});
},
onError(error) {
setSources((state) =>
state.map((source) =>
source.createStatus === 'creating'
? {
...source,
createStatus: 'waiting',
errorMsg: error.message || fileT('upload_failed')
}
: source
)
);
},
errorToast: fileT('upload_failed')
});
// Close import page
router.replace({
query: {
datasetId: datasetDetail._id,
currentTab: retrainNewCollectionId.current ? TabEnum.dataCard : TabEnum.collectionCard,
collectionId: retrainNewCollectionId.current
}
});
},
onError(error) {
setSources((state) =>
state.map((source) =>
source.createStatus === 'creating'
? {
...source,
createStatus: 'waiting',
errorMsg: error.message || t('file:upload_failed')
}
: source
)
);
},
errorToast: t('file:upload_failed')
}
);
return (
<Box>
<Box h={'100%'} overflow={'auto'}>
<TableContainer>
<Table variant={'simple'} fontSize={'sm'} draggable={false}>
<Thead draggable={false}>

View File

@@ -24,7 +24,7 @@ const Preview = ({ showPreviewChunks }: { showPreviewChunks: boolean }) => {
<MyIcon name={'core/dataset/fileCollection'} w={'20px'} />
<Box fontSize={'md'}>{t('common:core.dataset.import.Sources list')}</Box>
</Flex>
<Box mt={3} flex={'1 0 0'} width={'100%'} overflowY={'auto'}>
<Box mt={3} flex={'1 0 0'} h={['auto', 0]} width={'100%'} overflowY={'auto'}>
<Grid w={'100%'} gap={3} gridTemplateColumns={['1fr', '1fr', '1fr', '1fr', '1fr 1fr']}>
{sources.map((source) => (
<Flex

View File

@@ -1,17 +1,15 @@
import React, { useMemo } from 'react';
import { Box, Flex } from '@chakra-ui/react';
import React from 'react';
import { Box } from '@chakra-ui/react';
import { ImportSourceItemType } from '@/web/core/dataset/type';
import { useQuery } from '@tanstack/react-query';
import MyRightDrawer from '@fastgpt/web/components/common/MyDrawer/MyRightDrawer';
import { getPreviewChunks } from '@/web/core/dataset/api';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
import { importType2ReadType } from '@fastgpt/global/core/dataset/read';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import { getPreviewSourceReadType } from '../utils';
const PreviewChunks = ({
previewSource,
@@ -20,11 +18,11 @@ const PreviewChunks = ({
previewSource: ImportSourceItemType;
onClose: () => void;
}) => {
const { toast } = useToast();
const { importSource, chunkSize, chunkOverlapRatio, processParamsForm } = useContextSelector(
DatasetImportContext,
(v) => v
);
const datasetId = useContextSelector(DatasetPageContext, (v) => v.datasetId);
const { data = [], loading: isLoading } = useRequest2(
async () => {
@@ -41,28 +39,24 @@ const PreviewChunks = ({
a: ''
}));
}
if (importSource === ImportDataSourceEnum.csvTable) {
return getPreviewChunks({
type: importType2ReadType(importSource),
sourceId:
previewSource.dbFileId || previewSource.link || previewSource.externalFileUrl || '',
chunkSize,
overlapRatio: chunkOverlapRatio,
customSplitChar: processParamsForm.getValues('customSplitChar'),
selector: processParamsForm.getValues('webSelector'),
isQAImport: true
});
}
return getPreviewChunks({
type: importType2ReadType(importSource),
datasetId,
type: getPreviewSourceReadType(previewSource),
sourceId:
previewSource.dbFileId || previewSource.link || previewSource.externalFileUrl || '',
previewSource.dbFileId ||
previewSource.link ||
previewSource.externalFileUrl ||
previewSource.apiFileId ||
'',
chunkSize,
overlapRatio: chunkOverlapRatio,
customSplitChar: processParamsForm.getValues('customSplitChar'),
selector: processParamsForm.getValues('webSelector'),
isQAImport: false
isQAImport: importSource === ImportDataSourceEnum.csvTable,
externalFileId: previewSource.externalFileId
});
},
{

View File

@@ -1,7 +1,6 @@
import React from 'react';
import { Box } from '@chakra-ui/react';
import { ImportSourceItemType } from '@/web/core/dataset/type';
import { useQuery } from '@tanstack/react-query';
import { getPreviewFileContent } from '@/web/common/file/api';
import MyRightDrawer from '@fastgpt/web/components/common/MyDrawer/MyRightDrawer';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
@@ -9,7 +8,9 @@ import { useToast } from '@fastgpt/web/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
import { importType2ReadType } from '@fastgpt/global/core/dataset/read';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { getPreviewSourceReadType } from '../utils';
const PreviewRawText = ({
previewSource,
@@ -20,32 +21,34 @@ const PreviewRawText = ({
}) => {
const { toast } = useToast();
const { importSource, processParamsForm } = useContextSelector(DatasetImportContext, (v) => v);
const datasetId = useContextSelector(DatasetPageContext, (v) => v.datasetId);
const { data, isLoading } = useQuery(
['previewSource', previewSource.dbFileId, previewSource.link, previewSource.externalFileUrl],
() => {
const { data, loading: isLoading } = useRequest2(
async () => {
if (importSource === ImportDataSourceEnum.fileCustom && previewSource.rawText) {
return {
previewContent: previewSource.rawText.slice(0, 3000)
};
}
if (importSource === ImportDataSourceEnum.csvTable && previewSource.dbFileId) {
return getPreviewFileContent({
type: importType2ReadType(importSource),
sourceId: previewSource.dbFileId,
isQAImport: true
});
}
return getPreviewFileContent({
type: importType2ReadType(importSource),
datasetId,
type: getPreviewSourceReadType(previewSource),
sourceId:
previewSource.dbFileId || previewSource.link || previewSource.externalFileUrl || '',
isQAImport: false,
selector: processParamsForm.getValues('webSelector')
previewSource.dbFileId ||
previewSource.link ||
previewSource.externalFileUrl ||
previewSource.apiFileId ||
'',
isQAImport: importSource === ImportDataSourceEnum.csvTable,
selector: processParamsForm.getValues('webSelector'),
externalFileId: previewSource.externalFileId
});
},
{
refreshDeps: [previewSource.dbFileId, previewSource.link, previewSource.externalFileUrl],
manual: false,
onError(err) {
toast({
status: 'warning',

View File

@@ -0,0 +1,280 @@
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
import React, { useCallback, useMemo, useState } from 'react';
import dynamic from 'next/dynamic';
import Loading from '@fastgpt/web/components/common/MyLoading';
import { Box, Button, Checkbox, Flex } from '@chakra-ui/react';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { getApiDatasetFileList, getApiDatasetFileListExistId } from '@/web/core/dataset/api';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useTranslation } from 'react-i18next';
import { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type';
import FolderPath from '@/components/common/folder/Path';
import { getSourceNameIcon } from '@fastgpt/global/core/dataset/utils';
import MyBox from '@fastgpt/web/components/common/MyBox';
import { APIFileItem } from '@fastgpt/global/core/dataset/apiDataset';
import SearchInput from '@fastgpt/web/components/common/Input/SearchInput';
import { useMount } from 'ahooks';
const DataProcess = dynamic(() => import('../commonProgress/DataProcess'), {
loading: () => <Loading fixed={false} />
});
const Upload = dynamic(() => import('../commonProgress/Upload'));
const APIDatasetCollection = () => {
const activeStep = useContextSelector(DatasetImportContext, (v) => v.activeStep);
return (
<>
{activeStep === 0 && <CustomAPIFileInput />}
{activeStep === 1 && <DataProcess showPreviewChunks={true} />}
{activeStep === 2 && <Upload />}
</>
);
};
export default React.memo(APIDatasetCollection);
/**
 * First wizard step of the API-dataset import: lets the user browse the
 * remote API file server (folders + files), search, and pick files to import.
 *
 * Selected files are expanded recursively (folders are flattened into their
 * contained files), already-imported files are filtered out, and the result is
 * written into the import context as `sources` before advancing to the next step.
 */
const CustomAPIFileInput = () => {
  const { t } = useTranslation();
  const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
  const goToNext = useContextSelector(DatasetImportContext, (v) => v.goToNext);
  const sources = useContextSelector(DatasetImportContext, (v) => v.sources);
  const setSources = useContextSelector(DatasetImportContext, (v) => v.setSources);

  // Files/folders the user has ticked (may span multiple folders).
  const [selectFiles, setSelectFiles] = useState<APIFileItem[]>([]);
  // Currently browsed folder; empty parentId means the server root.
  const [parent, setParent] = useState<ParentTreePathItemType>({
    parentId: '',
    parentName: ''
  });
  const [searchKey, setSearchKey] = useState('');

  // Listing of the current folder, re-fetched when folder or search changes.
  // Throttled so fast typing in the search box does not spam the server.
  const { data: fileList = [], loading } = useRequest2(
    async () =>
      datasetDetail?.apiServer
        ? getApiDatasetFileList({
            datasetId: datasetDetail._id,
            parentId: parent?.parentId,
            searchKey: searchKey
          })
        : [],
    {
      refreshDeps: [datasetDetail._id, datasetDetail.apiServer, parent, searchKey],
      throttleWait: 500,
      manual: false
    }
  );

  // Ids of files already imported into this dataset; these are shown
  // checked-and-disabled and are excluded from new selections.
  const { data: existIdList = [] } = useRequest2(
    () => getApiDatasetFileListExistId({ datasetId: datasetDetail._id }),
    {
      manual: false
    }
  );

  // Restore selection when returning to this step from a later one.
  useMount(() => {
    setSelectFiles(sources.map((item) => item.apiFile).filter(Boolean) as APIFileItem[]);
  });

  // Items in the current folder that can still be selected (already-imported
  // files are locked, so "select all" must not touch them).
  const selectableFiles = useMemo(
    () => fileList.filter((item) => !existIdList.includes(item.id)),
    [fileList, existIdList]
  );

  // Membership-based "all selected" check. A plain count comparison
  // (fileList.length === selectFiles.length) is wrong when the selection
  // spans folders and is vacuously true for an empty folder.
  const isAllSelected = useMemo(
    () =>
      selectableFiles.length > 0 &&
      selectableFiles.every((item) => selectFiles.some((file) => file.id === item.id)),
    [selectableFiles, selectFiles]
  );

  const { runAsync: onclickNext, loading: onNextLoading } = useRequest2(
    async () => {
      // Flatten the selection: folders are expanded depth-first into the
      // files they contain (fetched on demand from the API server).
      const getFilesRecursively = async (files: APIFileItem[]): Promise<APIFileItem[]> => {
        const allFiles: APIFileItem[] = [];

        for (const file of files) {
          if (file.type === 'folder') {
            const folderFiles = await getApiDatasetFileList({
              datasetId: datasetDetail._id,
              parentId: file?.id
            });
            const subFiles = await getFilesRecursively(folderFiles);
            allFiles.push(...subFiles);
          } else {
            allFiles.push(file);
          }
        }

        return allFiles;
      };

      const allFiles = await getFilesRecursively(selectFiles);

      // Skip files that already exist in the dataset, then hand the rest to
      // the import context for the data-processing/upload steps.
      setSources(
        allFiles
          .filter((item) => !existIdList.includes(item.id))
          .map((item) => ({
            id: item.id,
            apiFileId: item.id,
            apiFile: item,
            createStatus: 'waiting',
            sourceName: item.name,
            // getSourceNameIcon returns a string icon name; cast matches the
            // ImportSourceItemType icon field.
            icon: getSourceNameIcon({ sourceName: item.name }) as any
          }))
      );
    },
    {
      onSuccess() {
        goToNext();
      }
    }
  );

  // Clicking a row: folders navigate into the folder, files toggle selection.
  const handleItemClick = useCallback(
    (item: APIFileItem) => {
      if (item.type === 'folder') {
        return setParent({
          parentId: item.id,
          parentName: item.name
        });
      }

      const isCurrentlySelected = selectFiles.some((file) => file.id === item.id);
      if (isCurrentlySelected) {
        setSelectFiles((state) => state.filter((file) => file.id !== item.id));
      } else {
        setSelectFiles((state) => [...state, item]);
      }
    },
    [selectFiles, setSelectFiles]
  );

  // Toggle selection of every selectable item in the CURRENT folder only,
  // preserving selections made in other folders.
  const handleSelectAll = useCallback(() => {
    if (isAllSelected) {
      setSelectFiles((state) =>
        state.filter((file) => !selectableFiles.some((item) => item.id === file.id))
      );
    } else {
      setSelectFiles((state) => [
        ...state,
        ...selectableFiles.filter((item) => !state.some((file) => file.id === item.id))
      ]);
    }
  }, [isAllSelected, selectableFiles]);

  const paths = useMemo(() => [parent || { parentId: '', parentName: '' }], [parent]);

  return (
    <MyBox isLoading={loading} position="relative" h="full">
      <Flex flexDirection={'column'} h="full">
        <Flex justifyContent={'space-between'}>
          <FolderPath
            paths={paths}
            onClick={(parentId) => {
              if (parentId !== parent?.parentId) {
                setParent({
                  parentId,
                  parentName: ''
                });
              }
            }}
          />
          <Box w={'240px'}>
            <SearchInput
              value={searchKey}
              onChange={(e) => setSearchKey(e.target.value)}
              placeholder={t('common:core.workflow.template.Search')}
            />
          </Box>
        </Flex>
        <Box flex={1} overflowY="auto" mb={16}>
          <Box ml={2} mt={3}>
            {/* "Select all" header row; the row itself is clickable, but a
                click on the checkbox must not toggle twice. */}
            <Flex
              alignItems={'center'}
              py={3}
              cursor={'pointer'}
              bg={'myGray.50'}
              pl={7}
              rounded={'8px'}
              fontSize={'sm'}
              fontWeight={'medium'}
              color={'myGray.900'}
              onClick={(e) => {
                if (!(e.target as HTMLElement).closest('.checkbox')) {
                  handleSelectAll();
                }
              }}
            >
              <Checkbox
                className="checkbox"
                mr={2}
                isChecked={isAllSelected}
                onChange={handleSelectAll}
              />
              {t('common:Select_all')}
            </Flex>
            {fileList.map((item) => {
              const isFolder = item.type === 'folder';
              const isExists = existIdList.includes(item.id);
              // Already-imported files render checked and disabled.
              const isChecked = isExists || selectFiles.some((file) => file.id === item.id);

              return (
                <Flex
                  key={item.id}
                  py={3}
                  _hover={{ bg: 'primary.50' }}
                  pl={7}
                  cursor={'pointer'}
                  onClick={(e) => {
                    if (isExists) return;
                    if (!(e.target as HTMLElement).closest('.checkbox')) {
                      handleItemClick(item);
                    }
                  }}
                >
                  <Checkbox
                    className="checkbox"
                    mr={2.5}
                    isChecked={isChecked}
                    isDisabled={isExists}
                    onChange={(e) => {
                      e.stopPropagation();
                      if (isExists) return;
                      if (isChecked) {
                        setSelectFiles((state) => state.filter((file) => file.id !== item.id));
                      } else {
                        setSelectFiles((state) => [...state, item]);
                      }
                    }}
                  />
                  <MyIcon
                    name={
                      !isFolder
                        ? (getSourceNameIcon({ sourceName: item.name }) as any)
                        : 'common/folderFill'
                    }
                    w={'18px'}
                    mr={1.5}
                  />
                  <Box fontSize={'sm'} fontWeight={'medium'} color={'myGray.900'}>
                    {item.name}
                  </Box>
                </Flex>
              );
            })}
          </Box>
        </Box>
        <Box
          position="absolute"
          display={'flex'}
          justifyContent={'end'}
          bottom={0}
          left={0}
          right={0}
          p={4}
        >
          <Button
            isDisabled={selectFiles.length === 0}
            isLoading={onNextLoading}
            onClick={onclickNext}
          >
            {selectFiles.length > 0
              ? `${t('common:core.dataset.import.Total files', { total: selectFiles.length })} | `
              : ''}
            {t('common:common.Next Step')}
          </Button>
        </Box>
      </Flex>
    </MyBox>
  );
};

View File

@@ -21,7 +21,6 @@ import Loading from '@fastgpt/web/components/common/MyLoading';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import { useI18n } from '@/web/context/I18n';
import { SmallAddIcon } from '@chakra-ui/icons';
const DataProcess = dynamic(() => import('../commonProgress/DataProcess'), {
@@ -45,7 +44,6 @@ export default React.memo(ExternalFileCollection);
const CustomLinkInput = () => {
const { t } = useTranslation();
const { datasetT, commonT } = useI18n();
const { goToNext, sources, setSources } = useContextSelector(DatasetImportContext, (v) => v);
const { register, reset, handleSubmit, control } = useForm<{
list: {
@@ -93,9 +91,9 @@ const CustomLinkInput = () => {
<Table bg={'white'}>
<Thead>
<Tr bg={'myGray.50'}>
<Th>{datasetT('external_url')}</Th>
<Th>{datasetT('external_id')}</Th>
<Th>{datasetT('filename')}</Th>
<Th>{t('dataset:external_url')}</Th>
<Th>{t('dataset:external_id')}</Th>
<Th>{t('dataset:filename')}</Th>
<Th></Th>
</Tr>
</Thead>
@@ -159,7 +157,7 @@ const CustomLinkInput = () => {
});
}}
>
{commonT('add_new')}
{t('common:add_new')}
</Button>
<Button
isDisabled={list.filter((item) => !!item.externalFileUrl).length === 0}

View File

@@ -0,0 +1,65 @@
import React from 'react';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
import dynamic from 'next/dynamic';
import DataProcess from '../commonProgress/DataProcess';
import { useRouter } from 'next/router';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { getDatasetCollectionById } from '@/web/core/dataset/api';
import MyBox from '@fastgpt/web/components/common/MyBox';
import { ImportProcessWayEnum } from '@/web/core/dataset/constants';
import { getCollectionIcon } from '@fastgpt/global/core/dataset/utils';
const Upload = dynamic(() => import('../commonProgress/Upload'));
/**
 * Re-training import flow: re-imports an existing collection's source with
 * (possibly adjusted) processing parameters.
 *
 * On mount it loads the collection identified by the `collectionId` query
 * param, seeds the import context's `sources` with that single collection,
 * and pre-fills the processing form from the collection's previous settings.
 * Step 0 shows the parameter/preview screen, step 1 the upload screen.
 */
const ReTraining = () => {
  const router = useRouter();
  // collectionId comes from the page URL; empty string if absent.
  const { collectionId = '' } = router.query as {
    collectionId: string;
  };
  const activeStep = useContextSelector(DatasetImportContext, (v) => v.activeStep);
  const setSources = useContextSelector(DatasetImportContext, (v) => v.setSources);
  const processParamsForm = useContextSelector(DatasetImportContext, (v) => v.processParamsForm);

  // manual: false → fetch immediately; refetches if collectionId changes.
  const { loading } = useRequest2(() => getDatasetCollectionById(collectionId), {
    refreshDeps: [collectionId],
    manual: false,
    onSuccess: (collection) => {
      // Only one of fileId / rawLink / apiFileId is expected to be set,
      // depending on the collection's original source type.
      setSources([
        {
          dbFileId: collection.fileId,
          link: collection.rawLink,
          apiFileId: collection.apiFileId,
          createStatus: 'waiting',
          icon: getCollectionIcon(collection.type, collection.name),
          id: collection._id,
          isUploading: false,
          sourceName: collection.name,
          // Source already exists server-side, so it is shown as fully uploaded.
          uploadedFileRate: 100
        }
      ]);
      // Pre-fill the processing form with the collection's previous settings
      // so re-training defaults to the original configuration.
      processParamsForm.reset({
        mode: collection.trainingType,
        way: ImportProcessWayEnum.auto,
        embeddingChunkSize: collection.chunkSize,
        qaChunkSize: collection.chunkSize,
        customSplitChar: collection.chunkSplitter,
        qaPrompt: collection.qaPrompt,
        webSelector: collection.metadata?.webSelector
      });
    }
  });

  return (
    <MyBox isLoading={loading} h={'100%'} overflow={'auto'}>
      {activeStep === 0 && <DataProcess showPreviewChunks={true} />}
      {activeStep === 1 && <Upload />}
    </MyBox>
  );
};
export default React.memo(ReTraining);

View File

@@ -10,20 +10,24 @@ const FileLink = dynamic(() => import('./diffSource/FileLink'));
const FileCustomText = dynamic(() => import('./diffSource/FileCustomText'));
const TableLocal = dynamic(() => import('./diffSource/TableLocal'));
const ExternalFileCollection = dynamic(() => import('./diffSource/ExternalFile'));
const APIDatasetCollection = dynamic(() => import('./diffSource/APIDataset'));
const ReTraining = dynamic(() => import('./diffSource/ReTraining'));
const ImportDataset = () => {
const importSource = useContextSelector(DatasetImportContext, (v) => v.importSource);
const ImportComponent = useMemo(() => {
if (importSource === ImportDataSourceEnum.reTraining) return ReTraining;
if (importSource === ImportDataSourceEnum.fileLocal) return FileLocal;
if (importSource === ImportDataSourceEnum.fileLink) return FileLink;
if (importSource === ImportDataSourceEnum.fileCustom) return FileCustomText;
if (importSource === ImportDataSourceEnum.csvTable) return TableLocal;
if (importSource === ImportDataSourceEnum.externalFile) return ExternalFileCollection;
if (importSource === ImportDataSourceEnum.apiDataset) return APIDatasetCollection;
}, [importSource]);
return ImportComponent ? (
<Box flex={'1 0 0'} overflow={'auto'} position={'relative'}>
<Box flex={'1 0 0'} overflow={'auto'}>
<ImportComponent />
</Box>
) : null;

View File

@@ -0,0 +1,23 @@
import { ImportSourceItemType } from '@/web/core/dataset/type';
import { DatasetSourceReadTypeEnum } from '@fastgpt/global/core/dataset/constants';
/**
 * Map an import source item to the read type used when fetching its preview.
 * Checks the source identifiers in priority order; falls back to a local
 * file read when none of them is present.
 */
export const getPreviewSourceReadType = (previewSource: ImportSourceItemType) => {
  const { dbFileId, link, apiFileId, externalFileId } = previewSource;

  if (dbFileId) return DatasetSourceReadTypeEnum.fileLocal;
  if (link) return DatasetSourceReadTypeEnum.link;
  if (apiFileId) return DatasetSourceReadTypeEnum.apiFile;
  if (externalFileId) return DatasetSourceReadTypeEnum.externalFile;

  // No recognizable source identifier — default to a local file read.
  return DatasetSourceReadTypeEnum.fileLocal;
};
// Placeholder default export so Next.js treats this utility module as a valid page.
const Dom = () => <></>;

export default Dom;

View File

@@ -0,0 +1,91 @@
import React from 'react';
import { ModalFooter, ModalBody, Input, Button, Flex } from '@chakra-ui/react';
import MyModal from '@fastgpt/web/components/common/MyModal/index';
import { useTranslation } from 'next-i18next';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { useForm } from 'react-hook-form';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { APIFileServer } from '@fastgpt/global/core/dataset/apiDataset';
export type EditAPIDatasetInfoFormType = {
  // Id of the dataset whose API server config is being edited.
  id: string;
  // Remote API file server config (base url and, per the edit form, an authorization value).
  apiServer?: APIFileServer;
};
const EditAPIDatasetInfoModal = ({
onClose,
onEdit,
title,
...defaultForm
}: EditAPIDatasetInfoFormType & {
title: string;
onClose: () => void;
onEdit: (data: EditAPIDatasetInfoFormType) => any;
}) => {
const { t } = useTranslation();
const { toast } = useToast();
const { register, handleSubmit } = useForm<EditAPIDatasetInfoFormType>({
defaultValues: defaultForm
});
const { runAsync: onSave, loading } = useRequest2(
(data: EditAPIDatasetInfoFormType) => onEdit(data),
{
onSuccess: (res) => {
toast({
title: t('common:common.Update Success'),
status: 'success'
});
onClose();
}
}
);
return (
<MyModal isOpen onClose={onClose} w={'450px'} iconSrc="modal/edit" title={title}>
<ModalBody>
<Flex>
<Flex
alignItems={'center'}
flex={['', '0 0 110px']}
color={'myGray.900'}
fontWeight={500}
fontSize={'sm'}
>
{t('dataset:api_url')}
</Flex>
<Input
bg={'myWhite.600'}
placeholder={t('dataset:api_url')}
maxLength={200}
{...register('apiServer.baseUrl', { required: true })}
/>
</Flex>
<Flex mt={6}>
<Flex
alignItems={'center'}
flex={['', '0 0 110px']}
color={'myGray.900'}
fontWeight={500}
fontSize={'sm'}
>
Authorization
</Flex>
<Input
bg={'myWhite.600'}
placeholder={t('dataset:request_headers')}
maxLength={200}
{...register('apiServer.authorization')}
/>
</Flex>
</ModalBody>
<ModalFooter>
<Button isLoading={loading} onClick={handleSubmit(onSave)} px={6}>
{t('common:common.Confirm')}
</Button>
</ModalFooter>
</MyModal>
);
};
export default EditAPIDatasetInfoModal;

View File

@@ -21,7 +21,7 @@ import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { DatasetPermissionList } from '@fastgpt/global/support/permission/dataset/constant';
import MemberManager from '../../component/MemberManager';
import MemberManager from '../../../component/MemberManager';
import {
getCollaboratorList,
postUpdateDatasetCollaborators,
@@ -29,6 +29,9 @@ import {
} from '@/web/core/dataset/api/collaborator';
import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
import dynamic from 'next/dynamic';
import EditAPIDatasetInfoModal, {
EditAPIDatasetInfoFormType
} from './components/EditApiServiceModal';
import { EditResourceInfoFormType } from '@/components/common/Modal/EditResourceModal';
const EditResourceModal = dynamic(() => import('@/components/common/Modal/EditResourceModal'));
@@ -37,6 +40,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
const { datasetDetail, loadDatasetDetail, updateDataset, rebuildingCount, trainingCount } =
useContextSelector(DatasetPageContext, (v) => v);
const [editedDataset, setEditedDataset] = useState<EditResourceInfoFormType>();
const [editedAPIDataset, setEditedAPIDataset] = useState<EditAPIDatasetInfoFormType>();
const refetchDatasetTraining = useContextSelector(
DatasetPageContext,
(v) => v.refetchDatasetTraining
@@ -126,7 +130,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
useEffect(() => {
reset(datasetDetail);
}, [datasetDetail._id]);
}, [datasetDetail, datasetDetail._id, reset]);
return (
<Box w={'100%'} h={'100%'} p={6}>
@@ -174,12 +178,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
<MyDivider my={4} h={'2px'} maxW={'500px'} />
<Box overflow={'hidden'}>
<Flex justify={'space-between'} alignItems={'center'} fontSize={'mini'} h={'24px'}>
<Box fontWeight={'500'} color={'myGray.900'} userSelect={'none'}>
{t('common:common.base_config')}
</Box>
</Flex>
<Flex mt={3} w={'100%'} flexDir={'column'}>
<Flex w={'100%'} flexDir={'column'}>
<FormLabel fontSize={'mini'} fontWeight={'500'}>
{t('common:core.dataset.Dataset ID')}
</FormLabel>
@@ -267,6 +266,31 @@ const Info = ({ datasetId }: { datasetId: string }) => {
</Box>
</>
)}
{datasetDetail.type === DatasetTypeEnum.apiDataset && (
<>
<Box w={'100%'} alignItems={'center'} pt={4}>
<Flex justifyContent={'space-between'} mb={1}>
<FormLabel fontSize={'mini'} fontWeight={'500'}>
{t('dataset:api_url')}
</FormLabel>
<MyIcon
name={'edit'}
w={'14px'}
_hover={{ color: 'primary.600' }}
cursor={'pointer'}
onClick={() =>
setEditedAPIDataset({
id: datasetDetail._id,
apiServer: datasetDetail.apiServer
})
}
/>
</Flex>
<Box fontSize={'mini'}>{datasetDetail.apiServer?.baseUrl}</Box>
</Box>
</>
)}
</Box>
{datasetDetail.permission.hasManagePer && (
@@ -321,6 +345,19 @@ const Info = ({ datasetId }: { datasetId: string }) => {
}
/>
)}
{editedAPIDataset && (
<EditAPIDatasetInfoModal
{...editedAPIDataset}
title={t('common:dataset.Edit API Service')}
onClose={() => setEditedAPIDataset(undefined)}
onEdit={(data) =>
updateDataset({
id: datasetId,
apiServer: data.apiServer
})
}
/>
)}
</Box>
);
};

View File

@@ -67,7 +67,7 @@ const MetaDataCard = ({ datasetId }: { datasetId: string }) => {
value: collection.rawTextLength ?? '-'
},
{
label: t('common:core.dataset.collection.metadata.Training Type'),
label: t('dataset:collection.Training type'),
value: t(TrainingTypeMap[collection.trainingType]?.label as any)
},
{

View File

@@ -23,7 +23,7 @@ import { useSystem } from '@fastgpt/web/hooks/useSystem';
const CollectionCard = dynamic(() => import('./components/CollectionCard/index'));
const DataCard = dynamic(() => import('./components/DataCard'));
const Test = dynamic(() => import('./components/Test'));
const Info = dynamic(() => import('./components/Info'));
const Info = dynamic(() => import('./components/Info/index'));
const Import = dynamic(() => import('./components/Import'));
export enum TabEnum {

View File

@@ -7,7 +7,7 @@ import { getErrText } from '@fastgpt/global/common/error/utils';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { useRouter } from 'next/router';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import Avatar from '@fastgpt/web/components/common/Avatar';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import MyModal from '@fastgpt/web/components/common/MyModal';
@@ -20,10 +20,12 @@ import AIModelSelector from '@/components/Select/AIModelSelector';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import ComplianceTip from '@/components/common/ComplianceTip/index';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { getDocPath } from '@/web/common/system/doc';
export type CreateDatasetType =
| DatasetTypeEnum.dataset
| DatasetTypeEnum.externalFile
| DatasetTypeEnum.apiDataset
| DatasetTypeEnum.websiteDataset;
const CreateModal = ({
@@ -44,7 +46,7 @@ const CreateModal = ({
const databaseNameMap = useMemo(() => {
return {
[DatasetTypeEnum.dataset]: t('dataset:common_dataset'),
[DatasetTypeEnum.externalFile]: t('dataset:external_file'),
[DatasetTypeEnum.apiDataset]: t('dataset:api_file'),
[DatasetTypeEnum.websiteDataset]: t('dataset:website_dataset')
};
}, [t]);
@@ -52,7 +54,7 @@ const CreateModal = ({
const iconMap = useMemo(() => {
return {
[DatasetTypeEnum.dataset]: 'core/dataset/commonDatasetColor',
[DatasetTypeEnum.externalFile]: 'core/dataset/externalDatasetColor',
[DatasetTypeEnum.apiDataset]: 'core/dataset/externalDatasetColor',
[DatasetTypeEnum.websiteDataset]: 'core/dataset/websiteDatasetColor'
};
}, []);
@@ -90,7 +92,7 @@ const CreateModal = ({
maxW: 300,
maxH: 300
});
setValue('avatar', src);
setValue('avatar' as const, src);
} catch (err: any) {
toast({
title: getErrText(err, t('common:common.avatar.Select Failed')),
@@ -102,17 +104,16 @@ const CreateModal = ({
);
/* create a new kb and router to it */
const { mutate: onclickCreate, isLoading: creating } = useRequest({
mutationFn: async (data: CreateDatasetParams) => {
const id = await postCreateDataset(data);
return id;
},
successToast: t('common:common.Create Success'),
errorToast: t('common:common.Create Failed'),
onSuccess(id) {
router.push(`/dataset/detail?datasetId=${id}`);
const { run: onclickCreate, loading: creating } = useRequest2(
async (data: CreateDatasetParams) => await postCreateDataset(data),
{
successToast: t('common:common.Create Success'),
errorToast: t('common:common.Create Failed'),
onSuccess(id) {
router.push(`/dataset/detail?datasetId=${id}`);
}
}
});
);
return (
<MyModal
@@ -129,9 +130,26 @@ const CreateModal = ({
>
<ModalBody py={6} px={9}>
<Box>
<Box color={'myGray.900'} fontWeight={500} fontSize={'sm'}>
{t('common:common.Set Name')}
</Box>
<Flex justify={'space-between'}>
<Box color={'myGray.900'} fontWeight={500} fontSize={'sm'}>
{t('common:common.Set Name')}
</Box>
{type === DatasetTypeEnum.apiDataset && (
<Flex
as={'span'}
alignItems={'center'}
color={'primary.600'}
fontSize={'sm'}
cursor={'pointer'}
onClick={() =>
window.open(getDocPath('/docs/guide/knowledge_base/api_dataset/'), '_blank')
}
>
<MyIcon name={'book'} w={4} mr={0.5} />
{t('common:Instructions')}
</Flex>
)}
</Flex>
<Flex mt={'12px'} alignItems={'center'}>
<MyTooltip label={t('common:common.avatar.Select Avatar')}>
<Avatar
@@ -185,7 +203,7 @@ const CreateModal = ({
value: item.model
}))}
onchange={(e) => {
setValue('vectorModel', e);
setValue('vectorModel' as const, e);
}}
/>
</Box>
@@ -218,12 +236,50 @@ const CreateModal = ({
value: item.model
}))}
onchange={(e) => {
setValue('agentModel', e);
setValue('agentModel' as const, e);
}}
/>
</Box>
</Flex>
)}
{type === DatasetTypeEnum.apiDataset && (
<>
<Flex mt={6}>
<Flex
alignItems={'center'}
flex={['', '0 0 110px']}
color={'myGray.900'}
fontWeight={500}
fontSize={'sm'}
>
{t('dataset:api_url')}
</Flex>
<Input
bg={'myWhite.600'}
placeholder={t('dataset:api_url')}
maxLength={200}
{...register('apiServer.baseUrl', { required: true })}
/>
</Flex>
<Flex mt={6}>
<Flex
alignItems={'center'}
flex={['', '0 0 110px']}
color={'myGray.900'}
fontWeight={500}
fontSize={'sm'}
>
Authorization
</Flex>
<Input
bg={'myWhite.600'}
placeholder={t('dataset:request_headers')}
maxLength={200}
{...register('apiServer.authorization')}
/>
</Flex>
</>
)}
</ModalBody>
<ModalFooter px={9}>

View File

@@ -11,7 +11,6 @@ import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import { useRequest, useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { checkTeamExportDatasetLimit } from '@/web/support/user/team/api';
import { downloadFetch } from '@/web/common/system/utils';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
@@ -28,7 +27,6 @@ import {
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import { useFolderDrag } from '@/components/common/folder/useFolderDrag';
import MyBox from '@fastgpt/web/components/common/MyBox';
import { useI18n } from '@/web/context/I18n';
import { useTranslation } from 'next-i18next';
import { useUserStore } from '@/web/support/user/useUserStore';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
@@ -40,7 +38,6 @@ function List() {
const { setLoading } = useSystemStore();
const { isPc } = useSystem();
const { t } = useTranslation();
const { commonT } = useI18n();
const { loadAndGetTeamMembers } = useUserStore();
const {
loadMyDatasets,
@@ -326,7 +323,7 @@ function List() {
children: [
{
icon: 'edit',
label: commonT('dataset.Edit Info'),
label: t('common:dataset.Edit Info'),
onClick: () =>
setEditedDataset({
id: dataset._id,
@@ -410,7 +407,7 @@ function List() {
{editedDataset && (
<EditResourceModal
{...editedDataset}
title={commonT('dataset.Edit Info')}
title={t('common:dataset.Edit Info')}
onClose={() => setEditedDataset(undefined)}
onEdit={async (data) => {
await onUpdateDataset({

View File

@@ -20,6 +20,10 @@ const SideTag = ({ type, ...props }: { type: `${DatasetTypeEnum}` } & FlexProps)
[DatasetTypeEnum.externalFile]: {
icon: 'core/dataset/externalDatasetOutline',
label: t('dataset:external_file')
},
[DatasetTypeEnum.apiDataset]: {
icon: 'core/dataset/externalDatasetOutline',
label: t('dataset:api_file')
}
};
}, [t]);

View File

@@ -64,10 +64,7 @@ const Dataset = () => {
const onSelectDatasetType = useCallback(
(e: CreateDatasetType) => {
if (
!feConfigs?.isPlus &&
(e === DatasetTypeEnum.websiteDataset || e === DatasetTypeEnum.externalFile)
) {
if (!feConfigs?.isPlus && e === DatasetTypeEnum.websiteDataset) {
return toast({
status: 'warning',
title: t('common:common.system.Commercial version function')
@@ -107,7 +104,7 @@ const Dataset = () => {
overflowY={'auto'}
overflowX={'hidden'}
>
<Flex pt={[4, 6]} pl={3} pr={[3, 10]}>
<Flex pt={[4, 6]} pl={3} pr={folderDetail ? [3, 6] : [3, 8]}>
<Flex flexGrow={1} flexDirection="column">
<Flex alignItems={'center'} justifyContent={'space-between'}>
<ParentPaths
@@ -160,17 +157,17 @@ const Dataset = () => {
description: t('dataset:common_dataset_desc'),
onClick: () => onSelectDatasetType(DatasetTypeEnum.dataset)
},
{
icon: 'core/dataset/externalDatasetColor',
label: t('dataset:api_file'),
description: t('dataset:external_file_dataset_desc'),
onClick: () => onSelectDatasetType(DatasetTypeEnum.apiDataset)
},
{
icon: 'core/dataset/websiteDatasetColor',
label: t('dataset:website_dataset'),
description: t('dataset:website_dataset_desc'),
onClick: () => onSelectDatasetType(DatasetTypeEnum.websiteDataset)
},
{
icon: 'core/dataset/externalDatasetColor',
label: t('dataset:external_file'),
description: t('dataset:external_file_dataset_desc'),
onClick: () => onSelectDatasetType(DatasetTypeEnum.externalFile)
}
]
},