4.6.7 first pr (#726)

This commit is contained in:
Archer
2024-01-10 23:35:04 +08:00
committed by GitHub
parent 414b693303
commit 006ad17c6a
186 changed files with 2996 additions and 1838 deletions

View File

@@ -40,8 +40,8 @@ const FeedbackModal = ({
onSuccess() {
onSuccess(ref.current?.value || t('core.chat.feedback.No Content'));
},
successToast: t('chat.Feedback Success'),
errorToast: t('chat.Feedback Failed')
successToast: t('core.chat.Feedback Success'),
errorToast: t('core.chat.Feedback Failed')
});
return (
@@ -49,17 +49,17 @@ const FeedbackModal = ({
isOpen={true}
onClose={onClose}
iconSrc="/imgs/modal/badAnswer.svg"
title={t('chat.Feedback Modal')}
title={t('core.chat.Feedback Modal')}
>
<ModalBody>
<Textarea ref={ref} rows={10} placeholder={t('chat.Feedback Modal Tip')} />
<Textarea ref={ref} rows={10} placeholder={t('core.chat.Feedback Modal Tip')} />
</ModalBody>
<ModalFooter>
<Button variant={'whiteBase'} mr={2} onClick={onClose}>
{t('Cancel')}
</Button>
<Button isLoading={isLoading} onClick={mutate}>
{t('chat.Feedback Submit')}
{t('core.chat.Feedback Submit')}
</Button>
</ModalFooter>
</MyModal>

View File

@@ -8,10 +8,11 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
import { useRouter } from 'next/router';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { compressImgFileAndUpload } from '@/web/common/file/controller';
import { useToast } from '@/web/common/hooks/useToast';
import { customAlphabet } from 'nanoid';
import { IMG_BLOCK_KEY } from '@fastgpt/global/core/chat/constants';
import { addDays } from 'date-fns';
import { useRequest } from '@/web/common/hooks/useRequest';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
enum FileTypeEnum {
@@ -45,7 +46,6 @@ const MessageInput = ({
resetInputVal: (val: string) => void;
}) => {
const { shareId } = useRouter().query as { shareId?: string };
const { toast } = useToast();
const {
isSpeaking,
isTransCription,
@@ -68,17 +68,18 @@ const MessageInput = ({
maxCount: 10
});
const uploadFile = useCallback(
async (file: FileItemType) => {
const { mutate: uploadFile } = useRequest({
mutationFn: async (file: FileItemType) => {
if (file.type === FileTypeEnum.image) {
try {
const src = await compressImgFileAndUpload({
type: MongoImageTypeEnum.chatImage,
file: file.rawFile,
maxW: 4329,
maxH: 4329,
maxSize: 1024 * 1024 * 5,
// 30 day expired.
expiredTime: addDays(new Date(), 30),
expiredTime: addDays(new Date(), 7),
shareId
});
setFileList((state) =>
@@ -94,16 +95,13 @@ const MessageInput = ({
} catch (error) {
setFileList((state) => state.filter((item) => item.id !== file.id));
console.log(error);
toast({
status: 'error',
title: t('common.Upload File Failed')
});
return Promise.reject(error);
}
}
},
[shareId, t, toast]
);
errorToast: t('common.Upload File Failed')
});
const onSelectFile = useCallback(
async (files: File[]) => {
if (!files || files.length === 0) {
@@ -219,7 +217,7 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
visibility={isSpeaking && isTransCription ? 'visible' : 'hidden'}
>
<Spinner size={'sm'} mr={4} />
{t('chat.Converting to text')}
{t('core.chat.Converting to text')}
</Flex>
{/* file preview */}

View File

@@ -5,7 +5,7 @@ import MyModal from '../MyModal';
import { useTranslation } from 'next-i18next';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import QuoteItem from '../core/dataset/QuoteItem';
import { RawSourceText } from '@/pages/dataset/detail/components/InputDataModal';
import RawSourceBox from '../core/dataset/RawSourceBox';
const QuoteModal = ({
rawSearch = [],
@@ -46,7 +46,7 @@ const QuoteModal = ({
title={
<Box>
{metadata ? (
<RawSourceText {...metadata} canView={false} />
<RawSourceBox {...metadata} canView={false} />
) : (
<>{t('core.chat.Quote Amount', { amount: rawSearch.length })}</>
)}

View File

@@ -19,7 +19,7 @@ const ReadFeedbackModal = ({
isOpen={true}
onClose={onClose}
iconSrc="/imgs/modal/readFeedback.svg"
title={t('chat.Feedback Modal')}
title={t('core.chat.Feedback Modal')}
>
<ModalBody>{content}</ModalBody>
<ModalFooter>

View File

@@ -92,7 +92,7 @@ const ResponseTags = ({
<>
{sourceList.length > 0 && (
<>
<ChatBoxDivider icon="core/chat/quoteFill" text={t('chat.Quote')} />
<ChatBoxDivider icon="core/chat/quoteFill" text={t('core.chat.Quote')} />
<Flex alignItems={'center'} flexWrap={'wrap'} gap={2}>
{sourceList.map((item) => (
<MyTooltip key={item.collectionId} label={t('core.chat.quote.Read Quote')}>

View File

@@ -46,7 +46,7 @@ const SelectMarkCollection = ({
paths={paths}
onClose={onClose}
setParentId={setParentId}
tips={t('chat.Select Mark Kb Desc')}
tips={t('core.chat.Select Mark Kb Desc')}
>
<ModalBody flex={'1 0 0'} overflowY={'auto'}>
<Grid
@@ -164,19 +164,23 @@ const SelectMarkCollection = ({
<InputDataModal
onClose={onClose}
collectionId={adminMarkData.collectionId}
dataId={adminMarkData.dataId}
defaultValue={{
id: adminMarkData.dataId,
q: adminMarkData.q,
a: adminMarkData.a,
indexes: [getDefaultIndex({ dataId: `${Date.now()}` })]
a: adminMarkData.a
}}
onSuccess={(data) => {
if (!data.q || !adminMarkData.datasetId || !adminMarkData.collectionId || !data.id) {
if (
!data.q ||
!adminMarkData.datasetId ||
!adminMarkData.collectionId ||
!data.dataId
) {
return onClose();
}
onSuccess({
dataId: data.id,
dataId: data.dataId,
datasetId: adminMarkData.datasetId,
collectionId: adminMarkData.collectionId,
q: data.q,

View File

@@ -910,14 +910,15 @@ const ChatBox = (
)}
{/* admin mark content */}
{showMarkIcon && item.adminFeedback && (
<Box>
<Box fontSize={'sm'}>
<ChatBoxDivider
icon="core/app/markLight"
text={t('chat.Admin Mark Content')}
text={t('core.chat.Admin Mark Content')}
/>
<Box whiteSpace={'pre'}>{`${item.adminFeedback.q || ''}${
item.adminFeedback.a ? `\n${item.adminFeedback.a}` : ''
}`}</Box>
<Box whiteSpace={'pre'}>
<Box color={'black'}>{item.adminFeedback.q}</Box>
<Box color={'myGray.600'}>{item.adminFeedback.a}</Box>
</Box>
</Box>
)}
</Card>
@@ -996,6 +997,7 @@ const ChatBox = (
setAdminMarkData={(e) => setAdminMarkData({ ...e, chatItemId: adminMarkData.chatItemId })}
onClose={() => setAdminMarkData(undefined)}
onSuccess={(adminFeedback) => {
console.log(adminMarkData);
if (!appId || !chatId || !adminMarkData.chatItemId) return;
updateChatAdminFeedback({
appId,
@@ -1003,6 +1005,7 @@ const ChatBox = (
chatItemId: adminMarkData.chatItemId,
...adminFeedback
});
// update dom
setChatHistory((state) =>
state.map((chatItem) =>
@@ -1234,7 +1237,7 @@ function ChatController({
{!!onDelete && (
<>
{onRetry && (
<MyTooltip label={t('chat.retry')}>
<MyTooltip label={t('core.chat.retry')}>
<MyIcon
{...controlIconStyle}
name={'common/retryLight'}
@@ -1301,7 +1304,7 @@ function ChatController({
</MyTooltip>
))}
{!!onMark && (
<MyTooltip label={t('chat.Mark')}>
<MyTooltip label={t('core.chat.Mark')}>
<MyIcon
{...controlIconStyle}
name={'core/app/markLight'}

View File

@@ -30,7 +30,7 @@ const Navbar = ({ unread }: { unread: number }) => {
{
label: t('navbar.Chat'),
icon: 'core/chat/chatLight',
activeIcon: 'chatcore/dataset/chatFillFill',
activeIcon: 'core/chat/chatFill',
link: `/chat?appId=${lastChatAppId}&chatId=${lastChatId}`,
activeLink: ['/chat']
},
@@ -77,6 +77,12 @@ const Navbar = ({ unread }: { unread: number }) => {
h: '58px',
borderRadius: 'md'
};
const hoverStyle: LinkProps = {
_hover: {
bg: 'myGray.05',
color: 'primary.600'
}
};
return (
<Flex
@@ -146,10 +152,11 @@ const Navbar = ({ unread }: { unread: number }) => {
<Link
as={NextLink}
{...itemStyles}
{...hoverStyle}
prefetch
href={`/account?currentTab=inform`}
mb={0}
color={'#9096a5'}
color={'myGray.500'}
>
<Badge count={unread}>
<MyIcon name={'inform'} width={'22px'} height={'22px'} />
@@ -161,10 +168,11 @@ const Navbar = ({ unread }: { unread: number }) => {
<MyTooltip label={t('common.system.Use Helper')} placement={'right-end'}>
<Link
{...itemStyles}
{...hoverStyle}
href={feConfigs?.chatbotUrl || getDocPath('/docs/intro')}
target="_blank"
mb={0}
color={'#9096a5'}
color={'myGray.500'}
>
<MyIcon name={'common/courseLight'} width={'26px'} height={'26px'} />
</Link>
@@ -177,8 +185,9 @@ const Navbar = ({ unread }: { unread: number }) => {
href="https://github.com/labring/FastGPT"
target={'_blank'}
{...itemStyles}
{...hoverStyle}
mt={0}
color={'#9096a5'}
color={'myGray.500'}
>
<MyIcon name={'common/gitLight'} width={'22px'} height={'22px'} />
</Link>

View File

@@ -24,7 +24,7 @@ const QuestionGuide = ({ text }: { text: string }) => {
return questionGuides.length > 0 ? (
<Box mt={2}>
<ChatBoxDivider icon="core/chat/QGFill" text={t('chat.Question Guide Tips')} />
<ChatBoxDivider icon="core/chat/QGFill" text={t('core.chat.Question Guide Tips')} />
<Flex alignItems={'center'} flexWrap={'wrap'} gap={2}>
{questionGuides.map((text) => (
<Flex

View File

@@ -1,4 +1,4 @@
import React, { useState } from 'react';
import React, { WheelEventHandler, useState } from 'react';
import {
Box,
Image,
@@ -14,6 +14,17 @@ const MdImage = ({ src }: { src?: string }) => {
const [isLoading, setIsLoading] = useState(true);
const [succeed, setSucceed] = useState(false);
const { isOpen, onOpen, onClose } = useDisclosure();
const [scale, setScale] = useState(1);
const handleWheel: WheelEventHandler<HTMLImageElement> = (e) => {
setScale((prevScale) => {
const newScale = prevScale + e.deltaY * 0.5 * -0.01;
if (newScale < 0.5) return 0.5;
if (newScale > 10) return 10;
return newScale;
});
};
return (
<Skeleton
minH="100px"
@@ -48,6 +59,7 @@ const MdImage = ({ src }: { src?: string }) => {
<ModalOverlay />
<ModalContent boxShadow={'none'} maxW={'auto'} w="auto" bg={'transparent'}>
<Image
transform={`scale(${scale})`}
borderRadius={'md'}
src={src}
alt={''}
@@ -57,6 +69,7 @@ const MdImage = ({ src }: { src?: string }) => {
fallbackSrc={'/imgs/errImg.png'}
fallbackStrategy={'onError'}
objectFit={'contain'}
onWheel={handleWheel}
/>
</ModalContent>
<ModalCloseButton bg={'myWhite.500'} zIndex={999999} />

View File

@@ -343,7 +343,6 @@
margin: 10px 0;
}
.markdown {
text-align: justify;
tab-size: 4;
word-spacing: normal;
width: 100%;

View File

@@ -112,6 +112,17 @@ function A({ children, ...props }: any) {
}
const Markdown = ({ source, isChatting = false }: { source: string; isChatting?: boolean }) => {
const components = useMemo<any>(
() => ({
img: Image,
pre: 'div',
p: (pProps: any) => <p {...pProps} dir="auto" />,
code: Code,
a: A
}),
[]
);
const formatSource = source
.replace(/\\n/g, '\n&nbsp;')
.replace(/(http[s]?:\/\/[^\s。]+)([。,])/g, '$1 $2')
@@ -124,13 +135,7 @@ const Markdown = ({ source, isChatting = false }: { source: string; isChatting?:
`}
remarkPlugins={[RemarkMath, RemarkGfm, RemarkBreaks]}
rehypePlugins={[RehypeKatex]}
components={{
img: Image,
pre: 'div',
p: (pProps) => <p {...pProps} dir="auto" />,
code: Code,
a: A
}}
components={components}
linkTarget={'_blank'}
>
{formatSource}

View File

@@ -4,18 +4,19 @@ import MySelect, { type SelectProps } from './index';
import { useTranslation } from 'next-i18next';
import dynamic from 'next/dynamic';
import { useDisclosure } from '@chakra-ui/react';
import { feConfigs } from '@/web/common/system/staticData';
const PriceBox = dynamic(() => import('@/components/support/wallet/Price'));
const SelectAiModel = ({ list, ...props }: SelectProps) => {
const { t } = useTranslation();
const expandList = useMemo(
() =>
list.concat({
label: t('support.user.Price'),
value: 'price'
}),
[list, t]
);
const expandList = useMemo(() => {
return feConfigs.show_pay
? list.concat({
label: t('support.user.Price'),
value: 'price'
})
: list;
}, [list, t]);
const {
isOpen: isOpenPriceBox,

View File

@@ -0,0 +1,30 @@
import { Box, Flex, FlexProps } from '@chakra-ui/react';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constant';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useTranslation } from 'next-i18next';
import React from 'react';
import { DatasetTypeMap } from '@fastgpt/global/core/dataset/constant';
/**
 * Small pill-style tag showing a dataset's type: the type's icon followed by
 * its translated label, both looked up from DatasetTypeMap.
 * Accepts any FlexProps to override the default appearance.
 */
const DatasetTypeTag = ({ type, ...props }: { type: `${DatasetTypeEnum}` } & FlexProps) => {
  const { t } = useTranslation();

  // Display metadata (icon name + i18n label key) registered for this type.
  const typeMeta = DatasetTypeMap[type];

  return (
    <Flex
      bg={'myGray.100'}
      borderWidth={'1px'}
      borderColor={'myGray.200'}
      px={4}
      py={'6px'}
      borderRadius={'md'}
      fontSize={'xs'}
      {...props}
    >
      <MyIcon name={typeMeta.icon as any} w={'16px'} mr={2} color={'myGray.400'} />
      <Box>{t(typeMeta.label)}</Box>
    </Flex>
  );
};

export default DatasetTypeTag;

View File

@@ -1,9 +1,6 @@
import React, { useMemo, useState } from 'react';
import { Box, Flex, Link, Progress } from '@chakra-ui/react';
import {
type InputDataType,
RawSourceText
} from '@/pages/dataset/detail/components/InputDataModal';
import RawSourceBox from '@/components/core/dataset/RawSourceBox';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type.d';
import NextLink from 'next/link';
import MyIcon from '@fastgpt/web/components/common/Icon';
@@ -11,9 +8,6 @@ import { useTranslation } from 'next-i18next';
import MyTooltip from '@/components/MyTooltip';
import dynamic from 'next/dynamic';
import MyBox from '@/components/common/MyBox';
import { getDatasetDataItemById } from '@/web/core/dataset/api';
import { useRequest } from '@/web/common/hooks/useRequest';
import { DatasetDataItemType } from '@fastgpt/global/core/dataset/type';
import { SearchScoreTypeEnum, SearchScoreTypeMap } from '@fastgpt/global/core/dataset/constant';
const InputDataModal = dynamic(() => import('@/pages/dataset/detail/components/InputDataModal'));
@@ -58,17 +52,7 @@ const QuoteItem = ({
linkToDataset?: boolean;
}) => {
const { t } = useTranslation();
const [editInputData, setEditInputData] = useState<InputDataType & { collectionId: string }>();
const { mutate: onclickEdit, isLoading } = useRequest({
mutationFn: async (id: string) => {
return getDatasetDataItemById(id);
},
onSuccess(data: DatasetDataItemType) {
setEditInputData(data);
},
errorToast: t('core.dataset.data.get data error')
});
const [editInputData, setEditInputData] = useState<{ dataId: string; collectionId: string }>();
const score = useMemo(() => {
if (!Array.isArray(quoteItem.score)) {
@@ -114,7 +98,6 @@ const QuoteItem = ({
return (
<>
<MyBox
isLoading={isLoading}
position={'relative'}
overflow={'hidden'}
fontSize={'sm'}
@@ -124,7 +107,7 @@ const QuoteItem = ({
display={'flex'}
flexDirection={'column'}
>
<Flex alignItems={'center'} mb={3}>
<Flex alignItems={'center'} mb={3} flexWrap={'wrap'} gap={3}>
{score?.primaryScore && (
<>
{canViewSource ? (
@@ -132,7 +115,6 @@ const QuoteItem = ({
<Flex
px={'12px'}
py={'5px'}
mr={4}
borderRadius={'md'}
color={'primary.700'}
bg={'primary.50'}
@@ -177,13 +159,13 @@ const QuoteItem = ({
{canViewSource &&
score.secondaryScore.map((item, i) => (
<MyTooltip key={item.type} label={t(SearchScoreTypeMap[item.type]?.desc)}>
<Box fontSize={'xs'} mr={3}>
<Box fontSize={'xs'}>
<Flex alignItems={'flex-start'} lineHeight={1.2} mb={1}>
<Box
px={'5px'}
borderWidth={'1px'}
borderRadius={'sm'}
mr={1}
mr={'2px'}
{...(scoreTheme[i] && scoreTheme[i])}
>
<Box transform={'scale(0.9)'}>#{item.index + 1}</Box>
@@ -223,7 +205,7 @@ const QuoteItem = ({
{quoteItem.q.length + (quoteItem.a?.length || 0)}
</Flex>
</MyTooltip>
<RawSourceText
<RawSourceBox
fontWeight={'bold'}
color={'black'}
sourceName={quoteItem.sourceName}
@@ -249,7 +231,12 @@ const QuoteItem = ({
_hover={{
color: 'primary.600'
}}
onClick={() => onclickEdit(quoteItem.id)}
onClick={() =>
setEditInputData({
dataId: quoteItem.id,
collectionId: quoteItem.collectionId
})
}
/>
</Box>
</MyTooltip>
@@ -271,7 +258,7 @@ const QuoteItem = ({
)}
</MyBox>
{editInputData && editInputData.id && (
{editInputData && (
<InputDataModal
onClose={() => setEditInputData(undefined)}
onSuccess={() => {
@@ -280,7 +267,7 @@ const QuoteItem = ({
onDelete={() => {
console.log('删除引用成功');
}}
defaultValue={editInputData}
dataId={editInputData.dataId}
collectionId={editInputData.collectionId}
/>
)}

View File

@@ -0,0 +1,68 @@
import React, { useMemo } from 'react';
import { Box, BoxProps, Image } from '@chakra-ui/react';
import { useToast } from '@/web/common/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import MyTooltip from '@/components/MyTooltip';
import { useTranslation } from 'next-i18next';
import { getFileAndOpen } from '@/web/core/dataset/utils';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { getSourceNameIcon } from '@fastgpt/global/core/dataset/utils';
type Props = BoxProps & {
  sourceName?: string;
  sourceId?: string;
  // When false, the source is rendered as plain text and cannot be opened.
  canView?: boolean;
};

/**
 * Displays a dataset raw-source reference: the source's icon plus its name.
 * When a sourceId is present and viewing is allowed, the name becomes a
 * clickable, underlined link that fetches and opens the underlying file,
 * toggling the global loading state while the fetch is in flight.
 */
const RawSourceBox = ({ sourceId, sourceName = '', canView = true, ...props }: Props) => {
  const { t } = useTranslation();
  const { toast } = useToast();
  const { setLoading } = useSystemStore();

  // Previewable only when we actually have an id AND viewing is permitted.
  const canPreview = useMemo(() => !!sourceId && canView, [canView, sourceId]);

  const icon = useMemo(() => getSourceNameIcon({ sourceId, sourceName }), [sourceId, sourceName]);

  return (
    <MyTooltip
      label={canPreview ? t('file.Click to view file') || '' : ''}
      shouldWrapChildren={false}
    >
      <Box
        color={'myGray.600'}
        display={'inline-flex'}
        whiteSpace={'nowrap'}
        {...(canPreview
          ? {
              cursor: 'pointer',
              textDecoration: 'underline',
              onClick: async () => {
                // Guard instead of the previous `sourceId as string` cast:
                // canPreview already implies sourceId is set, but this keeps
                // the handler safe without a type assertion.
                if (!sourceId) return;
                setLoading(true);
                try {
                  await getFileAndOpen(sourceId);
                } catch (error) {
                  toast({
                    title: t(getErrText(error, 'error.fileNotFound')),
                    status: 'error'
                  });
                } finally {
                  // Always clear the global loading flag, even if the toast
                  // path (or anything else) were to throw.
                  setLoading(false);
                }
              }
            }
          : {})}
        {...props}
      >
        <Image src={icon} alt="" w={['14px', '16px']} mr={2} />
        <Box
          maxW={['200px', '300px']}
          className={props.className ?? 'textEllipsis'}
          wordBreak={'break-all'}
        >
          {sourceName || t('common.UnKnow Source')}
        </Box>
      </Box>
    </MyTooltip>
  );
};

export default RawSourceBox;

View File

@@ -38,7 +38,7 @@ const DatasetSelectContainer = ({
parentId: path.parentId,
parentName: path.parentName
}))}
FirstPathDom={t('chat.Select Mark Kb')}
FirstPathDom={t('core.chat.Select Mark Kb')}
onClick={(e) => {
setParentId(e);
}}

View File

@@ -12,6 +12,7 @@ import MyTooltip from '@/components/MyTooltip';
import Avatar from '@/components/Avatar';
import { postCreateTeam, putUpdateTeam } from '@/web/support/user/team/api';
import { CreateTeamProps } from '@fastgpt/global/support/user/team/controller.d';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
export type FormDataType = CreateTeamProps & {
id?: string;
@@ -50,6 +51,7 @@ function EditModal({
if (!file) return;
try {
const src = await compressImgFileAndUpload({
type: MongoImageTypeEnum.teamAvatar,
file,
maxW: 300,
maxH: 300

View File

@@ -17,14 +17,6 @@ import Markdown from '@/components/Markdown';
const Price = ({ onClose }: { onClose: () => void }) => {
const list = [
{
title: '知识库存储',
describe: '',
md: `
| 计费项 | 价格(¥) |
| --- | --- |
| 知识库索引数量 | 0/1000条/天 |`
},
{
title: '对话模型',
describe: '',

View File

@@ -0,0 +1,171 @@
import React, { useState } from 'react';
import MyModal from '@/components/MyModal';
import { useTranslation } from 'next-i18next';
import {
Box,
Flex,
ModalBody,
NumberInput,
NumberInputField,
NumberInputStepper,
NumberIncrementStepper,
NumberDecrementStepper,
ModalFooter,
Button
} from '@chakra-ui/react';
import { useQuery } from '@tanstack/react-query';
import { getTeamDatasetValidSub, postExpandTeamDatasetSub } from '@/web/support/wallet/sub/api';
import Markdown from '@/components/Markdown';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { useConfirm } from '@/web/common/hooks/useConfirm';
import { getMonthRemainingDays } from '@fastgpt/global/common/math/date';
import { useRequest } from '@/web/common/hooks/useRequest';
import { useRouter } from 'next/router';
import { feConfigs } from '@/web/common/system/staticData';
import { useToast } from '@/web/common/hooks/useToast';
import { formatTime2YMDHM } from '@fastgpt/global/common/string/time';
import MySelect from '@/components/Select';
/**
 * Modal for managing a team's dataset-store subscription: shows the current
 * store size and expiry, lets the user toggle auto-renew, and lets them buy
 * additional storage (in units of 1000 entries). Pricing copy below is
 * user-facing Chinese text and is part of runtime behavior.
 */
const SubDatasetModal = ({ onClose }: { onClose: () => void }) => {
// Pricing config from the frontend feature config; fall back to 0 when the
// subscription section is not configured.
const datasetStoreFreeSize = feConfigs?.subscription?.datasetStoreFreeSize || 0;
const datasetStorePrice = feConfigs?.subscription?.datasetStorePrice || 0;
const { t } = useTranslation();
const { toast } = useToast();
const router = useRouter();
const { ConfirmModal, openConfirm } = useConfirm({});
// Requested expansion size, in thousands of entries (see the "000" suffix in the UI).
const [datasetSize, setDatasetSize] = useState(0);
// Auto-renew flag kept as a string because MySelect values are strings.
const [isRenew, setIsRenew] = useState('false');
// Expanding (paying) vs. merely updating the renew setting.
const isExpand = datasetSize > 0;
// Load the team's current valid subscription and sync the renew toggle to it.
const { data: datasetSub } = useQuery(['getTeamDatasetValidSub'], getTeamDatasetValidSub, {
onSuccess(res) {
setIsRenew(`${res?.sub?.renew}`);
}
});
// Submit the expansion / renew-setting change. On a paid expansion the page
// reloads to reflect the new quota; otherwise the modal just closes.
const { mutate, isLoading } = useRequest({
mutationFn: () => postExpandTeamDatasetSub({ size: datasetSize, renew: isRenew === 'true' }),
onSuccess(res) {
if (isExpand) {
router.reload();
} else {
onClose();
}
},
successToast: isExpand ? t('support.wallet.Pay success') : t('common.Update success'),
errorToast: isExpand ? t('support.wallet.Pay error') : t('common.error.Update error')
});
return (
<MyModal
isOpen
iconSrc="/imgs/module/db.png"
title={t('support.wallet.subscription.Dataset store')}
>
<ModalBody>
<>
<Flex alignItems={'center'}>
{t('support.user.Price')}
<MyTooltip label={t('support.wallet.subscription.Dataset store price tip')}>
<QuestionOutlineIcon ml={1} />
</MyTooltip>
</Flex>
{/* Price table rendered as markdown (Chinese labels are user-facing copy). */}
<Markdown
source={`
| 免费知识库 | ${datasetStoreFreeSize}条 |
| --- | --- |
| 额外知识库 | ${datasetStorePrice}元/1000条/月 |
`}
/>
</>
<Flex mt={4}>
<Box w={'100px'}>{t('support.wallet.subscription.Current dataset store')}: </Box>
<Box ml={2} fontWeight={'bold'} flex={1}>
{datasetSub?.sub?.datasetStoreAmount || 0}
{t('core.dataset.data.unit')}
</Box>
</Flex>
{/* Expiry row, shown only when the subscription has an expiry time. */}
{datasetSub?.sub?.expiredTime && (
<Flex mt={3}>
<Box w={'100px'}>: </Box>
<Box ml={2}>{formatTime2YMDHM(datasetSub?.sub?.expiredTime)}</Box>
</Flex>
)}
{/* Auto-renew selector. */}
<Flex mt={3} alignItems={'center'}>
<Box w={'100px'}>: </Box>
<MySelect
ml={2}
value={isRenew}
size={'sm'}
w={'150px'}
list={[
{ label: '自动续费', value: 'true' },
{ label: '不自动续费', value: 'false' }
]}
onchange={setIsRenew}
/>
</Flex>
<Box mt={4}>
<Box>{t('support.wallet.subscription.Expand size')}</Box>
<Flex alignItems={'center'} mt={1}>
<NumberInput
flex={1}
min={0}
step={1}
value={datasetSize}
position={'relative'}
onChange={(e) => {
setDatasetSize(Number(e));
}}
>
<NumberInputField value={datasetSize} step={1} min={0} />
<NumberInputStepper>
<NumberIncrementStepper />
<NumberDecrementStepper />
</NumberInputStepper>
</NumberInput>
{/* Input is in thousands: the literal "000" suffix shows the real count. */}
<Box ml={2}>000{t('core.dataset.data.unit')}</Box>
</Flex>
</Box>
</ModalBody>
<ModalFooter>
<Button mr={3} variant={'whiteBase'} onClick={onClose}>
{t('common.Close')}
</Button>
<Button
isLoading={isLoading}
onClick={() => {
if (isExpand) {
{/* Prorated first charge: remaining days of this month over a 30-day month. */}
const currentMonthPrice = (
datasetSize *
datasetStorePrice *
(getMonthRemainingDays() / 30)
).toFixed(2);
{/* Total size in thousands after expansion (current amount is in entries). */}
const totalSize = (datasetSub?.sub?.datasetStoreAmount || 0) / 1000 + datasetSize;
openConfirm(
mutate,
undefined,
`本次扩容预估扣除 ${currentMonthPrice} 元。次月起,每月 1 号将会扣除 ${
totalSize * datasetStorePrice
} 元(共${totalSize * 1000}条)。请确保账号余额充足。`
)();
} else {
mutate('');
}
}}
>
{t('common.Confirm')}
</Button>
</ModalFooter>
<ConfirmModal />
</MyModal>
);
};
export default SubDatasetModal;

View File

@@ -1,5 +1,5 @@
import {
DatasetCollectionTrainingModeEnum,
TrainingModeEnum,
DatasetCollectionTypeEnum,
DatasetTypeEnum
} from '@fastgpt/global/core/dataset/constant';

View File

@@ -30,7 +30,7 @@ export type InsertOneDatasetDataProps = PushDatasetDataChunkProps & {
export type PushDatasetDataProps = {
collectionId: string;
data: PushDatasetDataChunkProps[];
mode: `${TrainingModeEnum}`;
trainingMode: `${TrainingModeEnum}`;
prompt?: string;
billId?: string;
};

View File

@@ -1,4 +1,4 @@
import React, { useCallback, useRef } from 'react';
import React, { useCallback, useMemo, useRef } from 'react';
import {
Box,
Flex,
@@ -8,7 +8,8 @@ import {
Divider,
Select,
Input,
Link
Link,
Progress
} from '@chakra-ui/react';
import { useForm } from 'react-hook-form';
import { UserUpdateParams } from '@/types/user';
@@ -22,7 +23,6 @@ import { compressImgFileAndUpload } from '@/web/common/file/controller';
import { feConfigs, systemVersion } from '@/web/common/system/staticData';
import { useTranslation } from 'next-i18next';
import { timezoneList } from '@fastgpt/global/common/time/timezone';
import Loading from '@/components/Loading';
import Avatar from '@/components/Avatar';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyTooltip from '@/components/MyTooltip';
@@ -32,20 +32,14 @@ import MySelect from '@/components/Select';
import { formatStorePrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import { putUpdateMemberName } from '@/web/support/user/team/api';
import { getDocPath } from '@/web/common/system/doc';
import { getTeamDatasetValidSub } from '@/web/support/wallet/sub/api';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const TeamMenu = dynamic(() => import('@/components/support/user/team/TeamMenu'));
const PayModal = dynamic(() => import('./PayModal'), {
loading: () => <Loading fixed={false} />,
ssr: false
});
const UpdatePswModal = dynamic(() => import('./UpdatePswModal'), {
loading: () => <Loading fixed={false} />,
ssr: false
});
const OpenAIAccountModal = dynamic(() => import('./OpenAIAccountModal'), {
loading: () => <Loading fixed={false} />,
ssr: false
});
const PayModal = dynamic(() => import('./PayModal'));
const UpdatePswModal = dynamic(() => import('./UpdatePswModal'));
const OpenAIAccountModal = dynamic(() => import('./OpenAIAccountModal'));
const SubDatasetModal = dynamic(() => import('@/components/support/wallet/SubDatasetModal'));
const UserInfo = () => {
const theme = useTheme();
@@ -69,6 +63,11 @@ const UserInfo = () => {
onOpen: onOpenUpdatePsw
} = useDisclosure();
const { isOpen: isOpenOpenai, onClose: onCloseOpenai, onOpen: onOpenOpenai } = useDisclosure();
const {
isOpen: isOpenSubDatasetModal,
onClose: onCloseSubDatasetModal,
onOpen: onOpenSubDatasetModal
} = useDisclosure();
const { File, onOpen: onOpenSelectFile } = useSelectFile({
fileType: '.jpg,.png',
@@ -97,6 +96,7 @@ const UserInfo = () => {
if (!file || !userInfo) return;
try {
const src = await compressImgFileAndUpload({
type: MongoImageTypeEnum.userAvatar,
file,
maxW: 300,
maxH: 300
@@ -122,6 +122,27 @@ const UserInfo = () => {
}
});
const { data: datasetSub = { maxSize: 0, usedSize: 0 } } = useQuery(
['getTeamDatasetValidSub'],
getTeamDatasetValidSub
);
const datasetUsageMap = useMemo(() => {
const rate = datasetSub.usedSize / datasetSub.maxSize;
const colorScheme = (() => {
if (rate < 0.5) return 'green';
if (rate < 0.8) return 'yellow';
return 'red';
})();
return {
colorScheme,
value: rate * 100,
maxSize: datasetSub.maxSize,
usedSize: datasetSub.usedSize
};
}, [datasetSub.maxSize, datasetSub.usedSize]);
return (
<Box
display={['block', 'flex']}
@@ -233,21 +254,48 @@ const UserInfo = () => {
{t('user.Change')}
</Button>
</Flex>
<Box mt={6} whiteSpace={'nowrap'} w={['85%', '300px']}>
<Flex alignItems={'center'}>
<Box flex={'0 0 80px'} fontSize={'md'}>
{t('user.team.Balance')}:&nbsp;
{feConfigs.isPlus && (
<>
<Box mt={6} whiteSpace={'nowrap'} w={['85%', '300px']}>
<Flex alignItems={'center'}>
<Box flex={'0 0 80px'} fontSize={'md'}>
{t('user.team.Balance')}:&nbsp;
</Box>
<Box flex={1}>
<strong>{formatStorePrice2Read(userInfo?.team?.balance).toFixed(3)}</strong>
</Box>
{feConfigs?.show_pay && userInfo?.team?.canWrite && (
<Button size={['sm', 'md']} ml={5} onClick={onOpenPayModal}>
{t('user.Pay')}
</Button>
)}
</Flex>
</Box>
<Box flex={1}>
<strong>{formatStorePrice2Read(userInfo?.team?.balance).toFixed(3)}</strong>
<Box mt={6} whiteSpace={'nowrap'} w={['85%', '300px']}>
<Flex alignItems={'center'}>
<Box flex={'1 0 0'} fontSize={'md'}>
{t('support.user.team.Dataset usage')}:&nbsp;{datasetUsageMap.usedSize}/
{datasetSub.maxSize}
</Box>
<Button size={'sm'} onClick={onOpenSubDatasetModal}>
{t('support.wallet.Buy more')}
</Button>
</Flex>
<Box mt={1}>
<Progress
value={datasetUsageMap.value}
colorScheme={datasetUsageMap.colorScheme}
borderRadius={'md'}
isAnimated
hasStripe
borderWidth={'1px'}
borderColor={'borderColor.base'}
/>
</Box>
</Box>
{feConfigs?.show_pay && userInfo?.team?.canWrite && (
<Button size={['sm', 'md']} ml={5} onClick={onOpenPayModal}>
{t('user.Pay')}
</Button>
)}
</Flex>
</Box>
</>
)}
{feConfigs?.docUrl && (
<Link
href={getDocPath('/docs/intro')}
@@ -344,9 +392,10 @@ const UserInfo = () => {
onClose={onCloseOpenai}
/>
)}
{isOpenSubDatasetModal && <SubDatasetModal onClose={onCloseSubDatasetModal} />}
<File onSelect={onSelectFile} />
</Box>
);
};
export default UserInfo;
export default React.memo(UserInfo);

View File

@@ -46,12 +46,12 @@ const BillTable = () => {
}}
>
<Flex alignItems={'center'} justifyContent={'space-between'}>
<Box>{item.title}</Box>
<Box fontWeight={'bold'}>{item.title}</Box>
<Box ml={2} color={'myGray.500'}>
{formatTimeToChatTime(item.time)}
</Box>
</Flex>
<Box fontSize={'sm'} color={'myGray.600'}>
<Box fontSize={'sm'} color={'myGray.600'} whiteSpace={'pre-wrap'}>
{item.content}
</Box>
{!item.read && (

View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
import { getUploadModel, removeFilesByPaths } from '@fastgpt/service/common/file/upload/multer';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
/**
* Creates the multer uploader
@@ -16,12 +16,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
let filePaths: string[] = [];
try {
const { userId, teamId, tmbId } = await authCert({ req, authToken: true });
const { files, bucketName, metadata } = await upload.doUpload(req, res);
filePaths = files.map((file) => file.path);
await connectToDatabase();
const { userId, teamId, tmbId } = await authCert({ req, authToken: true });
if (!bucketName) {
throw new Error('bucketName is empty');
@@ -53,8 +54,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
error
});
}
removeFilesByPaths(filePaths);
}
export const config = {

View File

@@ -8,15 +8,13 @@ import { UploadImgProps } from '@fastgpt/global/common/file/api';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { base64Img, expiredTime, metadata, shareId } = req.body as UploadImgProps;
const { shareId, ...body } = req.body as UploadImgProps;
const { teamId } = await authCertOrShareId({ req, shareId, authToken: true });
const data = await uploadMongoImg({
teamId,
base64Img,
expiredTime,
metadata
...body
});
jsonRes(res, { data });

View File

@@ -59,39 +59,44 @@ const defaultFeConfigs: FastGPTFeConfigsType = {
};
export async function getInitConfig() {
if (global.systemInitd) return;
global.systemInitd = true;
try {
if (global.feConfigs) return;
await connectToDatabase();
initGlobal();
await initSystemConfig();
await Promise.all([
initGlobal(),
initSystemConfig(),
getSimpleModeTemplates(),
getSystemVersion(),
getSystemPlugin()
]);
console.log({
simpleModeTemplates: global.simpleModeTemplates,
communityPlugins: global.communityPlugins
});
} catch (error) {
console.error('Load init config error', error);
global.systemInitd = false;
if (!global.feConfigs) {
exit(1);
}
}
await getSimpleModeTemplates();
}
getSystemVersion();
getSystemPlugin();
export function initGlobal() {
if (global.communityPlugins) return;
console.log({
feConfigs: global.feConfigs,
systemEnv: global.systemEnv,
chatModels: global.chatModels,
qaModels: global.qaModels,
cqModels: global.cqModels,
extractModels: global.extractModels,
qgModels: global.qgModels,
vectorModels: global.vectorModels,
reRankModels: global.reRankModels,
audioSpeechModels: global.audioSpeechModels,
whisperModel: global.whisperModel,
simpleModeTemplates: global.simpleModeTemplates,
communityPlugins: global.communityPlugins
});
global.communityPlugins = [];
global.simpleModeTemplates = [];
global.qaQueueLen = global.qaQueueLen ?? 0;
global.vectorQueueLen = global.vectorQueueLen ?? 0;
// init tikToken
getTikTokenEnc();
initHttpAgent();
}
export async function initSystemConfig() {
@@ -137,19 +142,24 @@ export async function initSystemConfig() {
global.reRankModels = config.reRankModels;
global.audioSpeechModels = config.audioSpeechModels;
global.whisperModel = config.whisperModel;
}
export function initGlobal() {
global.communityPlugins = [];
global.simpleModeTemplates = [];
global.qaQueueLen = global.qaQueueLen ?? 0;
global.vectorQueueLen = global.vectorQueueLen ?? 0;
// init tikToken
getTikTokenEnc();
initHttpAgent();
console.log({
feConfigs: global.feConfigs,
systemEnv: global.systemEnv,
chatModels: global.chatModels,
qaModels: global.qaModels,
cqModels: global.cqModels,
extractModels: global.extractModels,
qgModels: global.qgModels,
vectorModels: global.vectorModels,
reRankModels: global.reRankModels,
audioSpeechModels: global.audioSpeechModels,
whisperModel: global.whisperModel
});
}
export function getSystemVersion() {
if (global.systemVersion) return;
try {
if (process.env.NODE_ENV === 'development') {
global.systemVersion = process.env.npm_package_version || '0.0.0';

View File

@@ -1,31 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { initSystemConfig } from './getInitData';
/**
 * Admin-only endpoint: reloads the system configuration from the database
 * and dumps the refreshed model settings to the server log.
 * Always responds with an empty success payload — failures are only logged
 * (best-effort refresh), matching the endpoint's original contract.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    await connectToDatabase();
    // Only the root credential may trigger a config refresh.
    await authCert({ req, authRoot: true });
    await initSystemConfig();

    console.log(`refresh config`);
    const refreshedConfig = {
      chatModels: global.chatModels,
      qaModels: global.qaModels,
      cqModels: global.cqModels,
      extractModels: global.extractModels,
      qgModels: global.qgModels,
      vectorModels: global.vectorModels,
      reRankModels: global.reRankModels,
      audioSpeechModels: global.audioSpeechModels,
      whisperModel: global.whisperModel,
      feConfigs: global.feConfigs,
      systemEnv: global.systemEnv
    };
    console.log(refreshedConfig);
  } catch (error) {
    console.log(error);
  }
  jsonRes(res);
}

View File

@@ -29,6 +29,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await MongoChatItem.findOneAndUpdate(
{
chatId,
dataId: chatItemId
},
{

View File

@@ -0,0 +1,88 @@
/*
Create one dataset collection
*/
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { LinkCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import { TrainingModeEnum, DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constant';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
import { createTrainingBill } from '@fastgpt/service/support/wallet/bill/controller';
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';
import { reloadCollectionChunks } from '@fastgpt/service/core/dataset/collection/utils';
import { startQueue } from '@/service/utils/tools';
/**
 * Creates a link-type dataset collection and immediately starts syncing it:
 * 1. authorizes write access on the target dataset (token or API key),
 * 2. checks the team's dataset storage limit,
 * 3. creates the collection record for the link,
 * 4. opens a training bill, reloads the link's chunks, and wakes the queue.
 * Responds with { collectionId } on success, HTTP 500 with the error otherwise.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    await connectToDatabase();

    const params = req.body as LinkCreateDatasetCollectionParams;
    const {
      link,
      trainingType = TrainingModeEnum.chunk,
      chunkSize = 512,
      chunkSplitter,
      qaPrompt,
      ...collectionFields
    } = params;

    // Write permission on the dataset is required.
    const { teamId, tmbId, dataset } = await authDataset({
      req,
      authToken: true,
      authApiKey: true,
      datasetId: collectionFields.datasetId,
      per: 'w'
    });

    // 1. check dataset limit.
    // The link has not been fetched yet, so the insert length is estimated
    // from a fixed 10-element placeholder payload.
    await checkDatasetLimit({
      teamId,
      freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
      insertLen: predictDataLimitLength(trainingType, new Array(10))
    });

    // 2. create collection (named after the link itself)
    const collectionId = await createOneCollection({
      ...collectionFields,
      name: link,
      teamId,
      tmbId,
      type: DatasetCollectionTypeEnum.link,
      trainingType,
      chunkSize,
      chunkSplitter,
      qaPrompt,
      rawLink: link
    });

    // 3. create bill and start sync
    const { billId } = await createTrainingBill({
      teamId,
      tmbId,
      appName: 'core.dataset.collection.Sync Collection',
      billSource: BillSourceEnum.training,
      vectorModel: getVectorModel(dataset.vectorModel).name,
      agentModel: getQAModel(dataset.agentModel).name
    });
    await reloadCollectionChunks({
      collectionId,
      tmbId,
      billId
    });
    startQueue();

    jsonRes(res, {
      data: { collectionId }
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -0,0 +1,90 @@
/*
Create one dataset collection
*/
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { TextCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import { TrainingModeEnum, DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constant';
import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
import { pushDataToDatasetCollection } from '@/service/core/dataset/data/controller';
import { hashStr } from '@fastgpt/global/common/string/tools';
/**
 * Creates a virtual dataset collection from raw text:
 * 1. splits the text into chunks,
 * 2. checks the team's dataset storage limit against the resulting chunk count,
 * 3. creates the collection (recording a hash and the raw text length),
 * 4. pushes the chunks into the training queue.
 * Responds with { collectionId, results } on success, HTTP 500 otherwise.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    await connectToDatabase();
    const {
      text,
      trainingType = TrainingModeEnum.chunk,
      chunkSize = 512,
      chunkSplitter,
      qaPrompt,
      ...body
    } = req.body as TextCreateDatasetCollectionParams;

    // Write permission on the dataset is required (token or API key).
    const { teamId, tmbId } = await authDataset({
      req,
      authToken: true,
      authApiKey: true,
      datasetId: body.datasetId,
      per: 'w'
    });

    // 1. split text to chunks.
    // Chunk mode overlaps neighboring chunks by 20%; QA mode uses no overlap.
    const { chunks } = splitText2Chunks({
      text,
      chunkLen: chunkSize,
      overlapRatio: trainingType === TrainingModeEnum.chunk ? 0.2 : 0,
      customReg: chunkSplitter ? [chunkSplitter] : [],
      countTokens: false
    });

    // 2. check dataset limit
    await checkDatasetLimit({
      teamId,
      freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
      insertLen: predictDataLimitLength(trainingType, chunks)
    });

    // 3. create collection
    const collectionId = await createOneCollection({
      ...body,
      teamId,
      tmbId,
      type: DatasetCollectionTypeEnum.virtual,

      trainingType,
      chunkSize,
      chunkSplitter,
      qaPrompt,

      hashRawText: hashStr(text),
      rawTextLength: text.length
    });

    // 4. push chunks to training queue.
    // The callback parameter is named `chunk` (not `text`) to avoid shadowing
    // the outer `text` request field.
    const insertResults = await pushDataToDatasetCollection({
      teamId,
      tmbId,
      collectionId,
      trainingMode: trainingType,
      data: chunks.map((chunk, index) => ({
        q: chunk,
        chunkIndex: index
      }))
    });

    jsonRes(res, {
      data: { collectionId, results: insertResults }
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -5,7 +5,6 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { authUserNotVisitor } from '@fastgpt/service/support/permission/auth/user';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
@@ -14,13 +13,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
await connectToDatabase();
const body = req.body as CreateDatasetCollectionParams;
// auth. not visitor and dataset is public
const { teamId, tmbId } = await authUserNotVisitor({ req, authToken: true });
await authDataset({
const { teamId, tmbId } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId: body.datasetId,
per: 'r'
per: 'w'
});
jsonRes(res, {

View File

@@ -4,13 +4,12 @@ import { connectToDatabase } from '@/service/mongo';
import { findCollectionAndChild } from '@fastgpt/service/core/dataset/collection/utils';
import { delCollectionRelevantData } from '@fastgpt/service/core/dataset/data/controller';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { collectionId } = req.query as { collectionId: string };
const { id: collectionId } = req.query as { id: string };
if (!collectionId) {
throw new Error('CollectionIdId is required');
@@ -19,6 +18,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
await authDatasetCollection({
req,
authToken: true,
authApiKey: true,
collectionId,
per: 'w'
});

View File

@@ -22,6 +22,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const { collection, canWrite } = await authDatasetCollection({
req,
authToken: true,
authApiKey: true,
collectionId: id,
per: 'r'
});

View File

@@ -11,7 +11,6 @@ import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constant
import { startQueue } from '@/service/utils/tools';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { DatasetDataCollectionName } from '@fastgpt/service/core/dataset/data/schema';
import { authUserRole } from '@fastgpt/service/support/permission/auth/user';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -27,12 +26,19 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
simple = false
} = req.body as GetDatasetCollectionsProps;
searchText = searchText?.replace(/'/g, '');
pageSize = Math.min(pageSize, 30);
// auth dataset and get my role
const { tmbId } = await authDataset({ req, authToken: true, datasetId, per: 'r' });
const { canWrite } = await authUserRole({ req, authToken: true });
const { teamId, tmbId, canWrite } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
});
const match = {
teamId: new Types.ObjectId(teamId),
datasetId: new Types.ObjectId(datasetId),
parentId: parentId ? new Types.ObjectId(parentId) : null,
...(selectFolder ? { type: DatasetCollectionTypeEnum.folder } : {}),
@@ -85,9 +91,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
}
}
},
{ $project: { _id: 1 } }
{ $count: 'count' }
],
as: 'trainings'
as: 'trainingCount'
}
},
// count collection total data
@@ -103,9 +109,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
}
}
},
{ $project: { _id: 1 } }
{ $count: 'count' }
],
as: 'datas'
as: 'dataCount'
}
},
{
@@ -117,10 +123,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
type: 1,
status: 1,
updateTime: 1,
dataAmount: { $size: '$datas' },
trainingAmount: { $size: '$trainings' },
fileId: 1,
rawLink: 1
rawLink: 1,
dataAmount: {
$ifNull: [{ $arrayElemAt: ['$dataCount.count', 0] }, 0]
},
trainingAmount: {
$ifNull: [{ $arrayElemAt: ['$trainingCount.count', 0] }, 0]
}
}
},
{
@@ -144,7 +154,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
);
if (data.find((item) => item.trainingAmount > 0)) {
startQueue(1);
startQueue();
}
// count collections

View File

@@ -38,7 +38,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
return Promise.reject(DatasetErrEnum.unLinkCollection);
}
const { rawText, isSameRawText } = await getCollectionAndRawText({
const { title, rawText, isSameRawText } = await getCollectionAndRawText({
collection
});
@@ -68,7 +68,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
tmbId: collection.tmbId,
parentId: collection.parentId,
datasetId: collection.datasetId._id,
name: collection.name,
name: title || collection.name,
type: collection.type,
trainingType: collection.trainingType,
chunkSize: collection.chunkSize,

View File

@@ -16,7 +16,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
}
// 凭证校验
await authDatasetCollection({ req, authToken: true, collectionId: id, per: 'w' });
await authDatasetCollection({
req,
authToken: true,
authApiKey: true,
collectionId: id,
per: 'w'
});
const updateFields: Record<string, any> = {
...(parentId !== undefined && { parentId: parentId || null }),

View File

@@ -16,12 +16,28 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
type,
avatar,
vectorModel = global.vectorModels[0].model,
agentModel
agentModel = global.qaModels[0].model
} = req.body as CreateDatasetParams;
// 凭证校验
// auth
const { teamId, tmbId } = await authUserNotVisitor({ req, authToken: true });
// check model valid
const vectorModelStore = global.vectorModels.find((item) => item.model === vectorModel);
const agentModelStore = global.qaModels.find((item) => item.model === agentModel);
if (!vectorModelStore || !agentModelStore) {
throw new Error('vectorModel or qaModel is invalid');
}
// check limit
const authCount = await MongoDataset.countDocuments({
teamId,
type: DatasetTypeEnum.dataset
});
if (authCount >= 50) {
throw new Error('每个团队上限 50 个知识库');
}
const { _id } = await MongoDataset.create({
name,
teamId,

View File

@@ -8,8 +8,8 @@ import { delDatasetDataByDataId } from '@fastgpt/service/core/dataset/data/contr
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { dataId } = req.query as {
dataId: string;
const { id: dataId } = req.query as {
id: string;
};
if (!dataId) {
@@ -17,9 +17,18 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
}
// 凭证校验
await authDatasetData({ req, authToken: true, dataId, per: 'w' });
const { datasetData } = await authDatasetData({
req,
authToken: true,
authApiKey: true,
dataId,
per: 'w'
});
await delDatasetDataByDataId(dataId);
await delDatasetDataByDataId({
collectionId: datasetData.collectionId,
mongoDataId: dataId
});
jsonRes(res, {
data: 'success'

View File

@@ -13,12 +13,18 @@ export type Response = {
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { dataId } = req.query as {
dataId: string;
const { id: dataId } = req.query as {
id: string;
};
// 凭证校验
const { datasetData } = await authDatasetData({ req, authToken: true, dataId, per: 'r' });
const { datasetData } = await authDatasetData({
req,
authToken: true,
authApiKey: true,
dataId,
per: 'r'
});
jsonRes(res, {
data: datasetData

View File

@@ -16,6 +16,7 @@ import { authTeamBalance } from '@/service/support/permission/auth/bill';
import { pushGenerateVectorBill } from '@/service/support/wallet/bill/push';
import { InsertOneDatasetDataProps } from '@/global/core/dataset/api';
import { simpleText } from '@fastgpt/global/common/string/tools';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -39,6 +40,12 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
per: 'w'
});
await checkDatasetLimit({
teamId,
freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
insertLen: 1
});
// auth collection and get dataset
const [
{

View File

@@ -17,8 +17,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
collectionId
} = req.body as GetDatasetDataListProps;
pageSize = Math.min(pageSize, 30);
// 凭证校验
await authDatasetCollection({ req, authToken: true, collectionId, per: 'r' });
await authDatasetCollection({ req, authToken: true, authApiKey: true, collectionId, per: 'r' });
searchText = searchText.replace(/'/g, '');
@@ -32,7 +34,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
};
const [data, total] = await Promise.all([
MongoDatasetData.find(match, '_id datasetId collectionId q a chunkIndex indexes')
MongoDatasetData.find(match, '_id datasetId collectionId q a chunkIndex')
.sort({ chunkIndex: 1, updateTime: -1 })
.skip((pageNum - 1) * pageSize)
.limit(pageSize)

View File

@@ -2,38 +2,30 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { withNextCors } from '@fastgpt/service/common/middle/cors';
import { TrainingModeEnum, TrainingTypeMap } from '@fastgpt/global/core/dataset/constant';
import { startQueue } from '@/service/utils/tools';
import { countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import type { PushDataResponse } from '@/global/core/api/datasetRes.d';
import type { PushDatasetDataProps } from '@/global/core/dataset/api.d';
import { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { getCollectionWithDataset } from '@fastgpt/service/core/dataset/controller';
import { simpleText } from '@fastgpt/global/common/string/tools';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
import { pushDataToDatasetCollection } from '@/service/core/dataset/data/controller';
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { collectionId, data, mode = TrainingModeEnum.chunk } = req.body as PushDatasetDataProps;
const { collectionId, data } = req.body as PushDatasetDataProps;
if (!collectionId || !Array.isArray(data)) {
throw new Error('collectionId or data is empty');
}
if (!TrainingTypeMap[mode]) {
throw new Error(`Mode is not ${Object.keys(TrainingTypeMap).join(', ')}`);
}
if (data.length > 200) {
throw new Error('Data is too long, max 200');
}
// 凭证校验
const { teamId, tmbId } = await authDatasetCollection({
const { teamId, tmbId, collection } = await authDatasetCollection({
req,
authToken: true,
authApiKey: true,
@@ -41,6 +33,13 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
per: 'w'
});
// auth dataset limit
await checkDatasetLimit({
teamId,
freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
insertLen: predictDataLimitLength(collection.trainingType, data)
});
jsonRes<PushDataResponse>(res, {
data: await pushDataToDatasetCollection({
...req.body,
@@ -56,141 +55,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
}
});
export async function pushDataToDatasetCollection({
teamId,
tmbId,
collectionId,
data,
mode,
prompt,
billId
}: {
teamId: string;
tmbId: string;
} & PushDatasetDataProps): Promise<PushDataResponse> {
const { datasetId, model, maxToken, weight } = await checkModelValid({
mode,
collectionId
});
// format q and a, remove empty char
data.forEach((item) => {
item.q = simpleText(item.q);
item.a = simpleText(item.a);
item.indexes = item.indexes
?.map((index) => {
return {
...index,
text: simpleText(index.text)
};
})
.filter(Boolean);
});
// filter repeat or equal content
const set = new Set();
const filterResult: Record<string, PushDatasetDataChunkProps[]> = {
success: [],
overToken: [],
repeat: [],
error: []
};
data.forEach((item) => {
if (!item.q) {
filterResult.error.push(item);
return;
}
const text = item.q + item.a;
// count q token
const token = countPromptTokens(item.q);
if (token > maxToken) {
filterResult.overToken.push(item);
return;
}
if (set.has(text)) {
console.log('repeat', item);
filterResult.repeat.push(item);
} else {
filterResult.success.push(item);
set.add(text);
}
});
// 插入记录
const insertRes = await MongoDatasetTraining.insertMany(
filterResult.success.map((item, i) => ({
teamId,
tmbId,
datasetId,
collectionId,
billId,
mode,
prompt,
model,
q: item.q,
a: item.a,
chunkIndex: item.chunkIndex ?? i,
weight: weight ?? 0,
indexes: item.indexes
}))
);
insertRes.length > 0 && startQueue();
delete filterResult.success;
return {
insertLen: insertRes.length,
...filterResult
};
}
export async function checkModelValid({
mode,
collectionId
}: {
mode: `${TrainingModeEnum}`;
collectionId: string;
}) {
const {
datasetId: { _id: datasetId, vectorModel, agentModel }
} = await getCollectionWithDataset(collectionId);
if (mode === TrainingModeEnum.chunk) {
if (!collectionId) return Promise.reject(`CollectionId is empty`);
const vectorModelData = getVectorModel(vectorModel);
if (!vectorModelData) {
return Promise.reject(`Model ${vectorModel} is inValid`);
}
return {
datasetId,
maxToken: vectorModelData.maxToken * 1.5,
model: vectorModelData.model,
weight: vectorModelData.weight
};
}
if (mode === TrainingModeEnum.qa) {
const qaModelData = getQAModel(agentModel);
if (!qaModelData) {
return Promise.reject(`Model ${agentModel} is inValid`);
}
return {
datasetId,
maxToken: qaModelData.maxContext * 0.8,
model: qaModelData.model,
weight: 0
};
}
return Promise.reject(`Mode ${mode} is inValid`);
}
export const config = {
api: {
bodyParser: {

View File

@@ -11,7 +11,7 @@ import { UpdateDatasetDataProps } from '@/global/core/dataset/api';
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { id, q = '', a, indexes } = req.body as UpdateDatasetDataProps;
const { id, q = '', a, indexes = [] } = req.body as UpdateDatasetDataProps;
// auth data permission
const {
@@ -23,6 +23,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
} = await authDatasetData({
req,
authToken: true,
authApiKey: true,
dataId: id,
per: 'w'
});

View File

@@ -20,6 +20,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const { dataset, canWrite, isOwner } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
});

View File

@@ -15,7 +15,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
// 凭证校验
const { teamId, tmbId, teamOwner, role, canWrite } = await authUserRole({
req,
authToken: true
authToken: true,
authApiKey: true
});
const datasets = await MongoDataset.find({

View File

@@ -3,14 +3,11 @@ import { jsonRes } from '@fastgpt/service/common/response';
import { request } from '@fastgpt/service/common/api/plusRequest';
import type { Method } from 'axios';
import { setCookie } from '@fastgpt/service/support/permission/controller';
import { getInitConfig } from '../common/system/getInitData';
import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';
import { connectToDatabase } from '@/service/mongo';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
if (!FastGPTProUrl) {
await getInitConfig();
}
await connectToDatabase();
const method = (req.method || 'POST') as Method;
const { path = [], ...query } = req.query as any;

View File

@@ -0,0 +1,39 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { getTeamDatasetValidSub } from '@fastgpt/service/support/wallet/sub/utils';
import { getVectorCountByTeamId } from '@fastgpt/service/common/vectorStore/controller';
/**
 * Returns the caller team's dataset storage status: the active subscription,
 * the maximum allowed size, and the number of vectors currently stored.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    await connectToDatabase();

    // Resolve the caller's team from the auth token.
    const { teamId } = await authCert({
      req,
      authToken: true
    });

    // Fetch subscription info and current vector usage concurrently.
    const subPromise = getTeamDatasetValidSub({
      teamId,
      freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize
    });
    const usagePromise = getVectorCountByTeamId(teamId);
    const [{ sub, maxSize }, usedSize] = await Promise.all([subPromise, usagePromise]);

    jsonRes(res, {
      data: {
        sub,
        maxSize,
        usedSize
      }
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -2,7 +2,8 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { withNextCors } from '@fastgpt/service/common/middle/cors';
import { getUploadModel, removeFilesByPaths } from '@fastgpt/service/common/file/upload/multer';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
import fs from 'fs';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import { pushWhisperBill } from '@/service/support/wallet/bill/push';

View File

@@ -35,19 +35,17 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
const { tokens, vectors } = await getVectorsByText({ input: query, model });
jsonRes(res, {
data: {
object: 'list',
data: vectors.map((item, index) => ({
object: 'embedding',
index: index,
embedding: item
})),
model,
usage: {
prompt_tokens: tokens,
total_tokens: tokens
}
res.json({
object: 'list',
data: vectors.map((item, index) => ({
object: 'embedding',
index: index,
embedding: item
})),
model,
usage: {
prompt_tokens: tokens,
total_tokens: tokens
}
});

View File

@@ -22,6 +22,7 @@ import MyModal from '@/components/MyModal';
import { useAppStore } from '@/web/core/app/store/useAppStore';
import PermissionRadio from '@/components/support/permission/Radio';
import { useTranslation } from 'next-i18next';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const InfoModal = ({
defaultApp,
@@ -45,7 +46,6 @@ const InfoModal = ({
setValue,
getValues,
formState: { errors },
reset,
handleSubmit
} = useForm({
defaultValues: defaultApp
@@ -102,6 +102,7 @@ const InfoModal = ({
if (!file) return;
try {
const src = await compressImgFileAndUpload({
type: MongoImageTypeEnum.appAvatar,
file,
maxW: 300,
maxH: 300
@@ -187,4 +188,4 @@ const InfoModal = ({
);
};
export default InfoModal;
export default React.memo(InfoModal);

View File

@@ -81,7 +81,7 @@ const Logs = ({ appId }: { appId: string }) => {
cursor={'pointer'}
onClick={onOpenMarkDesc}
>
{t('chat.Read Mark Description')}
{t('core.chat.Read Mark Description')}
</Box>
</Box>
</>
@@ -202,9 +202,9 @@ const Logs = ({ appId }: { appId: string }) => {
<MyModal
isOpen={isOpenMarkDesc}
onClose={onCloseMarkDesc}
title={t('chat.Mark Description Title')}
title={t('core.chat.Mark Description Title')}
>
<ModalBody whiteSpace={'pre-wrap'}>{t('chat.Mark Description')}</ModalBody>
<ModalBody whiteSpace={'pre-wrap'}>{t('core.chat.Mark Description')}</ModalBody>
</MyModal>
</Flex>
);

View File

@@ -26,6 +26,7 @@ import Avatar from '@/components/Avatar';
import MyTooltip from '@/components/MyTooltip';
import MyModal from '@/components/MyModal';
import { useTranslation } from 'next-i18next';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
type FormType = {
avatar: string;
@@ -59,6 +60,7 @@ const CreateModal = ({ onClose, onSuccess }: { onClose: () => void; onSuccess: (
if (!file) return;
try {
const src = await compressImgFileAndUpload({
type: MongoImageTypeEnum.appAvatar,
file,
maxW: 300,
maxH: 300

View File

@@ -35,7 +35,7 @@ const ChatHeader = ({
() =>
chatContentReplaceBlock(history[history.length - 2]?.value)?.slice(0, 8) ||
appName ||
t('chat.New Chat'),
t('core.chat.New Chat'),
[appName, history]
);
@@ -56,8 +56,8 @@ const ChatHeader = ({
<MyIcon name={'history'} w={'14px'} />
<Box ml={1}>
{history.length === 0
? t('chat.Fresh Chat')
: t('chat.History Amount', { amount: history.length })}
? t('core.chat.New Chat')
: t('core.chat.History Amount', { amount: history.length })}
</Box>
</Tag>
{!!chatModels && chatModels.length > 0 && (

View File

@@ -74,18 +74,20 @@ const ChatHistorySlider = ({
// custom title edit
const { onOpenModal, EditModal: EditTitleModal } = useEditTitle({
title: t('chat.Custom History Title'),
placeholder: t('chat.Custom History Title Description')
title: t('core.chat.Custom History Title'),
placeholder: t('core.chat.Custom History Title Description')
});
const { openConfirm, ConfirmModal } = useConfirm({
content: isShare
? t('chat.Confirm to clear share chat history')
: t('chat.Confirm to clear history')
? t('core.chat.Confirm to clear share chat history')
: t('core.chat.Confirm to clear history')
});
const concatHistory = useMemo<HistoryItemType[]>(
() =>
!activeChatId ? [{ id: activeChatId, title: t('chat.New Chat') }].concat(history) : history,
!activeChatId
? [{ id: activeChatId, title: t('core.chat.New Chat') }].concat(history)
: history,
[activeChatId, history, t]
);
@@ -144,7 +146,7 @@ const ChatHistorySlider = ({
mr={2}
list={[
{ label: 'App', id: TabEnum.app },
{ label: 'chat.History', id: TabEnum.history }
{ label: t('core.chat.History'), id: TabEnum.history }
]}
activeId={currentTab}
onChange={(e) => setCurrentTab(e as `${TabEnum}`)}
@@ -160,7 +162,7 @@ const ChatHistorySlider = ({
overflow={'hidden'}
onClick={() => onChangeChat()}
>
{t('chat.New Chat')}
{t('core.chat.New Chat')}
</Button>
{(isPc || isShare) && (
@@ -240,7 +242,7 @@ const ChatHistorySlider = ({
}}
>
<MyIcon mr={2} name={'core/chat/setTopLight'} w={'16px'}></MyIcon>
{item.top ? t('chat.Unpin') : t('chat.Pin')}
{item.top ? t('core.chat.Unpin') : t('core.chat.Pin')}
</MenuItem>
)}
{onSetCustomTitle && (
@@ -336,7 +338,7 @@ const ChatHistorySlider = ({
borderRadius={'50%'}
aria-label={''}
/>
{t('chat.Exit Chat')}
{t('core.chat.Exit Chat')}
</Flex>
)}
<EditTitleModal />

View File

@@ -35,7 +35,7 @@ const SliderApps = ({ appId }: { appId: string }) => {
borderRadius={'50%'}
aria-label={''}
/>
{t('chat.Exit Chat')}
{t('core.chat.Exit Chat')}
</Flex>
</Box>
<Box flex={'1 0 0'} h={0} px={5} overflow={'overlay'}>

View File

@@ -15,7 +15,7 @@ const ToolMenu = ({ history }: { history: ChatItemType[] }) => {
() => [
{
icon: 'core/chat/chatLight',
label: t('chat.New Chat'),
label: t('core.chat.New Chat'),
onClick: () => {
router.replace({
query: {

View File

@@ -86,7 +86,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
const newTitle =
chatContentReplaceBlock(prompts[0].content).slice(0, 20) ||
prompts[1]?.value?.slice(0, 20) ||
t('chat.New Chat');
t('core.chat.New Chat');
// new chat
if (completionChatId !== chatId) {
@@ -166,7 +166,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
setLastChatAppId('');
setLastChatId('');
toast({
title: getErrText(e, t('chat.Failed to initialize chat')),
title: getErrText(e, t('core.chat.Failed to initialize chat')),
status: 'error'
});
if (e?.code === 501) {
@@ -210,7 +210,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
if (apps.length === 0) {
toast({
status: 'error',
title: t('chat.You need to a chat app')
title: t('core.chat.You need to a chat app')
});
router.replace('/app/list');
} else {

View File

@@ -88,7 +88,7 @@ const OutLink = ({
const newTitle =
chatContentReplaceBlock(prompts[0].content).slice(0, 20) ||
prompts[1]?.value?.slice(0, 20) ||
t('chat.New Chat');
t('core.chat.New Chat');
// new chat
if (completionChatId !== chatId) {

View File

@@ -32,16 +32,17 @@ import { useRouter } from 'next/router';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyInput from '@/components/MyInput';
import { useLoading } from '@/web/common/hooks/useLoading';
import InputDataModal, { RawSourceText, type InputDataType } from '../components/InputDataModal';
import InputDataModal from '../components/InputDataModal';
import RawSourceBox from '@/components/core/dataset/RawSourceBox';
import type { DatasetDataListItemType } from '@/global/core/dataset/type.d';
import { TabEnum } from '..';
import { useUserStore } from '@/web/support/user/useUserStore';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { getDefaultIndex } from '@fastgpt/global/core/dataset/utils';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import {
DatasetCollectionTypeMap,
DatasetCollectionTrainingTypeMap
TrainingModeEnum,
TrainingTypeMap
} from '@fastgpt/global/core/dataset/constant';
import { formatTime2YMDHM } from '@fastgpt/global/common/string/time';
import { formatFileSize } from '@fastgpt/global/common/file/tools';
@@ -90,7 +91,7 @@ const DataCard = () => {
}
});
const [editInputData, setEditInputData] = useState<InputDataType>();
const [editDataId, setEditDataId] = useState<string>();
// get first page data
const getFirstData = useCallback(
@@ -154,7 +155,7 @@ const DataCard = () => {
},
{
label: t('core.dataset.collection.metadata.Training Type'),
value: t(DatasetCollectionTrainingTypeMap[collection.trainingType]?.label)
value: t(TrainingTypeMap[collection.trainingType]?.label)
},
{
label: t('core.dataset.collection.metadata.Chunk Size'),
@@ -193,7 +194,7 @@ const DataCard = () => {
/>
<Flex className="textEllipsis" flex={'1 0 0'} mr={[3, 5]} alignItems={'center'}>
<Box lineHeight={1.2}>
<RawSourceText
<RawSourceBox
sourceName={collection?.name}
sourceId={collection?.fileId || collection?.rawLink}
fontSize={['md', 'lg']}
@@ -216,10 +217,7 @@ const DataCard = () => {
size={['sm', 'md']}
onClick={() => {
if (!collection) return;
setEditInputData({
q: '',
indexes: [getDefaultIndex({ dataId: `${Date.now()}` })]
});
setEditDataId('');
}}
>
{t('dataset.Insert Data')}
@@ -297,12 +295,7 @@ const DataCard = () => {
}}
onClick={() => {
if (!collection) return;
setEditInputData({
id: item._id,
q: item.q,
a: item.a,
indexes: item.indexes
});
setEditDataId(item._id);
}}
>
<Flex zIndex={1} alignItems={'center'} justifyContent={'space-between'}>
@@ -424,11 +417,11 @@ const DataCard = () => {
</Flex>
)}
{editInputData !== undefined && collection && (
{editDataId !== undefined && collection && (
<InputDataModal
collectionId={collection._id}
defaultValue={editInputData}
onClose={() => setEditInputData(undefined)}
dataId={editDataId}
onClose={() => setEditDataId(undefined)}
onSuccess={() => getData(pageNum)}
onDelete={() => getData(pageNum)}
/>

View File

@@ -4,14 +4,8 @@ import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { useToast } from '@/web/common/hooks/useToast';
import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { simpleText } from '@fastgpt/global/common/string/tools';
import {
fileDownload,
readCsvContent,
readPdfContent,
readDocContent
} from '@/web/common/file/utils';
import { readFileRawText, readMdFile, readHtmlFile } from '@fastgpt/web/common/file/read';
import { getUploadMdImgController, uploadFiles } from '@/web/common/file/controller';
import { fileDownload, readCsvContent } from '@/web/common/file/utils';
import { getUploadBase64ImgController, uploadFiles } from '@/web/common/file/controller';
import { Box, Flex, useDisclosure, type BoxProps } from '@chakra-ui/react';
import React, { DragEvent, useCallback, useState } from 'react';
import { useTranslation } from 'next-i18next';
@@ -25,6 +19,8 @@ import { countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constant';
import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api.d';
import { UrlFetchResponse } from '@fastgpt/global/common/file/api.d';
import { readFileRawContent } from '@fastgpt/web/common/file/read/index';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const UrlFetchModal = dynamic(() => import('./UrlFetchModal'));
const CreateFileModal = dynamic(() => import('./CreateFileModal'));
@@ -168,36 +164,22 @@ const FileSelect = ({
}
// parse and upload files
let text = await (async () => {
switch (extension) {
case 'txt':
return readFileRawText(file);
case 'md':
return readMdFile({
file,
uploadImgController: (base64Img) =>
getUploadMdImgController({ base64Img, metadata: { fileId } })
});
case 'html':
return readHtmlFile({
file,
uploadImgController: (base64Img) =>
getUploadMdImgController({ base64Img, metadata: { fileId } })
});
case 'pdf':
return readPdfContent(file);
case 'docx':
return readDocContent(file, {
let { rawText } = await readFileRawContent({
file,
uploadBase64Controller: (base64Img) =>
getUploadBase64ImgController({
base64Img,
type: MongoImageTypeEnum.docImage,
metadata: {
fileId
});
}
return '';
})();
}
})
});
if (text) {
text = simpleText(text);
if (rawText) {
rawText = simpleText(rawText);
const { chunks, tokens } = splitText2Chunks({
text,
text: rawText,
chunkLen,
overlapRatio,
customReg: customSplitChar ? [customSplitChar] : []
@@ -207,7 +189,7 @@ const FileSelect = ({
id: nanoid(),
filename: file.name,
icon,
rawText: text,
rawText,
tokens,
type: DatasetCollectionTypeEnum.file,
fileId,

View File

@@ -10,10 +10,7 @@ const CsvImport = dynamic(() => import('./Csv'), {});
import MyModal from '@/components/MyModal';
import Provider from './Provider';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import {
DatasetCollectionTrainingModeEnum,
TrainingModeEnum
} from '@fastgpt/global/core/dataset/constant';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constant';
export enum ImportTypeEnum {
chunk = 'chunk',
@@ -46,24 +43,21 @@ const ImportData = ({
chunkOverlapRatio: 0.2,
inputPrice: vectorModel?.inputPrice || 0,
outputPrice: 0,
mode: TrainingModeEnum.chunk,
collectionTrainingType: DatasetCollectionTrainingModeEnum.chunk
collectionTrainingType: TrainingModeEnum.chunk
},
[ImportTypeEnum.qa]: {
defaultChunkLen: agentModel?.maxContext * 0.55 || 8000,
chunkOverlapRatio: 0,
inputPrice: agentModel?.inputPrice || 0,
outputPrice: agentModel?.outputPrice || 0,
mode: TrainingModeEnum.qa,
collectionTrainingType: DatasetCollectionTrainingModeEnum.qa
collectionTrainingType: TrainingModeEnum.qa
},
[ImportTypeEnum.csv]: {
defaultChunkLen: 0,
chunkOverlapRatio: 0,
inputPrice: vectorModel?.inputPrice || 0,
outputPrice: 0,
mode: TrainingModeEnum.chunk,
collectionTrainingType: DatasetCollectionTrainingModeEnum.manual
collectionTrainingType: TrainingModeEnum.chunk
}
};
return map[importType];

View File

@@ -16,10 +16,7 @@ import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { useToast } from '@/web/common/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import {
DatasetCollectionTrainingModeEnum,
TrainingModeEnum
} from '@fastgpt/global/core/dataset/constant';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constant';
import { Box, Flex, Image, useTheme } from '@chakra-ui/react';
import { CloseIcon } from '@chakra-ui/icons';
import DeleteIcon, { hoverDeleteStyles } from '@fastgpt/web/components/common/Icon/delete';
@@ -104,7 +101,6 @@ const Provider = ({
parentId,
inputPrice,
outputPrice,
mode,
collectionTrainingType,
vectorModel,
agentModel,
@@ -118,8 +114,7 @@ const Provider = ({
parentId: string;
inputPrice: number;
outputPrice: number;
mode: `${TrainingModeEnum}`;
collectionTrainingType: `${DatasetCollectionTrainingModeEnum}`;
collectionTrainingType: `${TrainingModeEnum}`;
vectorModel: string;
agentModel: string;
defaultChunkLen: number;
@@ -147,14 +142,14 @@ const Provider = ({
const totalTokens = useMemo(() => files.reduce((sum, file) => sum + file.tokens, 0), [files]);
const price = useMemo(() => {
if (mode === TrainingModeEnum.qa) {
if (collectionTrainingType === TrainingModeEnum.qa) {
const inputTotal = totalTokens * inputPrice;
const outputTotal = totalTokens * 0.5 * outputPrice;
return formatModelPrice2Read(inputTotal + outputTotal);
}
return formatModelPrice2Read(totalTokens * inputPrice);
}, [inputPrice, mode, outputPrice, totalTokens]);
}, [collectionTrainingType, inputPrice, outputPrice, totalTokens]);
/*
start upload data
@@ -169,7 +164,7 @@ const Provider = ({
for await (const file of files) {
// create training bill
const billId = await postCreateTrainingBill({
name: t('dataset.collections.Create Training Data', { filename: file.filename }),
name: file.filename,
vectorModel,
agentModel
});
@@ -180,11 +175,15 @@ const Provider = ({
parentId,
name: file.filename,
type: file.type,
trainingType: collectionTrainingType,
chunkSize: chunkLen,
chunkSplitter: customSplitChar,
qaPrompt: collectionTrainingType === TrainingModeEnum.qa ? prompt : '',
fileId: file.fileId,
rawLink: file.rawLink,
chunkSize: chunkLen,
trainingType: collectionTrainingType,
qaPrompt: mode === TrainingModeEnum.qa ? prompt : '',
rawTextLength: file.rawText.length,
hashRawText: hashStr(file.rawText),
metadata: file.metadata
@@ -195,8 +194,8 @@ const Provider = ({
const { insertLen } = await chunksUpload({
collectionId,
billId,
trainingMode: collectionTrainingType,
chunks,
mode,
onUploading: (insertLen) => {
setSuccessChunks((state) => state + insertLen);
},

View File

@@ -1,10 +1,9 @@
import React, { useCallback, useState, useMemo } from 'react';
import React, { useState, useMemo } from 'react';
import { useRouter } from 'next/router';
import { Box, Flex, Button, IconButton, Input, Textarea } from '@chakra-ui/react';
import { DeleteIcon } from '@chakra-ui/icons';
import { delDatasetById } from '@/web/core/dataset/api';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { useToast } from '@/web/common/hooks/useToast';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { useConfirm } from '@/web/common/hooks/useConfirm';
import { useForm } from 'react-hook-form';
@@ -17,6 +16,7 @@ import PermissionRadio from '@/components/support/permission/Radio';
import MySelect from '@/components/Select';
import { qaModelList } from '@/web/common/system/staticData';
import { useRequest } from '@/web/common/hooks/useRequest';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const Info = ({ datasetId }: { datasetId: string }) => {
const { t } = useTranslation();
@@ -70,6 +70,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
const file = e[0];
if (!file) return Promise.resolve(null);
return compressImgFileAndUpload({
type: MongoImageTypeEnum.datasetAvatar,
file,
maxW: 300,
maxH: 300

View File

@@ -1,44 +1,38 @@
import React, { useMemo, useState } from 'react';
import { Box, Flex, Button, Textarea, BoxProps, Image, useTheme, Grid } from '@chakra-ui/react';
import { Box, Flex, Button, Textarea, useTheme, Grid } from '@chakra-ui/react';
import { useFieldArray, useForm } from 'react-hook-form';
import {
postInsertData2Dataset,
putDatasetDataById,
delOneDatasetDataById,
getDatasetCollectionById
getDatasetCollectionById,
getDatasetDataItemById
} from '@/web/core/dataset/api';
import { useToast } from '@/web/common/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyModal from '@/components/MyModal';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { useQuery } from '@tanstack/react-query';
import { useTranslation } from 'next-i18next';
import { getFileAndOpen } from '@/web/core/dataset/utils';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useRequest } from '@/web/common/hooks/useRequest';
import { countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import { useConfirm } from '@/web/common/hooks/useConfirm';
import { getDefaultIndex, getSourceNameIcon } from '@fastgpt/global/core/dataset/utils';
import { feConfigs, vectorModelList } from '@/web/common/system/staticData';
import { getDefaultIndex } from '@fastgpt/global/core/dataset/utils';
import { vectorModelList } from '@/web/common/system/staticData';
import { DatasetDataIndexTypeEnum } from '@fastgpt/global/core/dataset/constant';
import { DatasetDataIndexItemType } from '@fastgpt/global/core/dataset/type';
import SideTabs from '@/components/SideTabs';
import { useLoading } from '@/web/common/hooks/useLoading';
import DeleteIcon from '@fastgpt/web/components/common/Icon/delete';
import { defaultCollectionDetail } from '@/constants/dataset';
import { getDocPath } from '@/web/common/system/doc';
import RawSourceBox from '@/components/core/dataset/RawSourceBox';
import MyBox from '@/components/common/MyBox';
import { getErrText } from '@fastgpt/global/common/error/utils';
export type RawSourceTextProps = BoxProps & {
sourceName?: string;
sourceId?: string;
canView?: boolean;
};
export type InputDataType = {
id?: string;
q: string;
a?: string;
a: string;
indexes: (Omit<DatasetDataIndexItemType, 'dataId'> & {
dataId?: string; // pg data id
})[];
@@ -53,26 +47,25 @@ enum TabEnum {
const InputDataModal = ({
collectionId,
dataId,
defaultValue,
onClose,
onSuccess,
onDelete
}: {
collectionId: string;
defaultValue: InputDataType;
dataId?: string;
defaultValue?: { q: string; a?: string };
onClose: () => void;
onSuccess: (data: InputDataType) => void;
onSuccess: (data: InputDataType & { dataId: string }) => void;
onDelete?: () => void;
}) => {
const { t } = useTranslation();
const theme = useTheme();
const { toast } = useToast();
const { Loading } = useLoading();
const [currentTab, setCurrentTab] = useState(TabEnum.content);
const { register, handleSubmit, reset, control } = useForm<InputDataType>({
defaultValues: defaultValue
});
const { register, handleSubmit, reset, control } = useForm<InputDataType>();
const {
fields: indexes,
append: appendIndexes,
@@ -89,14 +82,15 @@ const InputDataModal = ({
id: TabEnum.index,
icon: 'kbTest'
},
...(defaultValue.id
...(dataId
? [{ label: t('dataset.data.edit.Delete'), id: TabEnum.delete, icon: 'delete' }]
: []),
{ label: t('dataset.data.edit.Course'), id: TabEnum.doc, icon: 'common/courseLight' }
];
const { ConfirmModal, openConfirm } = useConfirm({
content: t('dataset.data.Delete Tip')
content: t('dataset.data.Delete Tip'),
type: 'delete'
});
const { data: collection = defaultCollectionDetail } = useQuery(
@@ -105,6 +99,37 @@ const InputDataModal = ({
return getDatasetCollectionById(collectionId);
}
);
const { isFetching: isFetchingData } = useQuery(
['getDatasetDataItemById', dataId],
() => {
if (dataId) return getDatasetDataItemById(dataId);
return null;
},
{
onSuccess(res) {
if (res) {
reset({
q: res.q,
a: res.a,
indexes: res.indexes
});
} else if (defaultValue) {
reset({
q: defaultValue.q,
a: defaultValue.a,
indexes: [getDefaultIndex({ dataId: `${Date.now()}` })]
});
}
},
onError(err) {
toast({
status: 'error',
title: getErrText(err)
});
onClose();
}
}
);
const maxToken = useMemo(() => {
const vectorModel =
@@ -130,7 +155,7 @@ const InputDataModal = ({
const data = { ...e };
data.id = await postInsertData2Dataset({
const dataId = await postInsertData2Dataset({
collectionId: collection._id,
q: e.q,
a: e.a,
@@ -140,7 +165,10 @@ const InputDataModal = ({
)
});
return data;
return {
...data,
dataId
};
},
successToast: t('dataset.data.Input Success Tip'),
onSuccess(e) {
@@ -158,17 +186,18 @@ const InputDataModal = ({
// update
const { mutate: onUpdateData, isLoading: isUpdating } = useRequest({
mutationFn: async (e: InputDataType) => {
if (!e.id) return e;
if (!dataId) return e;
// not exactly same
await putDatasetDataById({
id: e.id,
q: e.q,
a: e.a,
indexes: e.indexes
id: dataId,
...e
});
return e;
return {
dataId,
...e
};
},
successToast: t('dataset.data.Update Success Tip'),
errorToast: t('common.error.unKnow'),
@@ -180,8 +209,8 @@ const InputDataModal = ({
// delete
const { mutate: onDeleteData, isLoading: isDeleting } = useRequest({
mutationFn: () => {
if (!onDelete || !defaultValue.id) return Promise.resolve(null);
return delOneDatasetDataById(defaultValue.id);
if (!onDelete || !dataId) return Promise.resolve(null);
return delOneDatasetDataById(dataId);
},
onSuccess() {
if (!onDelete) return;
@@ -192,13 +221,16 @@ const InputDataModal = ({
errorToast: t('common.error.unKnow')
});
const loading = useMemo(() => isImporting || isUpdating, [isImporting, isUpdating]);
const isLoading = useMemo(
() => isImporting || isUpdating || isFetchingData || isDeleting,
[isImporting, isUpdating, isFetchingData, isDeleting]
);
return (
<MyModal isOpen={true} isCentered w={'90vw'} maxW={'1440px'} h={'90vh'}>
<Flex h={'100%'}>
<MyBox isLoading={isLoading} display={'flex'} h={'100%'}>
<Box p={5} borderRight={theme.borders.base}>
<RawSourceText
<RawSourceBox
w={'200px'}
className="textEllipsis3"
whiteSpace={'pre-wrap'}
@@ -224,7 +256,7 @@ const InputDataModal = ({
<Flex flexDirection={'column'} py={3} flex={1} h={'100%'}>
<Box fontSize={'lg'} px={5} fontWeight={'bold'} mb={4}>
{currentTab === TabEnum.content && (
<>{defaultValue.id ? t('dataset.data.Update Data') : t('dataset.data.Input Data')}</>
<>{dataId ? t('dataset.data.Update Data') : t('dataset.data.Input Data')}</>
)}
{currentTab === TabEnum.index && <> {t('dataset.data.Index Edit')}</>}
</Box>
@@ -351,82 +383,24 @@ const InputDataModal = ({
)}
</Box>
<Flex justifyContent={'flex-end'} px={5} mt={4}>
<Button variant={'whitePrimary'} mr={3} isLoading={loading} onClick={onClose}>
<Button variant={'whiteBase'} mr={3} onClick={onClose}>
{t('common.Close')}
</Button>
<MyTooltip label={collection.canWrite ? '' : t('dataset.data.Can not edit')}>
<Button
isDisabled={!collection.canWrite}
isLoading={loading}
// @ts-ignore
onClick={handleSubmit(defaultValue.id ? onUpdateData : sureImportData)}
onClick={handleSubmit(dataId ? onUpdateData : sureImportData)}
>
{defaultValue.id ? t('common.Confirm Update') : t('common.Confirm Import')}
{dataId ? t('common.Confirm Update') : t('common.Confirm Import')}
</Button>
</MyTooltip>
</Flex>
</Flex>
</Flex>
</MyBox>
<ConfirmModal />
<Loading fixed={false} loading={isDeleting} />
</MyModal>
);
};
export default InputDataModal;
export function RawSourceText({
sourceId,
sourceName = '',
canView = true,
...props
}: RawSourceTextProps) {
const { t } = useTranslation();
const { toast } = useToast();
const { setLoading } = useSystemStore();
const canPreview = useMemo(() => !!sourceId && canView, [canView, sourceId]);
const icon = useMemo(() => getSourceNameIcon({ sourceId, sourceName }), [sourceId, sourceName]);
return (
<MyTooltip
label={canPreview ? t('file.Click to view file') || '' : ''}
shouldWrapChildren={false}
>
<Box
color={'myGray.600'}
display={'inline-flex'}
whiteSpace={'nowrap'}
{...(canPreview
? {
cursor: 'pointer',
textDecoration: 'underline',
onClick: async () => {
setLoading(true);
try {
await getFileAndOpen(sourceId as string);
} catch (error) {
toast({
title: t(getErrText(error, 'error.fileNotFound')),
status: 'error'
});
}
setLoading(false);
}
}
: {})}
{...props}
>
<Image src={icon} alt="" w={['14px', '16px']} mr={2} />
<Box
maxW={['200px', '300px']}
className={props.className ?? 'textEllipsis'}
wordBreak={'break-all'}
>
{sourceName || t('common.UnKnow Source')}
</Box>
</Box>
</MyTooltip>
);
}
export default React.memo(InputDataModal);

View File

@@ -233,7 +233,7 @@ const Test = ({ datasetId }: { datasetId: string }) => {
h={'100%'}
resize={'none'}
variant={'unstyled'}
maxLength={datasetDetail.vectorModel.maxToken}
maxLength={datasetDetail.vectorModel?.maxToken}
placeholder={t('core.dataset.test.Test Text Placeholder')}
onFocus={() => setIsFocus(true)}
{...register('inputText', {
@@ -314,7 +314,7 @@ const Test = ({ datasetId }: { datasetId: string }) => {
</Box>
</Box>
{/* result show */}
<Box p={4} h={['auto', '100%']} overflow={'overlay'} flex={'1 0 0'}>
<Box p={4} h={['auto', '100%']} overflow={'overlay'} flex={'1 0 0'} bg={'white'}>
<TestResults datasetTestItem={datasetTestItem} />
</Box>
@@ -384,6 +384,9 @@ const TestHistories = React.memo(function TestHistories({
}}
cursor={'pointer'}
fontSize={'sm'}
{...(item.id === datasetTestItem?.id && {
bg: 'primary.50'
})}
onClick={() => setDatasetTestItem(item)}
>
<Box flex={'0 0 80px'}>

View File

@@ -16,8 +16,6 @@ import { serviceSideProps } from '@/web/common/utils/i18n';
import { useTranslation } from 'next-i18next';
import { getTrainingQueueLen } from '@/web/core/dataset/api';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { feConfigs } from '@/web/common/system/staticData';
import Script from 'next/script';
import CollectionCard from './components/CollectionCard';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
@@ -29,6 +27,7 @@ import {
} from '@fastgpt/global/core/dataset/constant';
import { useConfirm } from '@/web/common/hooks/useConfirm';
import { useRequest } from '@/web/common/hooks/useRequest';
import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
const DataCard = dynamic(() => import('./components/DataCard'), {
ssr: false
@@ -150,50 +149,47 @@ const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: `${T
{isPc ? (
<Flex
flexDirection={'column'}
p={4}
py={4}
h={'100%'}
flex={'0 0 200px'}
borderRight={theme.borders.base}
>
<Flex mb={4} alignItems={'center'}>
<Avatar src={datasetDetail.avatar} w={'34px'} borderRadius={'md'} />
<Box ml={2}>
<Box fontWeight={'bold'}>{datasetDetail.name}</Box>
</Box>
</Flex>
{DatasetTypeMap[datasetDetail.type] && (
<Flex alignItems={'center'} pl={2}>
<MyIcon
name={DatasetTypeMap[datasetDetail.type]?.icon as any}
mr={1}
w={'16px'}
/>
<Box flex={1}>{t(DatasetTypeMap[datasetDetail.type]?.label)}</Box>
{datasetDetail.type === DatasetTypeEnum.websiteDataset &&
datasetDetail.status === DatasetStatusEnum.active && (
<MyTooltip label={t('core.dataset.website.Start Sync')}>
<MyIcon
mt={1}
name={'common/refreshLight'}
w={'12px'}
color={'myGray.500'}
cursor={'pointer'}
onClick={() =>
openConfirmSync(
onUpdateDatasetWebsiteConfig,
undefined,
t('core.dataset.website.Confirm Create Tips')
)()
}
/>
</MyTooltip>
)}
<Box px={4} borderBottom={'1px'} borderColor={'myGray.200'} pb={4} mb={4}>
<Flex mb={4} alignItems={'center'}>
<Avatar src={datasetDetail.avatar} w={'34px'} borderRadius={'md'} />
<Box ml={2}>
<Box fontWeight={'bold'}>{datasetDetail.name}</Box>
</Box>
</Flex>
)}
{DatasetTypeMap[datasetDetail.type] && (
<Flex alignItems={'center'} pl={2} justifyContent={'space-between'}>
<DatasetTypeTag type={datasetDetail.type} />
{datasetDetail.type === DatasetTypeEnum.websiteDataset &&
datasetDetail.status === DatasetStatusEnum.active && (
<MyTooltip label={t('core.dataset.website.Start Sync')}>
<MyIcon
mt={1}
name={'common/refreshLight'}
w={'12px'}
color={'myGray.500'}
cursor={'pointer'}
onClick={() =>
openConfirmSync(
onUpdateDatasetWebsiteConfig,
undefined,
t('core.dataset.website.Confirm Create Tips')
)()
}
/>
</MyTooltip>
)}
</Flex>
)}
</Box>
<SideTabs
px={4}
flex={1}
mx={'auto'}
mt={3}
w={'100%'}
list={tabList}
activeId={currentTab}
@@ -201,7 +197,7 @@ const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: `${T
setCurrentTab(e);
}}
/>
<Box>
<Box px={4}>
<Box mb={3}>
<Box fontSize={'sm'}>
{t('core.dataset.training.Agent queue')}({agentTrainingMap.tip})
@@ -229,6 +225,7 @@ const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: `${T
/>
</Box>
</Box>
<Flex
alignItems={'center'}
cursor={'pointer'}

View File

@@ -19,6 +19,7 @@ import { useTranslation } from 'next-i18next';
import MyRadio from '@/components/common/MyRadio';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constant';
import { feConfigs } from '@/web/common/system/staticData';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: string }) => {
const { t } = useTranslation();
@@ -49,6 +50,7 @@ const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: st
if (!file) return;
try {
const src = await compressImgFileAndUpload({
type: MongoImageTypeEnum.datasetAvatar,
file,
maxW: 300,
maxH: 300
@@ -62,7 +64,7 @@ const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: st
});
}
},
[setValue, toast]
[setValue, t, toast]
);
/* create a new kb and router to it */

View File

@@ -22,7 +22,7 @@ import {
putDatasetById,
postCreateDataset
} from '@/web/core/dataset/api';
import { checkTeamExportDatasetLimit } from '@/web/support/user/api';
import { checkTeamExportDatasetLimit } from '@/web/support/user/team/api';
import { useTranslation } from 'next-i18next';
import Avatar from '@/components/Avatar';
import MyIcon from '@fastgpt/web/components/common/Icon';
@@ -44,6 +44,7 @@ import PermissionIconText from '@/components/support/permission/IconText';
import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import ParentPaths from '@/components/common/ParentPaths';
import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
const CreateModal = dynamic(() => import('./component/CreateModal'), { ssr: false });
const MoveModal = dynamic(() => import('./component/MoveModal'), { ssr: false });
@@ -409,8 +410,9 @@ const Kb = () => {
<Box flex={1}>
<PermissionIconText permission={dataset.permission} color={'myGray.600'} />
</Box>
<MyIcon mr={1} name={dataset.icon as any} w={'12px'} />
<Box color={'myGray.500'}>{t(dataset.label)}</Box>
{dataset.type !== DatasetTypeEnum.folder && (
<DatasetTypeTag type={dataset.type} py={1} px={2} />
)}
</Flex>
</Box>
))}

View File

@@ -17,6 +17,7 @@ import { useConfirm } from '@/web/common/hooks/useConfirm';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { CreateOnePluginParams } from '@fastgpt/global/core/plugin/controller';
import { customAlphabet } from 'nanoid';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
export type FormType = CreateOnePluginParams & {
@@ -92,6 +93,7 @@ const CreateModal = ({
if (!file) return;
try {
const src = await compressImgFileAndUpload({
type: MongoImageTypeEnum.pluginAvatar,
file,
maxW: 300,
maxH: 300

View File

@@ -0,0 +1,18 @@
import { initSystemConfig } from '@/pages/api/common/system/getInitData';
import { generateQA } from '@/service/events/generateQA';
import { generateVector } from '@/service/events/generateVector';
import { setCron } from '@fastgpt/service/common/system/cron';
/**
 * Register a cron job that re-runs `initSystemConfig` every 5 minutes so the
 * in-memory system config stays in sync, logging each refresh.
 */
export function setUpdateSystemConfigCron() {
  setCron('*/5 * * * *', function refreshSystemConfig() {
    initSystemConfig();
    console.log('refresh system config');
  });
}
/**
 * Register a cron job that kicks the vector and QA training queues every
 * 3 minutes, so stalled queues are picked up again.
 */
export function setTrainingQueueCron() {
  setCron('*/3 * * * *', function kickTrainingQueues() {
    generateVector();
    generateQA();
  });
}

View File

@@ -27,7 +27,7 @@ export function reRankRecall({ query, inputs }: PostReRankProps) {
return data;
})
.catch((err) => {
console.log(err);
console.log('rerank error:', err);
return [];
});

View File

@@ -14,7 +14,8 @@ import {
DatasetDataIndexTypeEnum,
DatasetSearchModeEnum,
DatasetSearchModeMap,
SearchScoreTypeEnum
SearchScoreTypeEnum,
TrainingModeEnum
} from '@fastgpt/global/core/dataset/constant';
import { getDefaultIndex } from '@fastgpt/global/core/dataset/utils';
import { jiebaSplit } from '@/service/common/string/jieba';
@@ -27,7 +28,173 @@ import {
} from '@fastgpt/global/core/dataset/type';
import { reRankRecall } from '../../ai/rerank';
import { countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { hashStr, simpleText } from '@fastgpt/global/common/string/tools';
import type { PushDatasetDataProps } from '@/global/core/dataset/api.d';
import type { PushDataResponse } from '@/global/core/api/datasetRes';
import { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { startQueue } from '@/service/utils/tools';
import { getCollectionWithDataset } from '@fastgpt/service/core/dataset/controller';
import { getQAModel, getVectorModel } from '../../ai/model';
import { delay } from '@fastgpt/global/common/system/utils';
/**
 * Validate, normalize, dedupe and enqueue a batch of dataset chunks as
 * training records for the given collection.
 *
 * Steps (in order):
 * 1. Resolve the collection's dataset and pick the model/limits that match
 *    `trainingMode` (chunk → vector model, qa → agent model).
 * 2. Normalize `q`/`a`/index text with `simpleText`.
 * 3. Partition items into success / overToken / repeat / error buckets.
 * 4. Bulk-insert the successful items into MongoDatasetTraining in batches
 *    of 50 (with retry), then start the processing queue.
 *
 * Returns the number of inserted records plus the rejected buckets
 * (`success` is deleted before spreading so it is not echoed back).
 */
export async function pushDataToDatasetCollection({
  teamId,
  tmbId,
  collectionId,
  data,
  prompt,
  billId,
  trainingMode
}: {
  teamId: string;
  tmbId: string;
} & PushDatasetDataProps): Promise<PushDataResponse> {
  // Resolve dataset + model config for the requested training mode.
  // Rejects when the mode is unknown or the configured model is missing.
  const checkModelValid = async ({ collectionId }: { collectionId: string }) => {
    const {
      datasetId: { _id: datasetId, vectorModel, agentModel }
    } = await getCollectionWithDataset(collectionId);

    if (trainingMode === TrainingModeEnum.chunk) {
      if (!collectionId) return Promise.reject(`CollectionId is empty`);
      const vectorModelData = getVectorModel(vectorModel);
      if (!vectorModelData) {
        return Promise.reject(`Model ${vectorModel} is inValid`);
      }
      return {
        datasetId,
        // NOTE(review): 1.5x the vector model's maxToken — presumably slack
        // over the embedding limit; confirm against the tokenizer used.
        maxToken: vectorModelData.maxToken * 1.5,
        model: vectorModelData.model,
        weight: vectorModelData.weight
      };
    }

    if (trainingMode === TrainingModeEnum.qa) {
      const qaModelData = getQAModel(agentModel);
      if (!qaModelData) {
        return Promise.reject(`Model ${agentModel} is inValid`);
      }
      return {
        datasetId,
        // 80% of the QA model's context window, leaving room for the prompt.
        maxToken: qaModelData.maxContext * 0.8,
        model: qaModelData.model,
        weight: 0
      };
    }

    return Promise.reject(`Mode ${trainingMode} is inValid`);
  };

  const { datasetId, model, maxToken, weight } = await checkModelValid({
    collectionId
  });

  // format q and a, remove empty char (mutates the incoming `data` items)
  data.forEach((item) => {
    item.q = simpleText(item.q);
    item.a = simpleText(item.a);
    item.indexes = item.indexes
      ?.map((index) => {
        return {
          ...index,
          text: simpleText(index.text)
        };
      })
      .filter(Boolean);
  });

  // filter repeat or equal content — bucket every item exactly once
  const set = new Set();
  const filterResult: Record<string, PushDatasetDataChunkProps[]> = {
    success: [],
    overToken: [],
    repeat: [],
    error: []
  };

  data.forEach((item) => {
    if (!item.q) {
      filterResult.error.push(item);
      return;
    }

    // q + a together form the dedupe key
    const text = item.q + item.a;

    // count q token
    const token = countPromptTokens(item.q);

    if (token > maxToken) {
      filterResult.overToken.push(item);
      return;
    }

    if (set.has(text)) {
      console.log('repeat', item);
      filterResult.repeat.push(item);
    } else {
      filterResult.success.push(item);
      set.add(text);
    }
  });

  // Insert records. Retries the whole batch up to `retry` times with a 1s
  // backoff; the 500ms delay after success throttles consecutive batches.
  const insertData = async (dataList: PushDatasetDataChunkProps[], retry = 3): Promise<number> => {
    try {
      const results = await MongoDatasetTraining.insertMany(
        dataList.map((item, i) => ({
          teamId,
          tmbId,
          datasetId,
          collectionId,
          billId,
          mode: trainingMode,
          prompt,
          model,
          // fall back to the position in this batch when no index is given
          chunkIndex: item.chunkIndex ?? i,
          weight: weight ?? 0,
          q: item.q,
          a: item.a,
          indexes: item.indexes
        }))
      );
      await delay(500);
      return results.length;
    } catch (error) {
      if (retry > 0) {
        await delay(1000);
        return insertData(dataList, retry - 1);
      }
      return Promise.reject(error);
    }
  };

  let insertLen = 0;
  const chunkSize = 50;
  // Split successes into batches of at most `chunkSize` items.
  const chunkList = filterResult.success.reduce(
    (acc, cur) => {
      const lastChunk = acc[acc.length - 1];
      if (lastChunk.length < chunkSize) {
        lastChunk.push(cur);
      } else {
        acc.push([cur]);
      }
      return acc;
    },
    [[]] as PushDatasetDataChunkProps[][]
  );
  // Batches are inserted sequentially, not in parallel.
  for await (const chunks of chunkList) {
    insertLen += await insertData(chunks);
  }

  // Wake the background training queue to process the new records.
  startQueue();
  // Drop the success bucket so the response only reports rejected items.
  delete filterResult.success;

  return {
    insertLen,
    ...filterResult
  };
}
/* insert data.
* 1. create data id
@@ -439,7 +606,9 @@ export async function searchDatasetData(props: {
}))
});
if (!Array.isArray(results)) return [];
if (!Array.isArray(results)) {
return [];
}
// add new score to data
const mergeResult = results
@@ -457,7 +626,6 @@ export async function searchDatasetData(props: {
return mergeResult;
} catch (error) {
usingReRank = false;
return [];
}
};

View File

@@ -8,20 +8,15 @@ import { addLog } from '@fastgpt/service/common/system/log';
import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { Prompt_AgentQA } from '@/global/core/prompt/agent';
import { pushDataToDatasetCollection } from '@/pages/api/core/dataset/data/pushData';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { authTeamBalance } from '../support/permission/auth/bill';
import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api.d';
import { UserErrEnum } from '@fastgpt/global/common/error/code/user';
import { lockTrainingDataByTeamId } from '@fastgpt/service/core/dataset/training/controller';
import { pushDataToDatasetCollection } from '@/service/core/dataset/data/controller';
const reduceQueue = (retry = false) => {
const reduceQueue = () => {
global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
if (global.qaQueueLen === 0 && retry) {
setTimeout(() => {
generateQA();
}, 60000);
}
return global.vectorQueueLen === 0;
};
@@ -144,11 +139,11 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
teamId: data.teamId,
tmbId: data.tmbId,
collectionId: data.collectionId,
trainingMode: TrainingModeEnum.chunk,
data: qaArr.map((item) => ({
...item,
chunkIndex: data.chunkIndex
})),
mode: TrainingModeEnum.chunk,
billId: data.billId
});
@@ -178,7 +173,7 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
reduceQueue();
generateQA();
} catch (err: any) {
reduceQueue(true);
reduceQueue();
// log
if (err?.response) {
addLog.info('openai error: 生成QA错误', {

View File

@@ -9,15 +9,9 @@ import { pushGenerateVectorBill } from '@/service/support/wallet/bill/push';
import { UserErrEnum } from '@fastgpt/global/common/error/code/user';
import { lockTrainingDataByTeamId } from '@fastgpt/service/core/dataset/training/controller';
const reduceQueue = (retry = false) => {
const reduceQueue = () => {
global.vectorQueueLen = global.vectorQueueLen > 0 ? global.vectorQueueLen - 1 : 0;
if (global.vectorQueueLen === 0 && retry) {
setTimeout(() => {
generateVector();
}, 60000);
}
return global.vectorQueueLen === 0;
};
@@ -159,7 +153,7 @@ export async function generateVector(): Promise<any> {
console.log(`embedding finished, time: ${Date.now() - start}ms`);
} catch (err: any) {
reduceQueue(true);
reduceQueue();
// log
if (err?.response) {
addLog.info('openai error: 生成向量错误', {

View File

@@ -214,7 +214,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
model: modelName,
inputTokens,
outputTokens,
query: userChatInput,
query: `${userChatInput}`,
maxToken: max_tokens,
quoteList: filterQuoteQA,
historyPreview: getHistoryPreview(completeMessages),
@@ -407,7 +407,7 @@ async function streamResponse({
}
if (!answer) {
return Promise.reject('Chat API is error or undefined');
return Promise.reject('core.chat API is error or undefined');
}
return { answer };

View File

@@ -58,7 +58,7 @@ export async function dispatchDatasetSearch(
usingSimilarityFilter,
usingReRank: searchUsingReRank
} = await searchDatasetData({
rawQuery: userChatInput,
rawQuery: `${userChatInput}`,
queries: concatQueries,
model: vectorModel.model,
similarity,

View File

@@ -61,7 +61,7 @@ A: ${systemPrompt}
{
role: 'user',
content: replaceVariable(defaultPrompt, {
query: userChatInput,
query: `${userChatInput}`,
histories: concatFewShot
})
}

View File

@@ -6,6 +6,8 @@ import { hashStr } from '@fastgpt/global/common/string/tools';
import { createDefaultTeam } from '@fastgpt/service/support/user/team/controller';
import { exit } from 'process';
import { initVectorStore } from '@fastgpt/service/common/vectorStore/controller';
import { getInitConfig } from '@/pages/api/common/system/getInitData';
import { setUpdateSystemConfigCron, setTrainingQueueCron } from './common/system/cron';
/**
* connect MongoDB and init data
@@ -13,11 +15,18 @@ import { initVectorStore } from '@fastgpt/service/common/vectorStore/controller'
export function connectToDatabase(): Promise<void> {
return connectMongo({
beforeHook: () => {},
afterHook: () => {
afterHook: async () => {
initVectorStore();
// start queue
startQueue();
return initRootUser();
// init system config
getInitConfig();
// cron
setUpdateSystemConfigCron();
setTrainingQueueCron();
initRootUser();
}
});
}

View File

@@ -60,7 +60,6 @@ export async function saveChat({
}))
)
];
console.log(metadataUpdate);
const title =
chatContentReplaceBlock(content[0].value).slice(0, 20) ||

View File

@@ -2,20 +2,9 @@ import { generateQA } from '../events/generateQA';
import { generateVector } from '../events/generateVector';
/* start task */
export const startQueue = (limit?: number) => {
export const startQueue = () => {
if (!global.systemEnv) return;
if (limit) {
for (let i = 0; i < limit; i++) {
generateVector();
generateQA();
}
return;
}
for (let i = 0; i < global.systemEnv.qaMaxProcess; i++) {
generateQA();
}
for (let i = 0; i < global.systemEnv.vectorMaxProcess; i++) {
generateVector();
}
generateQA();
generateVector();
};

View File

@@ -1,10 +1,8 @@
import { postUploadImg, postUploadFiles } from '@/web/common/file/api';
import { UploadImgProps } from '@fastgpt/global/common/file/api';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import {
compressBase64ImgAndUpload as compressBase64ImgAndUploadControl,
type CompressImgProps
} from '@fastgpt/web/common/file/img';
import { preUploadImgProps } from '@fastgpt/global/common/file/api';
import { compressBase64Img, type CompressImgProps } from '@fastgpt/web/common/file/img';
/**
* upload file to mongo gridfs
@@ -34,57 +32,45 @@ export const uploadFiles = ({
});
};
export const getUploadMdImgController = ({
base64Img,
metadata
}: {
base64Img: string;
metadata: Record<string, any>;
}) =>
compressBase64ImgAndUpload({
base64Img,
export const getUploadBase64ImgController = (props: CompressImgProps & UploadImgProps) =>
compressBase64Img({
maxW: 4000,
maxH: 4000,
maxSize: 1024 * 1024 * 5,
metadata
...props
});
/**
* compress image. response base64
* @param maxSize The max size of the compressed image
*/
export const compressBase64ImgAndUpload = ({
expiredTime,
metadata,
shareId,
...props
}: UploadImgProps & CompressImgProps) => {
return compressBase64ImgAndUploadControl({
...props,
uploadController: (base64Img) =>
postUploadImg({
shareId,
base64Img,
expiredTime,
metadata
})
});
};
export const compressImgFileAndUpload = async ({
file,
export const compressBase64ImgAndUpload = async ({
base64Img,
maxW,
maxH,
maxSize,
expiredTime,
shareId
}: {
file: File;
maxW?: number;
maxH?: number;
maxSize?: number;
expiredTime?: Date;
shareId?: string;
}) => {
...props
}: UploadImgProps & CompressImgProps) => {
const compressUrl = await compressBase64Img({
base64Img,
maxW,
maxH,
maxSize
});
return postUploadImg({
...props,
base64Img: compressUrl
});
};
export const compressImgFileAndUpload = async ({
file,
...props
}: preUploadImgProps &
CompressImgProps & {
file: File;
}) => {
const reader = new FileReader();
reader.readAsDataURL(file);
@@ -94,16 +80,12 @@ export const compressImgFileAndUpload = async ({
};
reader.onerror = (err) => {
console.log(err);
reject('压缩图片异常');
reject('Load image error');
};
});
return compressBase64ImgAndUpload({
base64Img,
maxW,
maxH,
maxSize,
expiredTime,
shareId
...props
});
};

View File

@@ -1,80 +1,5 @@
import mammoth from 'mammoth';
import Papa from 'papaparse';
import { compressBase64ImgAndUpload } from './controller';
import { simpleMarkdownText } from '@fastgpt/global/common/string/markdown';
import { htmlStr2Md } from '@fastgpt/web/common/string/markdown';
import { readPdfFile } from '@fastgpt/global/common/file/read/index';
import { readFileRawText } from '@fastgpt/web/common/file/read';
/**
* read pdf to raw text
*/
/**
 * Read a PDF file in the browser and resolve with its extracted raw text.
 * Rejects with a user-facing message when parsing fails or when the browser
 * does not support reading file contents.
 */
export const readPdfContent = (file: File) =>
  new Promise<string>((resolve, reject) => {
    try {
      const fileReader = new FileReader();
      fileReader.onload = async (event) => {
        const data = event?.target?.result;
        if (!data) {
          return reject('解析 PDF 失败');
        }
        try {
          resolve(await readPdfFile({ pdf: data }));
        } catch (err) {
          console.log(err, 'pdf load error');
          reject('解析 PDF 失败');
        }
      };
      fileReader.onerror = (err) => {
        console.log(err, 'pdf load error');
        reject('解析 PDF 失败');
      };
      // kick off the read after both handlers are attached
      fileReader.readAsArrayBuffer(file);
    } catch (error) {
      reject('浏览器不支持文件内容读取');
    }
  });
/**
* read docx to markdown
*/
/**
 * Read a .doc/.docx file in the browser: convert it to HTML via mammoth,
 * turn the HTML into markdown, upload any embedded base64 images, and
 * resolve with the final markdown text. Rejects with a user-facing message
 * on failure. Read errors are also reported to analytics (best effort —
 * window.umami may be absent, hence the optional chaining).
 */
export const readDocContent = (file: File, metadata: Record<string, any>) =>
new Promise<string>((resolve, reject) => {
try {
const reader = new FileReader();
reader.readAsArrayBuffer(file);
reader.onload = async ({ target }) => {
if (!target?.result) return reject('读取 doc 文件失败');
try {
const buffer = target.result as ArrayBuffer;
const { value: html } = await mammoth.convertToHtml({
arrayBuffer: buffer
});
const md = htmlStr2Md(html);
// upload inline base64 images and tidy the resulting markdown
const rawText = await uploadMarkdownBase64(md, metadata);
resolve(rawText);
} catch (error) {
window.umami?.track('wordReadError', {
err: error?.toString()
});
console.log('error doc read:', error);
reject('读取 doc 文件失败, 请转换成 PDF');
}
};
reader.onerror = (err) => {
window.umami?.track('wordReadError', {
err: err?.toString()
});
console.log('error doc read:', err);
reject('读取 doc 文件失败');
};
} catch (error) {
reject('浏览器不支持文件内容读取');
}
});
import { readFileRawText } from '@fastgpt/web/common/file/read/rawText';
/**
* read csv to json
@@ -85,7 +10,7 @@ export const readDocContent = (file: File, metadata: Record<string, any>) =>
*/
export const readCsvContent = async (file: File) => {
try {
const textArr = await readFileRawText(file);
const { rawText: textArr } = await readFileRawText(file);
const csvArr = Papa.parse(textArr).data as string[][];
if (csvArr.length === 0) {
throw new Error('csv 解析失败');
@@ -99,44 +24,6 @@ export const readCsvContent = async (file: File) => {
}
};
/**
 * Format markdown produced from an uploaded document:
 * 1. find inline base64 images, upload each, and replace the data URI with
 *    the returned hosted URL (a failed upload strips the image instead);
 * 2. trim whitespace around image tags and simplify the markdown.
 */
export const uploadMarkdownBase64 = async (rawText: string = '', metadata: Record<string, any>) => {
// match base64 image data URIs embedded in the markdown
const base64Regex = /data:image\/.*;base64,([^\)]+)/g;
const base64Arr = rawText.match(base64Regex) || [];
// upload each base64 image and swap its data URI for the hosted URL;
// uploads run concurrently but replacements on rawText are serialized by
// the single-threaded event loop
await Promise.all(
base64Arr.map(async (base64Img) => {
try {
const str = await compressBase64ImgAndUpload({
base64Img,
maxW: 4329,
maxH: 4329,
maxSize: 1024 * 1024 * 5,
metadata
});
rawText = rawText.replace(base64Img, str);
} catch (error) {
// upload failed: drop the image data and any now-empty image tag
rawText = rawText.replace(base64Img, '');
rawText = rawText.replace(/!\[.*\]\(\)/g, '');
}
})
);
// Remove white space on both sides of the picture
const trimReg = /(!\[.*\]\(.*\))\s*/g;
if (trimReg.test(rawText)) {
rawText = rawText.replace(trimReg, '$1');
}
return simpleMarkdownText(rawText);
};
/**
* file download by text
*/

View File

@@ -105,7 +105,7 @@ export const useConfirm = (props?: {
)}
<Button
{...(bg && { bg: `${bg} !important` })}
bg={bg ? bg : map.bg}
isDisabled={countDownAmount > 0}
ml={4}
isLoading={isLoading}
@@ -120,7 +120,7 @@ export const useConfirm = (props?: {
</MyModal>
);
},
[customContent, iconSrc, isOpen, onClose, showCancel, t, title]
[customContent, iconSrc, isOpen, map.bg, onClose, showCancel, t, title]
)
};
};

View File

@@ -74,7 +74,7 @@ export const postDatasetCollection = (data: CreateDatasetCollectionParams) =>
POST<string>(`/core/dataset/collection/create`, data);
export const putDatasetCollectionById = (data: UpdateDatasetCollectionParams) =>
POST(`/core/dataset/collection/update`, data);
export const delDatasetCollectionById = (params: { collectionId: string }) =>
export const delDatasetCollectionById = (params: { id: string }) =>
DELETE(`/core/dataset/collection/delete`, params);
export const postLinkCollectionSync = (collectionId: string) =>
POST<`${DatasetCollectionSyncResultEnum}`>(`/core/dataset/collection/sync/link`, {
@@ -86,8 +86,8 @@ export const postLinkCollectionSync = (collectionId: string) =>
export const getDatasetDataList = (data: GetDatasetDataListProps) =>
POST(`/core/dataset/data/list`, data);
export const getDatasetDataItemById = (dataId: string) =>
GET<DatasetDataItemType>(`/core/dataset/data/detail`, { dataId });
export const getDatasetDataItemById = (id: string) =>
GET<DatasetDataItemType>(`/core/dataset/data/detail`, { id });
/**
* push data to training queue
@@ -109,8 +109,8 @@ export const putDatasetDataById = (data: UpdateDatasetDataProps) =>
/**
* 删除一条知识库数据
*/
export const delOneDatasetDataById = (dataId: string) =>
DELETE<string>(`/core/dataset/data/delete`, { dataId });
export const delOneDatasetDataById = (id: string) =>
DELETE<string>(`/core/dataset/data/delete`, { id });
/* ================ training ==================== */
/* get length of system training queue */

View File

@@ -13,7 +13,7 @@ import { defaultDatasetDetail } from '@/constants/dataset';
import type { DatasetUpdateBody } from '@fastgpt/global/core/dataset/api.d';
import { DatasetStatusEnum } from '@fastgpt/global/core/dataset/constant';
import { postCreateTrainingBill } from '@/web/support/wallet/bill/api';
import { checkTeamWebSyncLimit } from '@/web/support/user/api';
import { checkTeamWebSyncLimit } from '@/web/support/user/team/api';
type State = {
allDatasets: DatasetListItemType[];

View File

@@ -5,17 +5,17 @@ import { strIsLink } from '@fastgpt/global/common/string/tools';
import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api.d';
export async function chunksUpload({
collectionId,
billId,
mode,
collectionId,
trainingMode,
chunks,
prompt,
rate = 150,
onUploading
}: {
collectionId: string;
billId: string;
mode: `${TrainingModeEnum}`;
collectionId: string;
trainingMode: `${TrainingModeEnum}`;
chunks: PushDatasetDataChunkProps[];
prompt?: string;
rate?: number;
@@ -24,8 +24,8 @@ export async function chunksUpload({
async function upload(data: PushDatasetDataChunkProps[]) {
return postChunks2Dataset({
collectionId,
trainingMode,
data,
mode,
prompt,
billId
});

View File

@@ -195,6 +195,7 @@ const Input: ComponentStyleConfig = {
baseStyle: {
fontsize: '14px'
},
sizes: {},
variants: {
outline: {
field: {

View File

@@ -71,8 +71,3 @@ export const postLogin = ({ password, ...props }: PostLoginProps) =>
export const loginOut = () => GET('/support/user/account/loginout');
export const putUserInfo = (data: UserUpdateParams) => PUT('/support/user/account/update', data);
/* team limit */
export const checkTeamExportDatasetLimit = (datasetId: string) =>
GET(`/support/user/team/limit/exportDatasetLimit`, { datasetId });
export const checkTeamWebSyncLimit = () => GET(`/support/user/team/limit/webSyncLimit`);

View File

@@ -39,3 +39,8 @@ export const updateInviteResult = (data: UpdateInviteProps) =>
PUT('/plusApi/support/user/team/member/updateInvite', data);
export const delLeaveTeam = (teamId: string) =>
DELETE('/plusApi/support/user/team/member/leave', { teamId });
/* team limit */
export const checkTeamExportDatasetLimit = (datasetId: string) =>
GET(`/support/user/team/limit/exportDatasetLimit`, { datasetId });
export const checkTeamWebSyncLimit = () => GET(`/support/user/team/limit/webSyncLimit`);

View File

@@ -0,0 +1,13 @@
import { GET, POST, PUT, DELETE } from '@/web/common/api/request';
import { SubDatasetSizeParams } from '@fastgpt/global/support/wallet/sub/api';
import { TeamSubSchema } from '@fastgpt/global/support/wallet/sub/type';
// Fetch the team's currently valid dataset subscription together with the
// allowed (maxSize) and consumed (usedSize) dataset capacity.
export const getTeamDatasetValidSub = () =>
GET<{
sub: TeamSubSchema;
maxSize: number;
usedSize: number;
}>(`/support/wallet/sub/getDatasetSub`);
// Request an expansion of the team's dataset-size subscription (plus API).
export const postExpandTeamDatasetSub = (data: SubDatasetSizeParams) =>
POST('/plusApi/support/wallet/sub/datasetSize/expand', data);