This commit is contained in:
Archer
2023-12-31 14:12:51 +08:00
committed by GitHub
parent ccca0468da
commit 9ccfda47b7
270 changed files with 8182 additions and 1295 deletions

View File

@@ -151,7 +151,7 @@ export const QuoteList = React.memo(function QuoteList({
{item.q.length + (item.a?.length || 0)}
</Flex>
</MyTooltip>
{!isShare && item.score && (
{/* {!isShare && item.score && (
<MyTooltip label={t('core.dataset.Similarity')}>
<Flex alignItems={'center'}>
<MyIcon name={'kbTest'} w={'12px'} />
@@ -167,7 +167,7 @@ export const QuoteList = React.memo(function QuoteList({
<Box>{item.score.toFixed(4)}</Box>
</Flex>
</MyTooltip>
)}
)} */}
<Box flex={1} />
{item.id && (
<MyTooltip label={t('core.dataset.data.Edit')}>

View File

@@ -205,9 +205,9 @@ const ResponseTags = ({
</Tag>
</MyTooltip>
)}
<MyTooltip label={'点击查看完整响应'}>
<MyTooltip label={t('core.chat.response.Read complete response tips')}>
<Tag colorSchema="gray" cursor={'pointer'} {...TagStyles} onClick={onOpenWholeModal}>
{t('chat.Complete Response')}
{t('core.chat.response.Read complete response')}
</Tag>
</MyTooltip>

View File

@@ -8,7 +8,7 @@ import Tabs from '../Tabs';
import MyModal from '../MyModal';
import MyTooltip from '../MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { formatStorePrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import Markdown from '../Markdown';
import { QuoteList } from './QuoteModal';
import { DatasetSearchModeMap } from '@fastgpt/global/core/dataset/constant';
@@ -19,7 +19,7 @@ function Row({
rawDom
}: {
label: string;
value?: string | number;
value?: string | number | boolean;
rawDom?: React.ReactNode;
}) {
const { t } = useTranslation();
@@ -70,7 +70,7 @@ const WholeResponseModal = ({
iconSrc="/imgs/modal/wholeRecord.svg"
title={
<Flex alignItems={'center'}>
{t('chat.Complete Response')}
{t('core.chat.response.Complete Response')}
<MyTooltip label={'从左往右,为各个模块的响应顺序'}>
<QuestionOutlineIcon ml={2} />
</MyTooltip>
@@ -133,15 +133,16 @@ const ResponseBox = React.memo(function ResponseBox({
{activeModule?.price !== undefined && (
<Row
label={t('core.chat.response.module price')}
value={`${formatPrice(activeModule?.price)}`}
value={`${formatStorePrice2Read(activeModule?.price)}`}
/>
)}
<Row
label={t('core.chat.response.module time')}
value={`${activeModule?.runningTime || 0}s`}
/>
<Row label={t('core.chat.response.module tokens')} value={`${activeModule?.tokens}`} />
<Row label={t('core.chat.response.module model')} value={activeModule?.model} />
<Row label={t('wallet.bill.Input Token Length')} value={`${activeModule?.inputTokens}`} />
<Row label={t('wallet.bill.Output Token Length')} value={`${activeModule?.outputTokens}`} />
<Row label={t('core.chat.response.module query')} value={activeModule?.query} />
<Row
label={t('core.chat.response.context total length')}
@@ -193,6 +194,10 @@ const ResponseBox = React.memo(function ResponseBox({
)}
<Row label={t('core.chat.response.module similarity')} value={activeModule?.similarity} />
<Row label={t('core.chat.response.module limit')} value={activeModule?.limit} />
<Row
label={t('core.chat.response.search using reRank')}
value={activeModule?.searchUsingReRank}
/>
{/* classify question */}
<Row

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1703840539554" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="7163" xmlns:xlink="http://www.w3.org/1999/xlink" width="128" height="128"><path d="M224.63064 14.207408h566.760385a4.266489 4.266489 0 0 1 4.735803 4.735803v227.915837a4.266489 4.266489 0 0 1-4.735803 4.735802H224.63064a4.266489 4.266489 0 0 1-4.735802-4.735802V18.943211a4.266489 4.266489 0 0 1 4.735802-4.735803z" fill="#4A8BFD" p-id="7164"></path><path d="M781.876755 28.414816v209.057956H234.059581v-209.057956h547.817174M791.34836 0H224.417316a18.943211 18.943211 0 0 0-18.943211 18.943211v227.915837a18.943211 18.943211 0 0 0 18.943211 18.94321h566.760385a18.943211 18.943211 0 0 0 18.943211-18.94321V18.943211a18.943211 18.943211 0 0 0-18.772552-18.943211z" fill="#333333" p-id="7165"></path><path d="M224.63064 393.028957h566.760385a4.266489 4.266489 0 0 1 4.735803 4.735803v227.915837a4.266489 4.266489 0 0 1-4.735803 4.735802H224.63064a4.266489 4.266489 0 0 1-4.735802-4.735802V397.76476a4.266489 4.266489 0 0 1 4.735802-4.735803z" fill="#EDEDED" p-id="7166"></path><path d="M781.876755 407.535019v208.503313H234.059581V407.535019h547.817174m9.471605-28.414816H224.417316a18.943211 18.943211 0 0 0-18.943211 18.943211v227.446523a18.943211 18.943211 0 0 0 18.943211 18.943211h566.760385a18.943211 18.943211 0 0 0 18.943211-18.943211V398.063414a18.943211 18.943211 0 0 0-18.772552-18.943211z" fill="#333333" p-id="7167"></path><path d="M224.63064 771.935836h566.760385a4.266489 4.266489 0 0 1 4.735803 4.735803v227.958501a4.266489 4.266489 0 0 1-4.735803 4.735803H224.63064a4.266489 4.266489 0 0 1-4.735802-4.735803v-227.958501a4.266489 4.266489 0 0 1 4.735802-4.735803z" fill="#4A8BFD" p-id="7168"></path><path d="M781.876755 786.185909v209.057956H234.059581v-209.057956h547.817174m9.471605-28.414816H224.417316a18.943211 18.943211 0 0 
0-18.943211 18.943211v227.915836a18.943211 18.943211 0 0 0 18.943211 18.943211h566.760385a18.943211 18.943211 0 0 0 18.943211-18.943211v-227.915836a18.943211 18.943211 0 0 0-18.772552-18.943211z m56.573643-251.722845a14.207408 14.207408 0 1 1 0-28.414816 147.577851 147.577851 0 0 0 0-295.113037 14.207408 14.207408 0 1 1 0-28.414816 175.992667 175.992667 0 0 1 0 351.985334z" fill="#333333" p-id="7169"></path><path d="M884.443148 530.751219a14.122078 14.122078 0 0 1-7.295696-2.00525l-41.512937-24.916295a14.207408 14.207408 0 0 1-1.791925-23.2097l41.512937-33.150619a14.207408 14.207408 0 0 1 17.705929 22.185743l-25.598934 20.393817 23.977668 14.420732a14.207408 14.207408 0 0 1-7.295696 26.452231z m-708.237157 326.215741a175.992667 175.992667 0 0 1 0-351.985334 14.207408 14.207408 0 0 1 0 28.414816 147.577851 147.577851 0 1 0 0 295.113037 14.207408 14.207408 0 0 1 0 28.414816z" fill="#333333" p-id="7170"></path><path d="M139.428857 881.413941a14.207408 14.207408 0 0 1-7.295696-26.452231l23.977668-14.420733-25.598934-20.393816a14.207408 14.207408 0 0 1 17.705929-22.185743l41.043623 33.662598a14.207408 14.207408 0 0 1-1.621265 23.295029l-41.512937 24.916295a14.079413 14.079413 0 0 1-6.698388 1.578601z" fill="#333333" p-id="7171"></path></svg>

After

Width:  |  Height:  |  Size: 3.2 KiB

View File

@@ -0,0 +1,11 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1703750094429"
class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="4262"
xmlns:xlink="http://www.w3.org/1999/xlink" width="128" height="128">
<path
d="M512 0C229.312 0 0 229.312 0 512s229.312 512 512 512 512-229.312 512-512-229.312-512-512-512z m311.04 823.04a437.76 437.76 0 0 1-139.84 94.336c-54.208 23.04-111.808 34.624-171.2 34.624a439.36 439.36 0 0 1-311.04-128.896 437.76 437.76 0 0 1-94.336-139.904A435.776 435.776 0 0 1 72 512a439.36 439.36 0 0 1 128.896-311.04 437.76 437.76 0 0 1 139.904-94.336A435.776 435.776 0 0 1 512 72a439.36 439.36 0 0 1 311.04 128.896 437.76 437.76 0 0 1 94.336 139.904c23.04 54.208 34.624 111.808 34.624 171.2 0 59.392-11.584 116.992-34.56 171.2a436.096 436.096 0 0 1-94.336 139.904z"
p-id="4263"></path>
<path
d="M701.824 436.992h-167.04c0.704 0 1.408-0.32 2.112-1.024l118.208-117.76a35.904 35.904 0 0 0-50.816-50.56L511.872 360.064l-92.864-92.8a36.16 36.16 0 0 0-51.136 0.192 36.16 36.16 0 0 0-0.064 51.072l118.208 118.08c0.64 0.704 1.408 1.728 2.112 1.728H321.6a34.688 34.688 0 0 0-34.304 34.56v3.2c0 18.816 15.488 33.856 34.304 33.856h154.368v64H353.536c-18.752 0-33.472 16.256-33.472 35.072v4.032c0 18.752 14.72 32.896 33.472 32.896h122.56v149.184a35.968 35.968 0 1 0 72 0v-149.184h121.792c18.752 0 35.2-14.08 35.2-32.896v-4.032a35.968 35.968 0 0 0-35.2-35.072H548.032v-64h153.792c18.752 0 35.2-14.208 35.2-33.088v-4.032a36.672 36.672 0 0 0-35.2-35.84z"
p-id="4264"></path>
</svg>

After

Width:  |  Height:  |  Size: 1.6 KiB

View File

@@ -113,8 +113,8 @@ const iconPaths = {
'core/chat/speaking': () => import('./icons/core/chat/speaking.svg'),
'core/chat/fileSelect': () => import('./icons/core/chat/fileSelect.svg'),
'core/dataset/modeEmbedding': () => import('./icons/core/dataset/modeEmbedding.svg'),
'core/dataset/modeEmbeddingRerank': () => import('./icons/core/dataset/modeEmbeddingRerank.svg'),
'core/dataset/modeEmbFTRerank': () => import('./icons/core/dataset/modeEmbFTRerank.svg'),
'core/dataset/fullTextRecall': () => import('./icons/core/dataset/fullTextRecall.svg'),
'core/dataset/mixedRecall': () => import('./icons/core/dataset/mixedRecall.svg'),
'core/app/variable/input': () => import('./icons/core/app/variable/input.svg'),
'core/app/variable/textarea': () => import('./icons/core/app/variable/textarea.svg'),
'core/app/variable/select': () => import('./icons/core/app/variable/select.svg'),
@@ -125,7 +125,9 @@ const iconPaths = {
'common/confirm/commonTip': () => import('./icons/common/confirm/commonTip.svg'),
'common/routePushLight': () => import('./icons/common/routePushLight.svg'),
'common/viewLight': () => import('./icons/common/viewLight.svg'),
'core/app/customFeedback': () => import('./icons/core/app/customFeedback.svg')
'core/app/customFeedback': () => import('./icons/core/app/customFeedback.svg'),
'support/pay/priceLight': () => import('./icons/support/pay/priceLight.svg'),
'core/dataset/rerank': () => import('./icons/core/dataset/rerank.svg')
};
export type IconName = keyof typeof iconPaths;

View File

@@ -1,4 +1,3 @@
import React from 'react';
import { useRouter } from 'next/router';
import { useToast } from '@chakra-ui/react';
import { useUserStore } from '@/web/support/user/useUserStore';

View File

@@ -218,6 +218,9 @@
.markdown blockquote > *:last-child {
margin-bottom: 0;
}
.markdown table {
width: 100%;
}
.markdown table th {
font-weight: bold;
}

View File

@@ -0,0 +1,44 @@
import React, { useMemo } from 'react';
import MySelect, { type SelectProps } from './index';
import { useTranslation } from 'next-i18next';
import dynamic from 'next/dynamic';
import { useDisclosure } from '@chakra-ui/react';
const PriceBox = dynamic(() => import('@/components/support/wallet/Price'));
/**
 * AI-model selector that appends a synthetic "Price" entry to the caller's
 * option list. Choosing that entry opens the pricing overlay instead of
 * firing the caller's onchange; every other value is forwarded unchanged.
 */
const SelectAiModel = ({ list, ...props }: SelectProps) => {
  const { t } = useTranslation();

  // Visibility state for the lazily-loaded pricing overlay.
  const {
    isOpen: isOpenPriceBox,
    onOpen: onOpenPriceBox,
    onClose: onClosePriceBox
  } = useDisclosure();

  // Rebuild the option list whenever the source list or the locale changes.
  const expandList = useMemo(() => {
    const priceItem = {
      label: t('support.user.Price'),
      value: 'price'
    };
    return list.concat(priceItem);
  }, [list, t]);

  return (
    <>
      <MySelect
        list={expandList}
        {...props}
        onchange={(e) => {
          // Intercept the sentinel "price" value: open the overlay and do
          // NOT notify the caller — it is not a real model selection.
          if (e === 'price') {
            onOpenPriceBox();
            return;
          }
          props.onchange?.(e);
        }}
      />
      {isOpenPriceBox && <PriceBox onClose={onClosePriceBox} />}
    </>
  );
};

export default SelectAiModel;

View File

@@ -1,17 +1,9 @@
import React, { useRef, forwardRef, useMemo } from 'react';
import {
Menu,
Box,
MenuList,
MenuItem,
Button,
useDisclosure,
useOutsideClick,
MenuButton
} from '@chakra-ui/react';
import { Menu, MenuList, MenuItem, Button, useDisclosure, MenuButton } from '@chakra-ui/react';
import type { ButtonProps } from '@chakra-ui/react';
import { ChevronDownIcon } from '@chakra-ui/icons';
interface Props extends ButtonProps {
export type SelectProps = ButtonProps & {
value?: string;
placeholder?: string;
list: {
@@ -20,10 +12,10 @@ interface Props extends ButtonProps {
value: string;
}[];
onchange?: (val: any) => void;
}
};
const MySelect = (
{ placeholder, value, width = '100%', list, onchange, ...props }: Props,
{ placeholder, value, width = '100%', list, onchange, ...props }: SelectProps,
selectRef: any
) => {
const ref = useRef<HTMLButtonElement>(null);

View File

@@ -53,7 +53,7 @@ const SideTabs = ({ list, size = 'md', activeId, onChange, ...props }: Props) =>
cursor: 'pointer'
})}
_hover={{
bg: 'myWhite.600'
bg: 'myGray.05'
}}
onClick={() => {
if (activeId === item.id) return;

View File

@@ -1,5 +1,15 @@
import React, { useMemo, useState } from 'react';
import { Box, Button, ModalBody, ModalFooter, Textarea } from '@chakra-ui/react';
import {
Box,
Button,
Checkbox,
Divider,
Flex,
ModalBody,
ModalFooter,
Textarea,
useTheme
} from '@chakra-ui/react';
import { useForm } from 'react-hook-form';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import MySlider from '@/components/Slider';
@@ -12,43 +22,58 @@ import { reRankModelList } from '@/web/common/system/staticData';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DatasetSearchModeMap } from '@fastgpt/global/core/dataset/constant';
import MyRadio from '@/components/common/MyRadio';
import MyIcon from '@/components/Icon';
type DatasetParamsProps = {
similarity?: number;
limit?: number;
searchMode: `${DatasetSearchModeEnum}`;
searchEmptyText?: string;
limit?: number;
similarity?: number;
usingReRank?: boolean;
maxTokens?: number;
};
const DatasetParamsModal = ({
searchMode = DatasetSearchModeEnum.embedding,
searchEmptyText,
limit,
similarity,
searchMode = DatasetSearchModeEnum.embedding,
usingReRank,
maxTokens = 3000,
onClose,
onSuccess
}: DatasetParamsProps & { onClose: () => void; onSuccess: (e: DatasetParamsProps) => void }) => {
const { t } = useTranslation();
const theme = useTheme();
const [refresh, setRefresh] = useState(false);
const { register, setValue, getValues, handleSubmit } = useForm<DatasetParamsProps>({
defaultValues: {
searchEmptyText,
limit,
similarity,
searchMode
searchMode,
usingReRank
}
});
const searchModeList = useMemo(() => {
const list = Object.values(DatasetSearchModeMap);
if (reRankModelList.length > 0) {
return list;
}
return list.slice(0, 1);
return list;
}, []);
const showSimilarity = useMemo(() => {
if (similarity === undefined) return false;
if (
getValues('searchMode') === DatasetSearchModeEnum.fullTextRecall &&
!getValues('usingReRank')
)
return false;
if (getValues('searchMode') === DatasetSearchModeEnum.mixedRecall && !getValues('usingReRank'))
return false;
return true;
}, [getValues, similarity, refresh]);
return (
<MyModal
isOpen={true}
@@ -57,7 +82,6 @@ const DatasetParamsModal = ({
title={t('core.dataset.search.Dataset Search Params')}
w={['90vw', '550px']}
h={['90vh', 'auto']}
overflow={'unset'}
isCentered={searchEmptyText !== undefined}
>
<ModalBody flex={['1 0 0', 'auto']} overflow={'auto'}>
@@ -71,9 +95,73 @@ const DatasetParamsModal = ({
setRefresh(!refresh);
}}
/>
{usingReRank !== undefined && reRankModelList.length > 0 && (
<>
<Divider my={4} />
<Flex
alignItems={'center'}
cursor={'pointer'}
userSelect={'none'}
py={3}
pl={'14px'}
pr={'16px'}
border={theme.borders.sm}
borderWidth={'1.5px'}
borderRadius={'md'}
position={'relative'}
{...(getValues('usingReRank')
? {
borderColor: 'primary.400'
}
: {})}
onClick={(e) => {
setValue('usingReRank', !getValues('usingReRank'));
setRefresh((state) => !state);
}}
>
<MyIcon name="core/dataset/rerank" w={'18px'} mr={'14px'} />
<Box pr={2} color={'myGray.800'} flex={'1 0 0'}>
<Box>{t('core.dataset.search.ReRank')}</Box>
<Box fontSize={['xs', 'sm']} color={'myGray.500'}>
{t('core.dataset.search.ReRank desc')}
</Box>
</Box>
<Box position={'relative'} w={'18px'} h={'18px'}>
<Checkbox colorScheme="primary" isChecked={getValues('usingReRank')} size="lg" />
<Box position={'absolute'} top={0} right={0} bottom={0} left={0} zIndex={1}></Box>
</Box>
</Flex>
</>
)}
{similarity !== undefined && (
{limit !== undefined && (
<Box display={['block', 'flex']} py={8} mt={3}>
<Box flex={'0 0 100px'} mb={[8, 0]}>
{t('core.dataset.search.Max Tokens')}
<MyTooltip label={t('core.dataset.search.Max Tokens Tips')} forceShow>
<QuestionOutlineIcon ml={1} />
</MyTooltip>
</Box>
<Box flex={1} mx={4}>
<MySlider
markList={[
{ label: '100', value: 100 },
{ label: maxTokens, value: maxTokens }
]}
min={100}
max={maxTokens}
step={50}
value={getValues(ModuleInputKeyEnum.datasetLimit) ?? 1000}
onChange={(val) => {
setValue(ModuleInputKeyEnum.datasetLimit, val);
setRefresh(!refresh);
}}
/>
</Box>
</Box>
)}
{showSimilarity && (
<Box display={['block', 'flex']} py={8}>
<Box flex={'0 0 100px'} mb={[8, 0]}>
{t('core.dataset.search.Min Similarity')}
<MyTooltip label={t('core.dataset.search.Min Similarity Tips')} forceShow>
@@ -98,32 +186,7 @@ const DatasetParamsModal = ({
</Box>
</Box>
)}
{limit !== undefined && (
<Box display={['block', 'flex']} py={8}>
<Box flex={'0 0 100px'} mb={[8, 0]}>
{t('core.dataset.search.Max Tokens')}
<MyTooltip label={t('core.dataset.search.Max Tokens Tips')} forceShow>
<QuestionOutlineIcon ml={1} />
</MyTooltip>
</Box>
<Box flex={1} mx={4}>
<MySlider
markList={[
{ label: '300', value: 300 },
{ label: maxTokens, value: maxTokens }
]}
min={300}
max={maxTokens}
step={10}
value={getValues(ModuleInputKeyEnum.datasetLimit) ?? 1000}
onChange={(val) => {
setValue(ModuleInputKeyEnum.datasetLimit, val);
setRefresh(!refresh);
}}
/>
</Box>
</Box>
)}
{searchEmptyText !== undefined && (
<Box display={['block', 'flex']} pt={3}>
<Box flex={'0 0 100px'} mb={[2, 0]}>

View File

@@ -8,7 +8,7 @@ import MySelect from '@/components/Select';
import { TTSTypeEnum } from '@/constants/app';
import type { AppTTSConfigType } from '@fastgpt/global/core/module/type.d';
import { useAudioPlay } from '@/web/common/utils/voice';
import { audioSpeechModels } from '@/web/common/system/staticData';
import { audioSpeechModelList } from '@/web/common/system/staticData';
import MyModal from '@/components/MyModal';
import MySlider from '@/components/Slider';
@@ -26,7 +26,7 @@ const TTSSelect = ({
() => [
{ label: t('core.app.tts.Close'), value: TTSTypeEnum.none },
{ label: t('core.app.tts.Web'), value: TTSTypeEnum.web },
...audioSpeechModels.map((item) => item?.voices || []).flat()
...audioSpeechModelList.map((item) => item?.voices || []).flat()
],
[t]
);
@@ -52,7 +52,7 @@ const TTSSelect = ({
if (e === TTSTypeEnum.none || e === TTSTypeEnum.web) {
onChange({ type: e as `${TTSTypeEnum}` });
} else {
const audioModel = audioSpeechModels.find(
const audioModel = audioSpeechModelList.find(
(item) => item.voices?.find((voice) => voice.value === e)
);
if (!audioModel) {

View File

@@ -1,10 +1,9 @@
import React, { useCallback, useEffect } from 'react';
import type { RenderInputProps } from '../type';
import { onChangeNode } from '../../../../FlowProvider';
import MySelect from '@/components/Select';
import SelectAiModel from '@/components/Select/SelectAiModel';
import { FlowNodeInputTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { chatModelList, cqModelList, extractModelList } from '@/web/common/system/staticData';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
const SelectAiModelRender = ({ item, inputs = [], moduleId }: RenderInputProps) => {
const modelList = (() => {
@@ -16,8 +15,7 @@ const SelectAiModelRender = ({ item, inputs = [], moduleId }: RenderInputProps)
})().map((item) => ({
model: item.model,
name: item.name,
maxResponse: item.maxResponse,
price: item.price
maxResponse: item.maxResponse
}));
const onChangeModel = useCallback(
@@ -55,11 +53,9 @@ const SelectAiModelRender = ({ item, inputs = [], moduleId }: RenderInputProps)
);
const list = modelList.map((item) => {
const priceStr = `(${formatPrice(item.price, 1000)}元/1k Tokens)`;
return {
value: item.model,
label: `${item.name}${priceStr}`
label: item.name
};
});
@@ -70,7 +66,7 @@ const SelectAiModelRender = ({ item, inputs = [], moduleId }: RenderInputProps)
}, [item.value, list, onChangeModel]);
return (
<MySelect
<SelectAiModel
minW={'350px'}
width={'100%'}
value={item.value}

View File

@@ -17,7 +17,8 @@ const SelectDatasetParam = ({ inputs = [], moduleId }: RenderInputProps) => {
const [data, setData] = useState({
searchMode: DatasetSearchModeEnum.embedding,
limit: 5,
similarity: 0.5
similarity: 0.5,
usingReRank: false
});
const tokenLimit = useMemo(() => {

View File

@@ -72,7 +72,7 @@ const ApiKeyTable = ({ tips, appId }: { tips: string; appId?: string }) => {
} = useQuery(['getOpenApiKeys', appId], () => getOpenApiKeys({ appId }));
useEffect(() => {
setBaseUrl(`${location.origin}/api`);
setBaseUrl(feConfigs?.customApiDomain || `${location.origin}/api`);
}, []);
return (
@@ -255,7 +255,7 @@ const ApiKeyTable = ({ tips, appId }: { tips: string; appId?: string }) => {
</ModalBody>
<ModalFooter>
<Button variant="whiteBase" onClick={() => setApiKey('')}>
{t('common.OK')}
</Button>
</ModalFooter>
</MyModal>

View File

@@ -0,0 +1,168 @@
import React from 'react';
import { Box, CloseButton } from '@chakra-ui/react';
import {
chatModelList,
vectorModelList,
qaModelList,
cqModelList,
extractModelList,
qgModelList,
audioSpeechModelList,
reRankModelList,
whisperModel
} from '@/web/common/system/staticData';
import ReactDOM from 'react-dom';
import Markdown from '@/components/Markdown';
/**
 * Full-screen pricing overlay, rendered into <body> through a portal so it
 * escapes any parent stacking context. Builds one markdown price table per
 * billable model category from the statically-loaded model lists.
 *
 * @param onClose - invoked when the user clicks the close button.
 */
const Price = ({ onClose }: { onClose: () => void }) => {
  // One section per category; `md` is a markdown table rendered below the title.
  const list = [
    {
      title: '知识库存储',
      describe: '',
      md: `
| 计费项 | 价格(¥) |
| --- | --- |
| 知识库索引数量 | 0/1000条/天 |`
    },
    {
      title: '对话模型',
      describe: '',
      md: `
| 模型 | 输入价格(¥) | 输出价格(¥) |
| --- | --- | --- |
${chatModelList
  ?.map((item) => `| ${item.name} | ${item.inputPrice}/1k tokens | ${item.outputPrice}/1k tokens |`)
  .join('\n')}`
    },
    {
      title: '索引模型(文档训练 & 文档检索)',
      describe: '',
      md: `
| 模型 | 价格(¥) |
| --- | --- |
${vectorModelList?.map((item) => `| ${item.name} | ${item.inputPrice}/1k tokens |`).join('\n')}
`
    },
    {
      title: '文件预处理模型(QA 拆分)',
      describe: '',
      md: `
| 模型 | 输入价格(¥) | 输出价格(¥) |
| --- | --- | --- |
${qaModelList
  ?.map(
    (item) => `| ${item.name} | ${item.inputPrice}/1k tokens | ${item.outputPrice}/1k tokens |`
  )
  .join('\n')}
`
    },
    {
      title: '问题分类',
      describe: '',
      md: `
| 模型 | 输入价格(¥) | 输出价格(¥) |
| --- | --- | --- |
${cqModelList
  ?.map(
    (item) => `| ${item.name} | ${item.inputPrice}/1k tokens | ${item.outputPrice}/1k tokens |`
  )
  .join('\n')}`
    },
    {
      title: '内容提取',
      describe: '',
      md: `
| 模型 | 输入价格(¥) | 输出价格(¥) |
| --- | --- | --- |
${extractModelList
  ?.map(
    (item) => `| ${item.name} | ${item.inputPrice}/1k tokens | ${item.outputPrice}/1k tokens |`
  )
  .join('\n')}`
    },
    {
      title: '下一步指引',
      describe: '',
      md: `
| 模型 | 输入价格(¥) | 输出价格(¥) |
| --- | --- | --- |
${qgModelList
  ?.map(
    (item) => `| ${item.name} | ${item.inputPrice}/1k tokens | ${item.outputPrice}/1k tokens |`
  )
  .join('\n')}`
    },
    {
      title: '重排模型(增强检索 & 混合检索)',
      describe: '',
      md: `
| 模型 | 价格(¥) |
| --- | --- |
${reRankModelList?.map((item) => `| ${item.name} | ${item.inputPrice}/1k 字符 |`).join('\n')}`
    },
    {
      title: '语音播放',
      describe: '',
      md: `
| 模型 | 价格(¥) |
| --- | --- |
${audioSpeechModelList
  ?.map((item) => `| ${item.name} | ${item.inputPrice}/1k 字符 |`)
  .join('\n')}`
    },
    // Whisper is optional system config — only show the row when configured.
    ...(whisperModel
      ? [
          {
            title: '语音输入',
            describe: '',
            md: `
| 模型 | 价格(¥) |
| --- | --- |
| ${whisperModel.name} | ${whisperModel.inputPrice}/分钟 |`
          }
        ]
      : [])
  ];

  return ReactDOM.createPortal(
    <Box position={'fixed'} top={0} right={0} bottom={0} left={0} zIndex={99999} bg={'white'}>
      <CloseButton
        position={'absolute'}
        top={'10px'}
        right={'20px'}
        bg={'myGray.200'}
        w={'30px'}
        h={'30px'}
        borderRadius={'50%'}
        onClick={onClose}
      />
      <Box py={[0, 10]} px={[5, '50px']} overflow={'overlay'} h={'100%'}>
        {list.map((item) => (
          <Box
            display={['block', 'flex']}
            key={item.title}
            w={'100%'}
            mb={4}
            pb={6}
            _notLast={{
              borderBottom: '1px',
              borderBottomColor: 'borderColor.high'
            }}
          >
            <Box fontSize={'xl'} fontWeight={'bold'} mb={1} flex={'1 0 0'}>
              {item.title}
            </Box>
            <Box w={['100%', '410px']}>
              <Markdown source={item.md}></Markdown>
            </Box>
          </Box>
        ))}
      </Box>
    </Box>,
    // document.body is typed non-null in lib.dom, so no @ts-ignore is needed
    // (unlike document.querySelector('body'), which returns Element | null).
    document.body
  );
};

export default Price;

View File

@@ -19,8 +19,9 @@ export type InitDateResponse = {
vectorModels: VectorModelItemType[];
audioSpeechModels: AudioSpeechModels[];
reRankModels: ReRankModelItemType[];
qgModes: LLMModelItemType[];
whisperModel: WhisperModelType;
feConfigs: FastGPTFeConfigsType;
priceMd: string;
systemVersion: string;
simpleModeTemplates: AppSimpleEditConfigTemplateType[];
};

View File

@@ -22,6 +22,7 @@ export const SimpleModeTemplate_FastGPT_Universal: AppSimpleEditConfigTemplateTy
similarity: true,
limit: true,
searchMode: DatasetSearchModeEnum.embedding,
usingReRank: true,
searchEmptyText: true
},
userGuide: {

View File

@@ -43,12 +43,22 @@ export type UpdateDatasetDataProps = {
})[];
};
export type GetTrainingQueueProps = {
vectorModel: string;
agentModel: string;
};
export type GetTrainingQueueResponse = {
vectorTrainingCount: number;
agentTrainingCount: number;
};
/* -------------- search ---------------- */
export type SearchTestProps = {
datasetId: string;
text: string;
limit?: number;
searchMode?: `${DatasetSearchModeEnum}`;
usingReRank: boolean;
};
export type SearchTestResponse = {
list: SearchDataResponseItemType[];

View File

@@ -14,7 +14,7 @@ import {
import { BillItemType } from '@fastgpt/global/support/wallet/bill/type.d';
import dayjs from 'dayjs';
import { BillSourceMap } from '@fastgpt/global/support/wallet/bill/constants';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { formatStorePrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import MyModal from '@/components/MyModal';
import { useTranslation } from 'next-i18next';
@@ -25,49 +25,107 @@ const BillDetail = ({ bill, onClose }: { bill: BillItemType; onClose: () => void
[bill.list]
);
const {
hasModel,
hasTokens,
hasInputTokens,
hasOutputTokens,
hasTextLen,
hasDuration,
hasDataLen
} = useMemo(() => {
let hasModel = false;
let hasTokens = false;
let hasInputTokens = false;
let hasOutputTokens = false;
let hasTextLen = false;
let hasDuration = false;
let hasDataLen = false;
bill.list.forEach((item) => {
if (item.model !== undefined) {
hasModel = true;
}
if (item.tokenLen !== undefined) {
hasTokens = true;
}
if (item.inputTokens !== undefined) {
hasInputTokens = true;
}
if (item.outputTokens !== undefined) {
hasOutputTokens = true;
}
if (item.textLen !== undefined) {
hasTextLen = true;
}
if (item.duration !== undefined) {
hasDuration = true;
}
if (item.dataLen !== undefined) {
hasDataLen = true;
}
});
return {
hasModel,
hasTokens,
hasInputTokens,
hasOutputTokens,
hasTextLen,
hasDuration,
hasDataLen
};
}, [bill.list]);
return (
<MyModal
isOpen={true}
onClose={onClose}
iconSrc="/imgs/modal/bill.svg"
title={t('user.Bill Detail')}
maxW={['90vw', '700px']}
>
<ModalBody>
<Flex alignItems={'center'} pb={4}>
<Box flex={'0 0 80px'}>:</Box>
<Box flex={'0 0 80px'}>{t('wallet.bill.bill username')}:</Box>
<Box>{t(bill.memberName)}</Box>
</Flex>
<Flex alignItems={'center'} pb={4}>
<Box flex={'0 0 80px'}>:</Box>
<Box flex={'0 0 80px'}>{t('wallet.bill.Number')}:</Box>
<Box>{bill.id}</Box>
</Flex>
<Flex alignItems={'center'} pb={4}>
<Box flex={'0 0 80px'}>:</Box>
<Box flex={'0 0 80px'}>{t('wallet.bill.Time')}:</Box>
<Box>{dayjs(bill.time).format('YYYY/MM/DD HH:mm:ss')}</Box>
</Flex>
<Flex alignItems={'center'} pb={4}>
<Box flex={'0 0 80px'}>:</Box>
<Box flex={'0 0 80px'}>{t('wallet.bill.App name')}:</Box>
<Box>{t(bill.appName) || '-'}</Box>
</Flex>
<Flex alignItems={'center'} pb={4}>
<Box flex={'0 0 80px'}>:</Box>
<Box flex={'0 0 80px'}>{t('wallet.bill.Source')}:</Box>
<Box>{BillSourceMap[bill.source]}</Box>
</Flex>
<Flex alignItems={'center'} pb={4}>
<Box flex={'0 0 80px'}>:</Box>
<Box flex={'0 0 80px'}>{t('wallet.bill.Total')}:</Box>
<Box fontWeight={'bold'}>{bill.total}</Box>
</Flex>
<Box pb={4}>
<Box flex={'0 0 80px'} mb={1}>
{t('wallet.bill.Bill Module')}
</Box>
<TableContainer>
<Table>
<Thead>
<Tr>
<Th></Th>
<Th>AI模型</Th>
<Th>Token长度</Th>
<Th>{t('wallet.bill.Module name')}</Th>
{hasModel && <Th>{t('wallet.bill.Ai model')}</Th>}
{hasTokens && <Th>{t('wallet.bill.Token Length')}</Th>}
{hasInputTokens && <Th>{t('wallet.bill.Input Token Length')}</Th>}
{hasOutputTokens && <Th>{t('wallet.bill.Output Token Length')}</Th>}
{hasTextLen && <Th>{t('wallet.bill.Text Length')}</Th>}
{hasDuration && <Th>{t('wallet.bill.Duration')}</Th>}
{hasDataLen && <Th>{t('wallet.bill.Data Length')}</Th>}
<Th>()</Th>
</Tr>
</Thead>
@@ -75,9 +133,15 @@ const BillDetail = ({ bill, onClose }: { bill: BillItemType; onClose: () => void
{filterBillList.map((item, i) => (
<Tr key={i}>
<Td>{t(item.moduleName)}</Td>
<Td>{item.model || '-'}</Td>
<Td>{item.tokenLen || '-'}</Td>
<Td>{formatPrice(item.amount)}</Td>
{hasModel && <Td>{item.model ?? '-'}</Td>}
{hasTokens && <Td>{item.tokenLen ?? '-'}</Td>}
{hasInputTokens && <Td>{item.inputTokens ?? '-'}</Td>}
{hasOutputTokens && <Td>{item.outputTokens ?? '-'}</Td>}
{hasTextLen && <Td>{item.textLen ?? '-'}</Td>}
{hasDuration && <Td>{item.duration ?? '-'}</Td>}
{hasDataLen && <Td>{item.dataLen ?? '-'}</Td>}
<Td>{formatStorePrice2Read(item.amount)}</Td>
</Tr>
))}
</Tbody>

View File

@@ -29,7 +29,7 @@ import MyTooltip from '@/components/MyTooltip';
import { langMap, setLngStore } from '@/web/common/utils/i18n';
import { useRouter } from 'next/router';
import MySelect from '@/components/Select';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { formatStorePrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import { putUpdateMemberName } from '@/web/support/user/team/api';
import { getDocPath } from '@/web/common/system/doc';
@@ -239,7 +239,7 @@ const UserInfo = () => {
{t('user.team.Balance')}:&nbsp;
</Box>
<Box flex={1}>
<strong>{formatPrice(userInfo?.team?.balance).toFixed(3)}</strong>
<strong>{formatStorePrice2Read(userInfo?.team?.balance).toFixed(3)}</strong>
</Box>
{feConfigs?.show_pay && userInfo?.team?.canWrite && (
<Button size={['sm', 'md']} ml={5} onClick={onOpenPayModal}>

View File

@@ -8,7 +8,6 @@ import { getErrText } from '@fastgpt/global/common/error/utils';
import { useTranslation } from 'next-i18next';
import Markdown from '@/components/Markdown';
import MyModal from '@/components/MyModal';
import { priceMd } from '@/web/common/system/staticData';
const PayModal = ({ onClose }: { onClose: () => void }) => {
const router = useRouter();
@@ -68,13 +67,12 @@ const PayModal = ({ onClose }: { onClose: () => void }) => {
onClose={payId ? undefined : onClose}
title={t('user.Pay')}
iconSrc="/imgs/modal/pay.svg"
isCentered={!payId}
>
<ModalBody px={0} minH={payId ? 'auto' : '70vh'} display={'flex'} flexDirection={'column'}>
<ModalBody px={0} display={'flex'} flexDirection={'column'}>
{!payId && (
<>
<Grid gridTemplateColumns={'repeat(4,1fr)'} gridGap={5} mb={4} px={6}>
{[10, 20, 50, 100].map((item) => (
<Grid gridTemplateColumns={'repeat(3,1fr)'} gridGap={5} mb={4} px={6}>
{[10, 20, 50, 100, 200, 500].map((item) => (
<Button
key={item}
variant={item === inputVal ? 'solid' : 'outline'}
@@ -84,7 +82,7 @@ const PayModal = ({ onClose }: { onClose: () => void }) => {
</Button>
))}
</Grid>
<Box mb={4} px={6}>
<Box px={6}>
<Input
value={inputVal}
type={'number'}
@@ -95,9 +93,6 @@ const PayModal = ({ onClose }: { onClose: () => void }) => {
}}
></Input>
</Box>
<Box flex={[1, '1 0 0']} overflow={'overlay'} px={6}>
<Markdown source={priceMd} />
</Box>
</>
)}
{/* 付费二维码 */}

View File

@@ -15,7 +15,7 @@ import { getPayOrders, checkPayResult } from '@/web/support/wallet/pay/api';
import type { PaySchema } from '@fastgpt/global/support/wallet/pay/type.d';
import dayjs from 'dayjs';
import { useQuery } from '@tanstack/react-query';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { formatStorePrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import { useToast } from '@/web/common/hooks/useToast';
import { useLoading } from '@/web/common/hooks/useLoading';
import MyIcon from '@/components/Icon';
@@ -85,7 +85,7 @@ const PayRecordTable = () => {
<Td>
{item.createTime ? dayjs(item.createTime).format('YYYY/MM/DD HH:mm:ss') : '-'}
</Td>
<Td>{formatPrice(item.price)}</Td>
<Td>{formatStorePrice2Read(item.price)}</Td>
<Td>{item.status}</Td>
<Td>
{item.status === 'NOTPAY' && (

View File

@@ -1,9 +1,8 @@
import React, { useCallback, useMemo, useRef } from 'react';
import { Box, Flex, useTheme } from '@chakra-ui/react';
import React, { useCallback } from 'react';
import { Box, Flex, useDisclosure, useTheme } from '@chakra-ui/react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useRouter } from 'next/router';
import dynamic from 'next/dynamic';
import { clearToken } from '@/web/support/user/auth';
import { useUserStore } from '@/web/support/user/useUserStore';
import { useConfirm } from '@/web/common/hooks/useConfirm';
import PageContainer from '@/components/PageContainer';
@@ -20,11 +19,13 @@ const BillTable = dynamic(() => import('./components/BillTable'));
const PayRecordTable = dynamic(() => import('./components/PayRecordTable'));
const InformTable = dynamic(() => import('./components/InformTable'));
const ApiKeyTable = dynamic(() => import('./components/ApiKeyTable'));
const PriceBox = dynamic(() => import('@/components/support/wallet/Price'));
enum TabEnum {
'info' = 'info',
'promotion' = 'promotion',
'bill' = 'bill',
'price' = 'price',
'pay' = 'pay',
'inform' = 'inform',
'apikey' = 'apikey',
@@ -50,6 +51,15 @@ const Account = ({ currentTab }: { currentTab: `${TabEnum}` }) => {
}
]
: []),
...(feConfigs?.isPlus && feConfigs?.show_pay
? [
{
icon: 'support/pay/priceLight',
label: t('support.user.Price'),
id: TabEnum.price
}
]
: []),
...(feConfigs?.show_promotion
? [
{
@@ -97,6 +107,11 @@ const Account = ({ currentTab }: { currentTab: `${TabEnum}` }) => {
const { openConfirm, ConfirmModal } = useConfirm({
content: '确认退出登录?'
});
const {
isOpen: isOpenPriceBox,
onOpen: onOpenPriceBox,
onClose: onClosePriceBox
} = useDisclosure();
const router = useRouter();
const theme = useTheme();
@@ -109,6 +124,8 @@ const Account = ({ currentTab }: { currentTab: `${TabEnum}` }) => {
setUserInfo(null);
router.replace('/login');
})();
} else if (tab === TabEnum.price) {
onOpenPriceBox();
} else {
router.replace({
query: {
@@ -117,7 +134,7 @@ const Account = ({ currentTab }: { currentTab: `${TabEnum}` }) => {
});
}
},
[openConfirm, router, setUserInfo]
[onOpenPriceBox, openConfirm, router, setUserInfo]
);
return (
@@ -169,6 +186,8 @@ const Account = ({ currentTab }: { currentTab: `${TabEnum}` }) => {
</Flex>
<ConfirmModal />
</PageContainer>
{isOpenPriceBox && <PriceBox onClose={onClosePriceBox} />}
</>
);
};

View File

@@ -2,11 +2,9 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { delay } from '@fastgpt/global/common/system/utils';
import { PgClient } from '@fastgpt/service/common/pg';
import {
DatasetDataIndexTypeEnum,
PgDatasetTableName
} from '@fastgpt/global/core/dataset/constant';
import { PgClient } from '@fastgpt/service/common/vectorStore/pg';
import { DatasetDataIndexTypeEnum } from '@fastgpt/global/core/dataset/constant';
import { PgDatasetTableName } from '@fastgpt/global/common/vectorStore/constants';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';

View File

@@ -2,8 +2,8 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { delay } from '@fastgpt/global/common/system/utils';
import { PgClient } from '@fastgpt/service/common/pg';
import { PgDatasetTableName } from '@fastgpt/global/core/dataset/constant';
import { PgClient } from '@fastgpt/service/common/vectorStore/pg';
import { PgDatasetTableName } from '@fastgpt/global/common/vectorStore/constants';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';

View File

@@ -13,8 +13,8 @@ import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { PgClient } from '@fastgpt/service/common/pg';
import { PgDatasetTableName } from '@fastgpt/global/core/dataset/constant';
import { PgClient } from '@fastgpt/service/common/vectorStore/pg';
import { PgDatasetTableName } from '@fastgpt/global/common/vectorStore/constants';
import { MongoOutLink } from '@fastgpt/service/support/outLink/schema';
import { MongoOpenApi } from '@fastgpt/service/support/openapi/schema';
import { MongoApp } from '@fastgpt/service/core/app/schema';

View File

@@ -1,80 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { delay } from '@fastgpt/global/common/system/utils';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoApp } from '@fastgpt/service/core/app/schema';
import { FlowNodeInputTypeEnum, FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constant';
import { ModuleIOValueTypeEnum, ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import { ModuleItemType } from '@fastgpt/global/core/module/type';
let success = 0;
/* pg 中的数据搬到 mongo dataset.datas 中,并做映射 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { limit = 50 } = req.body as { limit: number };
await authCert({ req, authRoot: true });
await connectToDatabase();
success = 0;
console.log('total', await MongoApp.countDocuments());
await initApp(limit);
jsonRes(res, {
message: 'success'
});
} catch (error) {
console.log(error);
jsonRes(res, {
code: 500,
error
});
}
}
export async function initApp(limit = 50): Promise<any> {
try {
const apps = await MongoApp.find({ inited: false }).limit(limit);
if (apps.length === 0) return;
const result = await Promise.allSettled(
apps.map(async (app) => {
// 遍历app的modules找到 datasetSearch, 如果 rerank=true searchMode = embFullTextReRank, 否则等于embedding
const modules = JSON.parse(JSON.stringify(app.modules)) as ModuleItemType[];
modules.forEach((module) => {
if (module.flowType === FlowNodeTypeEnum.datasetSearchNode) {
module.inputs.forEach((input, i) => {
if (input.key === 'rerank') {
const val = !!input.value as boolean;
module.inputs.splice(i, 1, {
key: ModuleInputKeyEnum.datasetSearchMode,
type: FlowNodeInputTypeEnum.hidden,
label: 'core.dataset.search.Mode',
valueType: ModuleIOValueTypeEnum.string,
showTargetInApp: false,
showTargetInPlugin: false,
value: val
? DatasetSearchModeEnum.embFullTextReRank
: DatasetSearchModeEnum.embedding
});
}
});
}
});
app.modules = modules;
app.inited = true;
await app.save();
})
);
success += result.filter((item) => item.status === 'fulfilled').length;
console.log(`success: ${success}`);
return initApp(limit);
} catch (error) {
console.log(error);
await delay(1000);
return initApp(limit);
}
}

View File

@@ -4,7 +4,7 @@ import { connectToDatabase } from '@/service/mongo';
import { delay } from '@fastgpt/global/common/system/utils';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { jiebaSplit } from '@/service/core/dataset/utils';
import { jiebaSplit } from '@/service/common/string/jieba';
let success = 0;
/* pg 中的数据搬到 mongo dataset.datas 中,并做映射 */

View File

@@ -4,7 +4,7 @@ import { connectToDatabase } from '@/service/mongo';
import { delay } from '@fastgpt/global/common/system/utils';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { jiebaSplit } from '@/service/core/dataset/utils';
import { jiebaSplit } from '@/service/common/string/jieba';
let success = 0;
/* pg 中的数据搬到 mongo dataset.datas 中,并做映射 */

View File

@@ -2,8 +2,8 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { PgClient } from '@fastgpt/service/common/pg';
import { PgDatasetTableName } from '@fastgpt/global/core/dataset/constant';
import { PgClient } from '@fastgpt/service/common/vectorStore/pg';
import { PgDatasetTableName } from '@fastgpt/global/common/vectorStore/constants';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
import { connectToDatabase } from '@/service/mongo';

View File

@@ -4,7 +4,6 @@ import { jsonRes } from '@fastgpt/service/common/response';
import { readFileSync, readdirSync } from 'fs';
import type { InitDateResponse } from '@/global/common/api/systemRes';
import type { FastGPTConfigFileType } from '@fastgpt/global/common/system/types/index.d';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { getTikTokenEnc } from '@fastgpt/global/common/string/tiktoken';
import { initHttpAgent } from '@fastgpt/service/common/middle/httpAgent';
import { SimpleModeTemplate_FastGPT_Universal } from '@/global/core/app/constants';
@@ -33,8 +32,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
requestUrl: undefined,
requestAuth: undefined
})) || [],
qgModes: global.qgModels,
whisperModel: global.whisperModel,
audioSpeechModels: global.audioSpeechModels,
priceMd: global.priceMd,
systemVersion: global.systemVersion || '0.0.0',
simpleModeTemplates: global.simpleModeTemplates
}
@@ -73,7 +73,6 @@ export async function getInitConfig() {
await getSimpleModeTemplates();
getSystemVersion();
countModelPrice();
getSystemPlugin();
console.log({
@@ -88,7 +87,6 @@ export async function getInitConfig() {
reRankModels: global.reRankModels,
audioSpeechModels: global.audioSpeechModels,
whisperModel: global.whisperModel,
price: global.priceMd,
simpleModeTemplates: global.simpleModeTemplates,
communityPlugins: global.communityPlugins
});
@@ -123,22 +121,20 @@ export async function initSystemConfig() {
// set config
global.feConfigs = {
isPlus: !!config.systemEnv.pluginBaseUrl,
isPlus: !!config.systemEnv?.pluginBaseUrl,
...config.feConfigs
};
global.systemEnv = config.systemEnv;
global.chatModels = config.chatModels || [];
global.qaModels = config.qaModels || [];
global.cqModels = config.cqModels || [];
global.extractModels = config.extractModels || [];
global.qgModels = config.qgModels || [];
global.vectorModels = config.vectorModels || [];
global.reRankModels = config.reRankModels || [];
global.audioSpeechModels = config.audioSpeechModels || [];
global.chatModels = config.chatModels;
global.qaModels = config.qaModels;
global.cqModels = config.cqModels;
global.extractModels = config.extractModels;
global.qgModels = config.qgModels;
global.vectorModels = config.vectorModels;
global.reRankModels = config.reRankModels;
global.audioSpeechModels = config.audioSpeechModels;
global.whisperModel = config.whisperModel;
global.priceMd = '';
}
export function initGlobal() {
@@ -168,38 +164,6 @@ export function getSystemVersion() {
}
}
export function countModelPrice() {
global.priceMd = `| 计费项 | 价格: 元/ 1K tokens(包含上下文)|
| --- | --- |
${global.vectorModels
?.map((item) => `| 索引-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
${global.chatModels
?.map((item) => `| 对话-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
${global.qaModels
?.map((item) => `| 文件QA拆分-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
${global.cqModels
?.map((item) => `| 问题分类-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
${global.extractModels
?.map((item) => `| 内容提取-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
${global.qgModels
?.map((item) => `| 下一步指引-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
${global.audioSpeechModels
?.map((item) => `| 语音播放-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
${
global.whisperModel
? `| 语音输入-${global.whisperModel.name} | ${global.whisperModel.price}/分钟 |`
: ''
}
`;
}
async function getSimpleModeTemplates() {
if (global.simpleModeTemplates && global.simpleModeTemplates.length > 0) return;

View File

@@ -2,14 +2,13 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { countModelPrice, initSystemConfig } from './getInitData';
import { initSystemConfig } from './getInitData';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
await authCert({ req, authRoot: true });
await initSystemConfig();
countModelPrice();
console.log(`refresh config`);
console.log({
@@ -23,8 +22,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
vectorModels: global.vectorModels,
reRankModels: global.reRankModels,
audioSpeechModels: global.audioSpeechModels,
whisperModel: global.whisperModel,
price: global.priceMd
whisperModel: global.whisperModel
});
} catch (error) {
console.log(error);

View File

@@ -19,7 +19,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const qgModel = global.qgModels[0];
const { result, tokens } = await createQuestionGuide({
const { result, inputTokens, outputTokens } = await createQuestionGuide({
messages,
model: qgModel.model
});
@@ -29,7 +29,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
});
pushQuestionGuideBill({
tokens: tokens,
inputTokens,
outputTokens,
teamId,
tmbId
});

View File

@@ -374,11 +374,21 @@ function datasetTemplate({ formData, maxToken }: Props): ModuleItemType[] {
{
key: 'searchMode',
type: 'hidden',
label: 'core.dataset.search.Mode',
label: '',
valueType: 'string',
showTargetInApp: false,
showTargetInPlugin: false,
value: DatasetSearchModeEnum.embFullTextReRank,
value: DatasetSearchModeEnum.mixedRecall,
connected: false
},
{
key: 'usingReRank',
type: 'hidden',
label: '',
valueType: 'string',
showTargetInApp: false,
showTargetInPlugin: false,
value: true,
connected: false
},
{

View File

@@ -377,6 +377,16 @@ function datasetTemplate(formData: AppSimpleEditFormType): ModuleItemType[] {
value: formData.dataset.searchMode,
connected: false
},
{
key: 'usingReRank',
type: 'hidden',
label: '',
valueType: 'string',
showTargetInApp: false,
showTargetInPlugin: false,
value: formData.dataset.usingReRank,
connected: false
},
{
key: 'datasetParamsModal',
type: 'selectDatasetParamsModal',

View File

@@ -56,7 +56,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
try {
pushAudioSpeechBill({
model: model,
textLength: input.length,
textLen: input.length,
tmbId,
teamId,
source: authType2BillSource({ authType })

View File

@@ -6,10 +6,7 @@ import { Types } from '@fastgpt/service/common/mongo';
import type { DatasetCollectionsListItemType } from '@/global/core/dataset/type.d';
import type { GetDatasetCollectionsProps } from '@/global/core/api/datasetReq';
import { PagingData } from '@/types';
import {
DatasetColCollectionName,
MongoDatasetCollection
} from '@fastgpt/service/core/dataset/collection/schema';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constant';
import { startQueue } from '@/service/utils/tools';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';

View File

@@ -69,7 +69,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
a: formatA
});
const { insertId, tokenLen } = await insertData2Dataset({
const { insertId, tokens } = await insertData2Dataset({
teamId,
tmbId,
datasetId,
@@ -84,7 +84,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
pushGenerateVectorBill({
teamId,
tmbId,
tokenLen: tokenLen,
tokens,
model: vectorModelData.model
});

View File

@@ -30,7 +30,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
// auth team balance
await authTeamBalance(teamId);
const { tokenLen } = await updateData2Dataset({
const { tokens } = await updateData2Dataset({
dataId: id,
q,
a,
@@ -38,14 +38,12 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
model: vectorModel
});
if (tokenLen) {
pushGenerateVectorBill({
teamId,
tmbId,
tokenLen: tokenLen,
model: vectorModel
});
}
pushGenerateVectorBill({
teamId,
tmbId,
tokens,
model: vectorModel
});
jsonRes(res);
} catch (err) {

View File

@@ -6,7 +6,7 @@ import { connectToDatabase } from '@/service/mongo';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authTeamBalance } from '@/service/support/permission/auth/bill';
import { pushGenerateVectorBill } from '@/service/support/wallet/bill/push';
import { searchDatasetData } from '@/service/core/dataset/data/pg';
import { searchDatasetData } from '@/service/core/dataset/data/controller';
import { updateApiKeyUsage } from '@fastgpt/service/support/openapi/tools';
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { searchQueryExtension } from '@fastgpt/service/core/ai/functions/queryExtension';
@@ -14,7 +14,7 @@ import { searchQueryExtension } from '@fastgpt/service/core/ai/functions/queryEx
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { datasetId, text, limit = 20, searchMode } = req.body as SearchTestProps;
const { datasetId, text, limit = 20, searchMode, usingReRank } = req.body as SearchTestProps;
if (!datasetId || !text) {
throw new Error('缺少参数');
@@ -40,20 +40,21 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
// model: global.chatModels[0].model
// });
const { searchRes, tokenLen } = await searchDatasetData({
const { searchRes, tokens } = await searchDatasetData({
rawQuery: text,
queries: [text],
model: dataset.vectorModel,
limit: Math.min(limit * 800, 30000),
datasetIds: [datasetId],
searchMode
searchMode,
usingReRank
});
// push bill
const { total } = pushGenerateVectorBill({
teamId,
tmbId,
tokenLen: tokenLen,
tokens,
model: dataset.vectorModel,
source: apikey ? BillSourceEnum.api : BillSourceEnum.fastgpt
});

View File

@@ -3,20 +3,39 @@ import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { GetTrainingQueueProps } from '@/global/core/dataset/api';
/* 拆分数据成QA */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authToken: true });
const { vectorModel, agentModel } = req.query as GetTrainingQueueProps;
// split queue data
const result = await MongoDatasetTraining.countDocuments({
lockTime: { $lt: new Date('2040/1/1') }
});
// get queue data
// 分别统计 model = vectorModel和agentModel的数量
const data = await MongoDatasetTraining.aggregate([
{
$match: {
lockTime: { $lt: new Date('2040/1/1') },
$or: [{ model: { $eq: vectorModel } }, { model: { $eq: agentModel } }]
}
},
{
$group: {
_id: '$model',
count: { $sum: 1 }
}
}
]);
const vectorTrainingCount = data.find((item) => item._id === vectorModel)?.count || 0;
const agentTrainingCount = data.find((item) => item._id === agentModel)?.count || 0;
jsonRes(res, {
data: result
data: {
vectorTrainingCount,
agentTrainingCount
}
});
} catch (err) {
jsonRes(res, {

View File

@@ -5,7 +5,7 @@ import { withNextCors } from '@fastgpt/service/common/middle/cors';
import { pushGenerateVectorBill } from '@/service/support/wallet/bill/push';
import { connectToDatabase } from '@/service/mongo';
import { authTeamBalance } from '@/service/support/permission/auth/bill';
import { getVectorsByText, GetVectorProps } from '@/service/core/ai/vector';
import { getVectorsByText, GetVectorProps } from '@fastgpt/service/core/ai/embedding';
import { updateApiKeyUsage } from '@fastgpt/service/support/openapi/tools';
import { getBillSourceByAuthType } from '@fastgpt/global/support/wallet/bill/tools';
@@ -30,7 +30,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
await authTeamBalance(teamId);
const { tokenLen, vectors } = await getVectorsByText({ input, model });
const { tokens, vectors } = await getVectorsByText({ input, model });
jsonRes(res, {
data: {
@@ -42,8 +42,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
})),
model,
usage: {
prompt_tokens: tokenLen,
total_tokens: tokenLen
prompt_tokens: tokens,
total_tokens: tokens
}
}
});
@@ -51,7 +51,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
const { total } = pushGenerateVectorBill({
teamId,
tmbId,
tokenLen,
tokens,
model,
billId,
source: getBillSourceByAuthType({ authType })

View File

@@ -42,11 +42,9 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
data: result
});
} catch (err) {
console.log(err);
jsonRes<PostReRankResponse>(res, {
data: inputs.map((input) => ({
id: input.id
}))
jsonRes(res, {
code: 500,
error: err
});
}
});

View File

@@ -4,7 +4,7 @@ import { useSystemStore } from '@/web/common/system/useSystemStore';
import { getAppTotalUsage } from '@/web/core/app/api';
import { useQuery } from '@tanstack/react-query';
import dayjs from 'dayjs';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { formatStorePrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import Loading from '@/components/Loading';
import { Box } from '@chakra-ui/react';
@@ -135,7 +135,7 @@ const TokenUsage = ({ appId }: { appId: string }) => {
return `
<div>
<div>${dayjs(data.axisValue).format('YYYY/MM/DD')}</div>
<div>${formatPrice(e[0]?.value || 0)}元</div>
<div>${formatStorePrice2Read(e[0]?.value || 0)}元</div>
</div>
`;
}

View File

@@ -47,7 +47,7 @@ const Render = ({ app, onClose }: Props) => {
return <Flow templates={moduleTemplates} Header={<Header app={app} onClose={onClose} />} />;
};
export default React.memo(function AdEdit(props: Props) {
export default React.memo(function FlowEdit(props: Props) {
return (
<FlowProvider mode={'app'} filterAppIds={[props.app._id]}>
<Render {...props} />

View File

@@ -9,6 +9,7 @@ import MyIcon from '@/components/Icon';
import { useCopyData } from '@/web/common/hooks/useCopyData';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { fileToBase64 } from '@/web/common/file/utils';
import { feConfigs } from '@/web/common/system/staticData';
enum UsingWayEnum {
link = 'link',
@@ -70,7 +71,8 @@ const SelectUsingWayModal = ({ share, onClose }: { share: OutLinkSchema; onClose
setRefresh(!refresh);
});
const linkUrl = `${location?.origin}/chat/share?shareId=${share?.shareId}${
const baseUrl = feConfigs?.customSharePageDomain || location?.origin;
const linkUrl = `${baseUrl}/chat/share?shareId=${share?.shareId}${
getValues('showHistory') ? '' : '&showHistory=0'
}`;
@@ -91,7 +93,7 @@ const SelectUsingWayModal = ({ share, onClose }: { share: OutLinkSchema; onClose
[UsingWayEnum.script]: {
blockTitle: t('core.app.outLink.Script block title'),
code: `<script
src="${location?.origin}/js/iframe.js"
src="${baseUrl}/js/iframe.js"
id="chatbot-iframe"
data-bot-src="${linkUrl}"
data-default-open="${getValues('scriptDefaultOpen') ? 'true' : 'false'}"

View File

@@ -36,7 +36,7 @@ import { useForm } from 'react-hook-form';
import { defaultOutLinkForm } from '@/constants/app';
import type { OutLinkEditType, OutLinkSchema } from '@fastgpt/global/support/outLink/type.d';
import { useRequest } from '@/web/common/hooks/useRequest';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { formatStorePrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import { OutLinkTypeEnum } from '@fastgpt/global/support/outLink/constant';
import { useTranslation } from 'next-i18next';
import { useToast } from '@/web/common/hooks/useToast';
@@ -113,7 +113,7 @@ const Share = ({ appId }: { appId: string }) => {
<Tr key={item._id}>
<Td>{item.name}</Td>
<Td>
{formatPrice(item.total)}
{formatStorePrice2Read(item.total)}
{feConfigs?.isPlus
? `${
item.limit && item.limit.credit > -1

View File

@@ -19,7 +19,6 @@ import { useSystemStore } from '@/web/common/system/useSystemStore';
import { appModules2Form, getDefaultAppForm } from '@fastgpt/global/core/app/utils';
import type { AppSimpleEditFormType } from '@fastgpt/global/core/app/type.d';
import { chatModelList, simpleModeTemplates } from '@/web/common/system/staticData';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { chatNodeSystemPromptTip, welcomeTextTip } from '@fastgpt/global/core/module/template/tip';
import type { ModuleItemType } from '@fastgpt/global/core/module/type';
import { useRequest } from '@/web/common/hooks/useRequest';
@@ -51,6 +50,7 @@ import VariableEdit from '@/components/core/module/Flow/components/modules/Varia
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import PromptTextarea from '@/components/common/Textarea/PromptTextarea/index';
import { DatasetSearchModeMap } from '@fastgpt/global/core/dataset/constant';
import SelectAiModel from '@/components/Select/SelectAiModel';
const InfoModal = dynamic(() => import('../InfoModal'));
const DatasetSelectModal = dynamic(() => import('@/components/core/module/DatasetSelectModal'));
@@ -109,7 +109,7 @@ function ConfigForm({
const chatModelSelectList = useMemo(() => {
return chatModelList.map((item) => ({
value: item.model,
label: `${item.name} (${formatPrice(item.price, 1000)} 元/1k tokens)`
label: item.name
}));
}, [refresh]);
@@ -278,7 +278,7 @@ function ConfigForm({
<Flex alignItems={'center'} mt={5}>
<Box {...LabelStyles}>{t('core.ai.Model')}</Box>
<Box flex={'1 0 0'}>
<MySelect
<SelectAiModel
width={'100%'}
value={getValues(`aiSettings.model`)}
list={chatModelSelectList}
@@ -502,7 +502,28 @@ function ConfigForm({
)}
{isOpenDatasetParams && (
<DatasetParamsModal
{...getValues('dataset')}
// {...getValues('dataset')}
searchMode={getValues('dataset.searchMode')}
searchEmptyText={
selectSimpleTemplate?.systemForm?.dataset?.searchEmptyText
? getValues('dataset.searchEmptyText')
: undefined
}
limit={
selectSimpleTemplate?.systemForm?.dataset?.limit
? getValues('dataset.limit')
: undefined
}
similarity={
selectSimpleTemplate?.systemForm?.dataset?.similarity
? getValues('dataset.similarity')
: undefined
}
usingReRank={
selectSimpleTemplate?.systemForm?.dataset?.usingReRank
? getValues('dataset.usingReRank')
: undefined
}
maxTokens={tokenLimit}
onClose={onCloseKbParams}
onSuccess={(e) => {

View File

@@ -17,7 +17,7 @@ import { serviceSideProps } from '@/web/common/utils/i18n';
import { useAppStore } from '@/web/core/app/store/useAppStore';
import Head from 'next/head';
const AdEdit = dynamic(() => import('./components/AdEdit'), {
const FlowEdit = dynamic(() => import('./components/FlowEdit'), {
loading: () => <Loading />
});
const OutLink = dynamic(() => import('./components/OutLink'), {});
@@ -173,7 +173,7 @@ const AppDetail = ({ currentTab }: { currentTab: `${TabEnum}` }) => {
<Box flex={'1 0 0'} h={[0, '100%']} overflow={['overlay', '']}>
{currentTab === TabEnum.simpleEdit && <SimpleEdit appId={appId} />}
{currentTab === TabEnum.adEdit && appDetail && (
<AdEdit app={appDetail} onClose={() => setCurrentTab(TabEnum.simpleEdit)} />
<FlowEdit app={appDetail} onClose={() => setCurrentTab(TabEnum.simpleEdit)} />
)}
{currentTab === TabEnum.logs && <Logs appId={appId} />}
{currentTab === TabEnum.outLink && <OutLink appId={appId} />}

View File

@@ -1,4 +1,4 @@
import React, { useState } from 'react';
import React from 'react';
import {
Box,
Flex,
@@ -8,10 +8,10 @@ import {
NumberInputStepper,
NumberIncrementStepper,
NumberDecrementStepper,
Input
Input,
Grid
} from '@chakra-ui/react';
import { useConfirm } from '@/web/common/hooks/useConfirm';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
@@ -25,7 +25,7 @@ const ChunkImport = () => {
const { t } = useTranslation();
const { datasetDetail } = useDatasetStore();
const vectorModel = datasetDetail.vectorModel;
const unitPrice = vectorModel?.price || 0.2;
const unitPrice = vectorModel?.inputPrice || 0.002;
const {
chunkLen,
@@ -33,6 +33,7 @@ const ChunkImport = () => {
setCustomSplitChar,
successChunks,
totalChunks,
totalTokens,
isUnselectedFile,
price,
onclickUpload,
@@ -108,21 +109,27 @@ const ChunkImport = () => {
/>
</Box>
</Box>
{/* price */}
<Flex mt={4} alignItems={'center'}>
<Box>
{t('core.dataset.import.Estimated Price')}
<MyTooltip
label={t('core.dataset.import.Estimated Price Tips', {
price: formatPrice(unitPrice, 1000)
})}
forceShow
>
<QuestionOutlineIcon ml={1} />
</MyTooltip>
</Box>
<Box ml={4}>{t('common.price.Amount', { amount: price, unit: '元' })}</Box>
</Flex>
<Grid mt={4} gridTemplateColumns={'1fr 1fr'} gridGap={2}>
<Flex alignItems={'center'}>
<Box>{t('core.dataset.import.Total tokens')}</Box>
<Box>{totalTokens}</Box>
</Flex>
{/* price */}
<Flex alignItems={'center'}>
<Box>
{t('core.dataset.import.Estimated Price')}
<MyTooltip
label={t('core.dataset.import.Embedding Estimated Price Tips', {
price: unitPrice
})}
forceShow
>
<QuestionOutlineIcon ml={1} />
</MyTooltip>
</Box>
<Box ml={4}>{t('common.price.Amount', { amount: price, unit: '元' })}</Box>
</Flex>
</Grid>
<Flex mt={3}>
{showRePreview && (
<Button variant={'whitePrimary'} mr={4} onClick={onReSplitChunks}>

View File

@@ -1,8 +1,11 @@
import React from 'react';
import { Box, Flex, Button } from '@chakra-ui/react';
import { Box, Flex, Button, Grid } from '@chakra-ui/react';
import { useConfirm } from '@/web/common/hooks/useConfirm';
import { useImportStore, SelectorContainer, PreviewFileOrChunk } from './Provider';
import { useTranslation } from 'next-i18next';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
const fileExtension = '.csv';
const csvTemplate = `index,content
@@ -12,8 +15,19 @@ const csvTemplate = `index,content
const CsvImport = () => {
const { t } = useTranslation();
const { successChunks, totalChunks, isUnselectedFile, onclickUpload, uploading } =
useImportStore();
const {
successChunks,
totalChunks,
isUnselectedFile,
onclickUpload,
uploading,
totalTokens,
price
} = useImportStore();
const { datasetDetail } = useDatasetStore();
const vectorModel = datasetDetail.vectorModel;
const unitPrice = vectorModel?.inputPrice || 0.002;
const { openConfirm, ConfirmModal } = useConfirm({
content: t('core.dataset.import.Import Tip')
@@ -31,6 +45,27 @@ const CsvImport = () => {
}}
tip={t('dataset.import csv tip')}
>
<Grid mt={4} gridTemplateColumns={'1fr 1fr'} gridGap={2}>
<Flex alignItems={'center'}>
<Box>{t('core.dataset.import.Total tokens')}</Box>
<Box>{totalTokens}</Box>
</Flex>
{/* price */}
<Flex alignItems={'center'}>
<Box>
{t('core.dataset.import.Estimated Price')}
<MyTooltip
label={t('core.dataset.import.Embedding Estimated Price Tips', {
price: unitPrice
})}
forceShow
>
<QuestionOutlineIcon ml={1} />
</MyTooltip>
</Box>
<Box ml={4}>{t('common.price.Amount', { amount: price, unit: '元' })}</Box>
</Flex>
</Grid>
<Flex mt={3}>
<Button isDisabled={uploading} onClick={openConfirm(onclickUpload)}>
{uploading ? (

View File

@@ -44,32 +44,36 @@ const ImportData = ({
[ImportTypeEnum.chunk]: {
defaultChunkLen: vectorModel?.defaultToken || 500,
chunkOverlapRatio: 0.2,
unitPrice: vectorModel?.price || 0.2,
inputPrice: vectorModel?.inputPrice || 0,
outputPrice: 0,
mode: TrainingModeEnum.chunk,
collectionTrainingType: DatasetCollectionTrainingModeEnum.chunk
},
[ImportTypeEnum.qa]: {
defaultChunkLen: agentModel?.maxContext * 0.55 || 8000,
chunkOverlapRatio: 0,
unitPrice: agentModel?.price || 3,
inputPrice: agentModel?.inputPrice || 0,
outputPrice: agentModel?.outputPrice || 0,
mode: TrainingModeEnum.qa,
collectionTrainingType: DatasetCollectionTrainingModeEnum.qa
},
[ImportTypeEnum.csv]: {
defaultChunkLen: 0,
chunkOverlapRatio: 0,
unitPrice: vectorModel?.price || 0.2,
inputPrice: vectorModel?.inputPrice || 0,
outputPrice: 0,
mode: TrainingModeEnum.chunk,
collectionTrainingType: DatasetCollectionTrainingModeEnum.manual
}
};
return map[importType];
}, [
agentModel?.inputPrice,
agentModel?.maxContext,
agentModel?.price,
agentModel?.outputPrice,
importType,
vectorModel?.defaultToken,
vectorModel?.price
vectorModel?.inputPrice
]);
const TitleStyle: BoxProps = {

View File

@@ -11,7 +11,7 @@ import React, {
import FileSelect, { FileItemType, Props as FileSelectProps } from './FileSelect';
import { useRequest } from '@/web/common/hooks/useRequest';
import { postDatasetCollection } from '@/web/core/dataset/api';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { formatModelPrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { useToast } from '@/web/common/hooks/useToast';
@@ -43,6 +43,7 @@ type useImportStoreType = {
setSuccessChunks: Dispatch<SetStateAction<number>>;
isUnselectedFile: boolean;
totalChunks: number;
totalTokens: number;
onclickUpload: (e?: { prompt?: string }) => void;
onReSplitChunks: () => void;
price: number;
@@ -68,6 +69,7 @@ const StateContext = createContext<useImportStoreType>({
isUnselectedFile: false,
totalChunks: 0,
totalTokens: 0,
onReSplitChunks: function (): void {
throw new Error('Function not implemented.');
},
@@ -100,7 +102,8 @@ export const useImportStore = () => useContext(StateContext);
const Provider = ({
datasetId,
parentId,
unitPrice,
inputPrice,
outputPrice,
mode,
collectionTrainingType,
vectorModel,
@@ -113,7 +116,8 @@ const Provider = ({
}: {
datasetId: string;
parentId: string;
unitPrice: number;
inputPrice: number;
outputPrice: number;
mode: `${TrainingModeEnum}`;
collectionTrainingType: `${DatasetCollectionTrainingModeEnum}`;
vectorModel: string;
@@ -140,9 +144,17 @@ const Provider = ({
[files]
);
const totalTokens = useMemo(() => files.reduce((sum, file) => sum + file.tokens, 0), [files]);
const price = useMemo(() => {
return formatPrice(files.reduce((sum, file) => sum + file.tokens, 0) * unitPrice);
}, [files, unitPrice]);
if (mode === TrainingModeEnum.qa) {
const inputTotal = totalTokens * inputPrice;
const outputTotal = totalTokens * 0.5 * outputPrice;
return formatModelPrice2Read(inputTotal + outputTotal);
}
return formatModelPrice2Read(totalTokens * inputPrice);
}, [inputPrice, mode, outputPrice, totalTokens]);
/* start upload data */
const { mutate: onclickUpload, isLoading: uploading } = useRequest({
@@ -249,6 +261,7 @@ const Provider = ({
setSuccessChunks,
isUnselectedFile,
totalChunks,
totalTokens,
price,
onReSplitChunks,
onclickUpload,

View File

@@ -1,7 +1,6 @@
import React, { useState } from 'react';
import { Box, Flex, Button, Textarea } from '@chakra-ui/react';
import { Box, Flex, Button, Textarea, Grid } from '@chakra-ui/react';
import { useConfirm } from '@/web/common/hooks/useConfirm';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { Prompt_AgentQA } from '@/global/core/prompt/agent';
@@ -15,11 +14,11 @@ const QAImport = () => {
const { t } = useTranslation();
const { datasetDetail } = useDatasetStore();
const agentModel = datasetDetail.agentModel;
const unitPrice = agentModel?.price || 3;
const {
successChunks,
totalChunks,
totalTokens,
isUnselectedFile,
price,
onclickUpload,
@@ -55,20 +54,28 @@ const QAImport = () => {
</Box>
</Box>
{/* price */}
<Flex py={5} alignItems={'center'}>
<Box>
{t('core.dataset.import.Estimated Price')}
<MyTooltip
label={t('core.dataset.import.Estimated Price Tips', {
price: formatPrice(unitPrice, 1000)
})}
forceShow
>
<QuestionOutlineIcon ml={1} />
</MyTooltip>
</Box>
<Box ml={4}>{t('common.price.Amount', { amount: price, unit: '元' })}</Box>
</Flex>
<Grid mt={4} gridTemplateColumns={'1fr 1fr'} gridGap={2}>
<Flex alignItems={'center'}>
<Box>{t('core.dataset.import.Total tokens')}</Box>
<Box>{totalTokens}</Box>
</Flex>
{/* price */}
<Flex alignItems={'center'}>
<Box>
{t('core.dataset.import.Estimated Price')}
<MyTooltip
label={t('core.dataset.import.QA Estimated Price Tips', {
inputPrice: agentModel?.inputPrice,
outputPrice: agentModel?.outputPrice
})}
forceShow
>
<QuestionOutlineIcon ml={1} />
</MyTooltip>
</Box>
<Box ml={4}>{t('common.price.Amount', { amount: price, unit: '元' })}</Box>
</Flex>
</Grid>
<Flex mt={3}>
{showRePreview && (
<Button variant={'whitePrimary'} mr={4} onClick={onReSplitChunks}>

View File

@@ -1,22 +1,12 @@
import React, { useEffect, useMemo, useState } from 'react';
import {
Box,
Textarea,
Button,
Flex,
useTheme,
Grid,
Progress,
Switch,
useDisclosure
} from '@chakra-ui/react';
import { Box, Textarea, Button, Flex, useTheme, Grid, useDisclosure } from '@chakra-ui/react';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { useSearchTestStore, SearchTestStoreItemType } from '@/web/core/dataset/store/searchTest';
import { getDatasetDataItemById, postSearchText } from '@/web/core/dataset/api';
import MyIcon from '@/components/Icon';
import { useRequest } from '@/web/common/hooks/useRequest';
import { formatTimeToChatTime } from '@/utils/tools';
import InputDataModal, { type InputDataType } from './InputDataModal';
import InputDataModal, { RawSourceText, type InputDataType } from './InputDataModal';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { useToast } from '@/web/common/hooks/useToast';
@@ -45,6 +35,7 @@ const Test = ({ datasetId }: { datasetId: string }) => {
const [searchMode, setSearchMode] = useState<`${DatasetSearchModeEnum}`>(
DatasetSearchModeEnum.embedding
);
const [usingReRank, setUsingReRank] = useState(false);
const searchModeData = DatasetSearchModeMap[searchMode];
const {
@@ -59,7 +50,8 @@ const Test = ({ datasetId }: { datasetId: string }) => {
);
const { mutate, isLoading } = useRequest({
mutationFn: () => postSearchText({ datasetId, text: inputText.trim(), searchMode, limit: 30 }),
mutationFn: () =>
postSearchText({ datasetId, text: inputText.trim(), searchMode, usingReRank, limit: 20 }),
onSuccess(res: SearchTestResponse) {
if (!res || res.list.length === 0) {
return toast({
@@ -73,7 +65,8 @@ const Test = ({ datasetId }: { datasetId: string }) => {
text: inputText.trim(),
time: new Date(),
results: res.list,
duration: res.duration
duration: res.duration,
searchMode
};
pushDatasetTestItem(testItem);
setDatasetTestItem(testItem);
@@ -123,8 +116,8 @@ const Test = ({ datasetId }: { datasetId: string }) => {
variant={'unstyled'}
maxLength={datasetDetail.vectorModel.maxToken}
placeholder={t('core.dataset.test.Test Text Placeholder')}
value={inputText}
onChange={(e) => setInputText(e.target.value)}
defaultValue={inputText}
onBlur={(e) => setInputText(e.target.value)}
/>
<Flex alignItems={'center'} justifyContent={'flex-end'}>
<Box mx={3} color={'myGray.500'}>
@@ -142,8 +135,9 @@ const Test = ({ datasetId }: { datasetId: string }) => {
</Flex>
<Box mt={2}>
<Flex py={2} fontWeight={'bold'} borderBottom={theme.borders.sm}>
<Box w={'80px'}>{t('core.dataset.search.search mode')}</Box>
<Box flex={1}>{t('core.dataset.test.Test Text')}</Box>
<Box w={'80px'}>{t('common.Time')}</Box>
<Box w={'70px'}>{t('common.Time')}</Box>
<Box w={'14px'}></Box>
</Flex>
{testHistories.map((item) => (
@@ -159,12 +153,27 @@ const Test = ({ datasetId }: { datasetId: string }) => {
}
}}
cursor={'pointer'}
fontSize={'sm'}
onClick={() => setDatasetTestItem(item)}
>
<Box w={'80px'}>
{DatasetSearchModeMap[item.searchMode] ? (
<Flex alignItems={'center'}>
<MyIcon
name={DatasetSearchModeMap[item.searchMode].icon as any}
w={'12px'}
mr={'1px'}
/>
{t(DatasetSearchModeMap[item.searchMode].title)}
</Flex>
) : (
'-'
)}
</Box>
<Box flex={1} mr={2}>
{item.text}
</Box>
<Box w={'80px'}>{formatTimeToChatTime(item.time)}</Box>
<Box w={'70px'}>{formatTimeToChatTime(item.time)}</Box>
<MyTooltip label={t('core.dataset.test.delete test history')}>
<Box w={'14px'} h={'14px'}>
<MyIcon
@@ -232,7 +241,7 @@ const Test = ({ datasetId }: { datasetId: string }) => {
<Box
key={item.id}
pb={2}
borderRadius={'sm'}
borderRadius={'lg'}
border={theme.borders.base}
_notLast={{ mb: 2 }}
cursor={'pointer'}
@@ -267,12 +276,19 @@ const Test = ({ datasetId }: { datasetId: string }) => {
border={theme.borders.base}
px={2}
fontSize={'sm'}
mr={1}
mr={3}
borderRadius={'md'}
>
# {index + 1}
</Box>
<MyIcon name={'kbTest'} w={'14px'} />
<RawSourceText
fontWeight={'bold'}
color={'black'}
sourceName={item.sourceName}
sourceId={item.sourceId}
canView
/>
{/* <MyIcon name={'kbTest'} w={'14px'} />
<Progress
mx={2}
flex={'1 0 0'}
@@ -281,7 +297,7 @@ const Test = ({ datasetId }: { datasetId: string }) => {
borderRadius={'20px'}
colorScheme="gray"
/>
<Box>{item.score.toFixed(4)}</Box>
<Box>{item.score.toFixed(4)}</Box> */}
</Flex>
<Box px={2} fontSize={'xs'} color={'myGray.600'} wordBreak={'break-word'}>
<Box>{item.q}</Box>
@@ -335,9 +351,11 @@ const Test = ({ datasetId }: { datasetId: string }) => {
{isOpenSelectMode && (
<DatasetParamsModal
searchMode={searchMode}
usingReRank={usingReRank}
onClose={onCloseSelectMode}
onSuccess={(e) => {
setSearchMode(e.searchMode);
e.usingReRank !== undefined && setUsingReRank(e.usingReRank);
}}
/>
)}

View File

@@ -1,6 +1,6 @@
import React, { useCallback } from 'react';
import React, { useCallback, useMemo } from 'react';
import { useRouter } from 'next/router';
import { Box, Flex, IconButton, useTheme } from '@chakra-ui/react';
import { Box, Flex, IconButton, useTheme, Progress } from '@chakra-ui/react';
import { useToast } from '@/web/common/hooks/useToast';
import { useQuery } from '@tanstack/react-query';
import { getErrText } from '@fastgpt/global/common/error/utils';
@@ -92,9 +92,55 @@ const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: `${T
}
});
const { data: trainingQueueLen = 0 } = useQuery(['getTrainingQueueLen'], getTrainingQueueLen, {
refetchInterval: 10000
});
const { data: { vectorTrainingCount = 0, agentTrainingCount = 0 } = {} } = useQuery(
['getTrainingQueueLen'],
() =>
getTrainingQueueLen({
vectorModel: datasetDetail.vectorModel.model,
agentModel: datasetDetail.agentModel.model
}),
{
refetchInterval: 10000
}
);
const { vectorTrainingMap, agentTrainingMap } = useMemo(() => {
const vectorTrainingMap = (() => {
if (vectorTrainingCount < 1000)
return {
colorSchema: 'green',
tip: t('core.dataset.training.Leisure')
};
if (vectorTrainingCount < 10000)
return {
colorSchema: 'yellow',
tip: t('core.dataset.training.Waiting')
};
return {
colorSchema: 'red',
tip: t('core.dataset.training.Full')
};
})();
const agentTrainingMap = (() => {
if (agentTrainingCount < 100)
return {
colorSchema: 'green',
tip: t('core.dataset.training.Leisure')
};
if (agentTrainingCount < 1000)
return {
colorSchema: 'yellow',
tip: t('core.dataset.training.Waiting')
};
return {
colorSchema: 'red',
tip: t('core.dataset.training.Full')
};
})();
return {
vectorTrainingMap,
agentTrainingMap
};
}, [agentTrainingCount, t, vectorTrainingCount]);
return (
<>
@@ -155,19 +201,32 @@ const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: `${T
setCurrentTab(e);
}}
/>
<Box textAlign={'center'}>
<Flex justifyContent={'center'} alignItems={'center'}>
<MyIcon mr={1} name="overviewLight" w={'16px'} color={'green.500'} />
<Box>{t('dataset.System Data Queue')}</Box>
<MyTooltip
label={t('dataset.Queue Desc', { title: feConfigs?.systemTitle })}
placement={'top'}
>
<QuestionOutlineIcon ml={1} w={'16px'} />
</MyTooltip>
</Flex>
<Box mt={1} fontWeight={'bold'}>
{trainingQueueLen}
<Box>
<Box mb={3}>
<Box fontSize={'sm'}>
{t('core.dataset.training.Agent queue')}({agentTrainingMap.tip})
</Box>
<Progress
value={100}
size={'xs'}
colorScheme={agentTrainingMap.colorSchema}
borderRadius={'10px'}
isAnimated
hasStripe
/>
</Box>
<Box mb={3}>
<Box fontSize={'sm'}>
{t('core.dataset.training.Vector queue')}({vectorTrainingMap.tip})
</Box>
<Progress
value={100}
size={'xs'}
colorScheme={vectorTrainingMap.colorSchema}
borderRadius={'10px'}
isAnimated
hasStripe
/>
</Box>
</Box>
<Flex

View File

@@ -1,5 +1,5 @@
import React, { useState, Dispatch, useCallback } from 'react';
import { FormControl, Box, Input, Button, FormErrorMessage, Flex } from '@chakra-ui/react';
import { FormControl, Box, Input, Button } from '@chakra-ui/react';
import { useForm } from 'react-hook-form';
import { PageTypeEnum } from '@/constants/user';
import { postFindPassword } from '@/web/support/user/api';
@@ -76,7 +76,14 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
<Box fontWeight={'bold'} fontSize={'2xl'} textAlign={'center'}>
{feConfigs?.systemTitle}
</Box>
<Box mt={'42px'}>
<Box
mt={'42px'}
onKeyDown={(e) => {
if (e.keyCode === 13 && !e.shiftKey && !requesting) {
handleSubmit(onclickFindPassword)();
}
}}
>
<FormControl isInvalid={!!errors.username}>
<Input
bg={'myGray.50'}

View File

@@ -124,7 +124,14 @@ const LoginForm = ({ setPageType, loginSuccess }: Props) => {
{feConfigs?.systemTitle}
</Box>
</Flex>
<Box mt={'42px'}>
<Box
mt={'42px'}
onKeyDown={(e) => {
if (e.keyCode === 13 && !e.shiftKey && !requesting) {
handleSubmit(onclickLogin)();
}
}}
>
<FormControl isInvalid={!!errors.username}>
<Input
bg={'myGray.50'}
@@ -197,7 +204,7 @@ const LoginForm = ({ setPageType, loginSuccess }: Props) => {
</>
)}
{/* oauth */}
{feConfigs?.show_register && (
{feConfigs?.show_register && oAuthList.length > 0 && (
<>
<Box mt={'80px'} position={'relative'}>
<Divider />

View File

@@ -90,7 +90,14 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
<Box fontWeight={'bold'} fontSize={'2xl'} textAlign={'center'}>
{feConfigs?.systemTitle}
</Box>
<Box mt={'42px'}>
<Box
mt={'42px'}
onKeyDown={(e) => {
if (e.keyCode === 13 && !e.shiftKey && !requesting) {
handleSubmit(onclickRegister)();
}
}}
>
<FormControl isInvalid={!!errors.username}>
<Input
bg={'myGray.50'}

View File

@@ -85,7 +85,7 @@ const Render = ({ pluginId }: Props) => {
);
};
export default function AdEdit(props: any) {
export default function FlowEdit(props: any) {
return (
<FlowProvider mode={'plugin'} filterAppIds={[]}>
<Render {...props} />

View File

@@ -0,0 +1,13 @@
import { cut } from '@node-rs/jieba';
import { stopWords } from '@fastgpt/global/common/string/jieba';
/**
 * Tokenize `text` with jieba and return a space-separated token string
 * suitable for MongoDB full-text `$text` search.
 * Tokens are stripped of everything except CJK ideographs, ASCII letters,
 * digits and whitespace; empty tokens and stop words are dropped.
 */
export function jiebaSplit({ text }: { text: string }) {
  // cut(text, true) enables jieba's HMM-based segmentation
  const rawTokens = cut(text, true);

  const keptTokens: string[] = [];
  for (const token of rawTokens) {
    // keep only \u4e00-\u9fa5 (CJK), a-z, A-Z, 0-9 and whitespace, then trim
    const cleaned = token.replace(/[^\u4e00-\u9fa5a-zA-Z0-9\s]/g, '').trim();
    if (cleaned && !stopWords.has(cleaned)) {
      keptTokens.push(cleaned);
    }
  }

  return keptTokens.join(' ') || '';
}

View File

@@ -24,13 +24,24 @@ export function getAudioSpeechModel(model?: string) {
);
}
// Return the globally configured whisper (speech-to-text) model.
// NOTE(review): the `model` parameter is currently ignored — a single global
// whisper model is returned rather than a per-name lookup; confirm intended.
export function getWhisperModel(model?: string) {
  return global.whisperModel;
}
// Look up a configured re-rank model by its model name.
// Returns undefined when no model with that name is registered.
export function getReRankModel(model?: string) {
  return global.reRankModels.find((item) => item.model === model);
}
export enum ModelTypeEnum {
chat = 'chat',
qa = 'qa',
cq = 'cq',
extract = 'extract',
qg = 'qg',
vector = 'vector'
vector = 'vector',
audioSpeech = 'audioSpeech',
whisper = 'whisper',
rerank = 'rerank'
}
export const getModelMap = {
[ModelTypeEnum.chat]: getChatModel,
@@ -38,5 +49,8 @@ export const getModelMap = {
[ModelTypeEnum.cq]: getCQModel,
[ModelTypeEnum.extract]: getExtractModel,
[ModelTypeEnum.qg]: getQGModel,
[ModelTypeEnum.vector]: getVectorModel
[ModelTypeEnum.vector]: getVectorModel,
[ModelTypeEnum.audioSpeech]: getAudioSpeechModel,
[ModelTypeEnum.whisper]: getWhisperModel,
[ModelTypeEnum.rerank]: getReRankModel
};

View File

@@ -20,8 +20,14 @@ export function reRankRecall({ query, inputs }: PostReRankProps) {
Authorization: `Bearer ${model.requestAuth}`
}
}
).then((data) => {
console.log('rerank time:', Date.now() - start);
return data;
});
)
.then((data) => {
console.log('rerank time:', Date.now() - start);
return data;
})
.catch((err) => {
console.log(err);
return [];
});
}

View File

@@ -1,73 +0,0 @@
import { getAIApi } from '@fastgpt/service/core/ai/config';
export type GetVectorProps = {
model: string;
input: string | string[];
};
/**
 * Convert text into embedding vectors via the AI embeddings API.
 *
 * @param model - embedding model name (defaults to 'text-embedding-ada-002')
 * @param input - a single string or an array of strings; empty strings are rejected
 * @returns tokenLen (total tokens reported by the API) and one vector per input,
 *          each normalized to 1536 dimensions by `unityDimensional`
 * @throws rejects with `{ code: 500, message }` on empty input, or with the
 *         API/validation error otherwise
 */
export async function getVectorsByText({
  model = 'text-embedding-ada-002',
  input
}: GetVectorProps) {
  try {
    // reject empty string input
    if (typeof input === 'string' && !input) {
      return Promise.reject({
        code: 500,
        message: 'input is empty'
      });
    } else if (Array.isArray(input)) {
      // reject if ANY element of the array is empty/falsy
      for (let i = 0; i < input.length; i++) {
        if (!input[i]) {
          return Promise.reject({
            code: 500,
            message: 'input array is empty'
          });
        }
      }
    }

    // get the AI API client
    const ai = getAIApi();

    // convert the input text into embedding vectors
    const result = await ai.embeddings
      .create({
        model,
        input
      })
      .then(async (res) => {
        if (!res.data) {
          return Promise.reject('Embedding API 404');
        }
        // missing embedding on the first item → treat the whole response as an error
        if (!res?.data?.[0]?.embedding) {
          console.log(res?.data);
          // @ts-ignore
          return Promise.reject(res.data?.err?.message || 'Embedding API Error');
        }
        return {
          tokenLen: res.usage.total_tokens || 0,
          // unityDimensional is synchronous; Promise.all just unwraps the mapped values
          vectors: await Promise.all(res.data.map((item) => unityDimensional(item.embedding)))
        };
      });

    return result;
  } catch (error) {
    // log and re-reject so callers see the original error
    console.log(`Embedding Error`, error);
    return Promise.reject(error);
  }
}
// Normalize a vector to exactly 1536 dimensions: longer vectors are truncated
// (with a console warning), shorter vectors are right-padded with zeros.
// Always returns a new array; the input is never mutated.
function unityDimensional(vector: number[]) {
  const targetLen = 1536;

  if (vector.length > targetLen) {
    console.log(`当前向量维度为: ${vector.length}, 向量维度不能超过 1536, 已自动截取前 1536 维度`);
    return vector.slice(0, targetLen);
  }

  // pad the tail with zeros up to the target dimension
  const padding = new Array(targetLen - vector.length).fill(0);
  return vector.concat(padding);
}

View File

@@ -4,12 +4,30 @@ import {
PatchIndexesProps,
UpdateDatasetDataProps
} from '@fastgpt/global/core/dataset/controller';
import { deletePgDataById } from '@fastgpt/service/core/dataset/data/pg';
import { insertData2Pg, updatePgDataById } from './pg';
import {
insertDatasetDataVector,
recallFromVectorStore,
updateDatasetDataVector
} from '@fastgpt/service/common/vectorStore/controller';
import { Types } from 'mongoose';
import { DatasetDataIndexTypeEnum } from '@fastgpt/global/core/dataset/constant';
import {
DatasetDataIndexTypeEnum,
DatasetSearchModeEnum,
DatasetSearchModeMap,
SearchScoreTypeEnum
} from '@fastgpt/global/core/dataset/constant';
import { getDefaultIndex } from '@fastgpt/global/core/dataset/utils';
import { jiebaSplit } from '../utils';
import { jiebaSplit } from '@/service/common/string/jieba';
import { deleteDatasetDataVector } from '@fastgpt/service/common/vectorStore/controller';
import { getVectorsByText } from '@fastgpt/service/core/ai/embedding';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import {
DatasetDataSchemaType,
SearchDataResponseItemType
} from '@fastgpt/global/core/dataset/type';
import { reRankRecall } from '../../ai/rerank';
import { countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import { hashStr } from '@fastgpt/global/common/string/tools';
/* insert data.
* 1. create data id
@@ -50,17 +68,17 @@ export async function insertData2Dataset({
}))
: [getDefaultIndex({ q, a })];
// insert to pg
// insert to vector store
const result = await Promise.all(
indexes.map((item) =>
insertData2Pg({
mongoDataId: String(id),
input: item.text,
insertDatasetDataVector({
query: item.text,
model,
teamId,
tmbId,
datasetId,
collectionId
collectionId,
dataId: String(id)
})
)
);
@@ -84,7 +102,7 @@ export async function insertData2Dataset({
return {
insertId: _id,
tokenLen: result.reduce((acc, cur) => acc + cur.tokenLen, 0)
tokens: result.reduce((acc, cur) => acc + cur.tokens, 0)
};
}
@@ -172,35 +190,40 @@ export async function updateData2Dataset({
const result = await Promise.all(
patchResult.map(async (item) => {
if (item.type === 'create') {
const result = await insertData2Pg({
mongoDataId: dataId,
input: item.index.text,
const result = await insertDatasetDataVector({
query: item.index.text,
model,
teamId: mongoData.teamId,
tmbId: mongoData.tmbId,
datasetId: mongoData.datasetId,
collectionId: mongoData.collectionId
collectionId: mongoData.collectionId,
dataId
});
item.index.dataId = result.insertId;
return result;
}
if (item.type === 'update' && item.index.dataId) {
return updatePgDataById({
return updateDatasetDataVector({
id: item.index.dataId,
input: item.index.text,
query: item.index.text,
model
});
}
if (item.type === 'delete' && item.index.dataId) {
return deletePgDataById(['id', item.index.dataId]);
await deleteDatasetDataVector({
id: item.index.dataId
});
return {
tokens: 0
};
}
return {
tokenLen: 0
tokens: 0
};
})
);
const tokenLen = result.reduce((acc, cur) => acc + cur.tokenLen, 0);
const tokens = result.reduce((acc, cur) => acc + cur.tokens, 0);
// update mongo
mongoData.q = q || mongoData.q;
@@ -211,6 +234,457 @@ export async function updateData2Dataset({
await mongoData.save();
return {
tokenLen
tokens
};
}
/**
 * Multi-stage dataset retrieval pipeline.
 *
 * Steps: (1) compute per-recall limits from the token budget, (2) run embedding
 * and/or full-text recall for each query, (3) optionally re-rank the merged
 * candidates, (4) fuse the result lists with Reciprocal Rank Fusion (RRF),
 * (5) de-duplicate by normalized q+a text, (6) filter by similarity score,
 * (7) trim the final list to the token budget.
 *
 * @param model - embedding model name used for vectorization
 * @param similarity - minimum score threshold (applied to reRank score when
 *   re-ranking, otherwise to the embedding score in embedding-only mode)
 * @param limit - max token budget for the returned chunks
 * @param datasetIds - datasets to search across
 * @param searchMode - embedding / fullTextRecall / mixed (invalid values fall
 *   back to embedding)
 * @param usingReRank - request re-ranking; silently disabled when no reRank
 *   models are configured
 * @param rawQuery - the original user query (used for re-ranking)
 * @param queries - expanded/rewritten queries to recall with
 * @returns searchRes (token-budgeted results), tokens (embedding token usage),
 *   usingSimilarityFilter (whether a similarity threshold was applied)
 */
export async function searchDatasetData(props: {
  model: string;
  similarity?: number; // min distance
  limit: number; // max Token limit
  datasetIds: string[];
  searchMode?: `${DatasetSearchModeEnum}`;
  usingReRank?: boolean;
  rawQuery: string;
  queries: string[];
}) {
  let {
    rawQuery,
    queries,
    model,
    similarity = 0,
    limit: maxTokens,
    searchMode = DatasetSearchModeEnum.embedding,
    usingReRank = false,
    datasetIds = []
  } = props;

  /* init params */
  // fall back to embedding mode for unknown search modes
  searchMode = DatasetSearchModeMap[searchMode] ? searchMode : DatasetSearchModeEnum.embedding;
  // re-rank only works when at least one reRank model is configured
  usingReRank = usingReRank && global.reRankModels.length > 0;

  // Compatible with topk limit: tiny values are legacy "top-k" settings, not token budgets
  if (maxTokens < 50) {
    maxTokens = 1500;
  }
  // reused scratch Set for several de-dup passes below
  let set = new Set<string>();
  let usingSimilarityFilter = false;

  /* function */
  // Derive how many candidates each recall channel should fetch from the token budget.
  const countRecallLimit = () => {
    const oneChunkToken = 50;
    const estimatedLen = Math.max(20, Math.ceil(maxTokens / oneChunkToken));

    // Increase search range, reduce hnsw loss. 20 ~ 100
    if (searchMode === DatasetSearchModeEnum.embedding) {
      return {
        embeddingLimit: Math.min(estimatedLen, 100),
        fullTextLimit: 0
      };
    }
    // 50 < 2*limit < value < 100
    if (searchMode === DatasetSearchModeEnum.fullTextRecall) {
      return {
        embeddingLimit: 0,
        fullTextLimit: Math.min(estimatedLen, 50)
      };
    }
    // mixed
    // 50 < 2*limit < embedding < 80
    // 20 < limit < fullTextLimit < 40
    return {
      embeddingLimit: Math.min(estimatedLen, 80),
      fullTextLimit: Math.min(estimatedLen, 40)
    };
  };
  // Vector-store recall: embed the query, fetch nearest neighbours, then join
  // back to the Mongo collection/data documents for the actual q/a text.
  const embeddingRecall = async ({ query, limit }: { query: string; limit: number }) => {
    const { vectors, tokens } = await getVectorsByText({
      model,
      input: [query]
    });

    const { results } = await recallFromVectorStore({
      vectors,
      limit,
      datasetIds
    });

    // get q and a
    const [collections, dataList] = await Promise.all([
      MongoDatasetCollection.find(
        {
          _id: { $in: results.map((item) => item.collectionId) }
        },
        'name fileId rawLink'
      ).lean(),
      MongoDatasetData.find(
        {
          _id: { $in: results.map((item) => item.dataId?.trim()) }
        },
        'datasetId collectionId q a chunkIndex indexes'
      ).lean()
    ]);

    const formatResult = results
      .map((item, index) => {
        const collection = collections.find(
          (collection) => String(collection._id) === item.collectionId
        );
        const data = dataList.find((data) => String(data._id) === item.dataId);

        // if collection or data UnExist, the relational mongo data already deleted
        if (!collection || !data) return null;

        const result: SearchDataResponseItemType = {
          id: String(data._id),
          q: data.q,
          a: data.a,
          chunkIndex: data.chunkIndex,
          indexes: data.indexes,
          datasetId: String(data.datasetId),
          collectionId: String(data.collectionId),
          sourceName: collection.name || '',
          sourceId: collection?.fileId || collection?.rawLink,
          score: [{ type: SearchScoreTypeEnum.embedding, value: item.score, index }]
        };

        return result;
      })
      .filter((item) => item !== null) as SearchDataResponseItemType[];

    return {
      embeddingRecallResults: formatResult,
      tokens
    };
  };
  // Mongo $text full-text recall per dataset, merged and re-sorted by textScore.
  const fullTextRecall = async ({
    query,
    limit
  }: {
    query: string;
    limit: number;
  }): Promise<{
    fullTextRecallResults: SearchDataResponseItemType[];
    tokenLen: number;
  }> => {
    if (limit === 0) {
      return {
        fullTextRecallResults: [],
        tokenLen: 0
      };
    }

    let searchResults = (
      await Promise.all(
        datasetIds.map((id) =>
          MongoDatasetData.find(
            {
              datasetId: id,
              $text: { $search: jiebaSplit({ text: query }) }
            },
            {
              score: { $meta: 'textScore' },
              _id: 1,
              datasetId: 1,
              collectionId: 1,
              q: 1,
              a: 1,
              indexes: 1,
              chunkIndex: 1
            }
          )
            .sort({ score: { $meta: 'textScore' } })
            .limit(limit)
            .lean()
        )
      )
    ).flat() as (DatasetDataSchemaType & { score: number })[];

    // resort
    searchResults.sort((a, b) => b.score - a.score);
    // NOTE(review): slice() returns a new array and its result is discarded —
    // this line has no effect; likely intended `searchResults = searchResults.slice(0, limit)`.
    searchResults.slice(0, limit);

    const collections = await MongoDatasetCollection.find(
      {
        _id: { $in: searchResults.map((item) => item.collectionId) }
      },
      '_id name fileId rawLink'
    );

    return {
      fullTextRecallResults: searchResults.map((item, index) => {
        const collection = collections.find((col) => String(col._id) === String(item.collectionId));
        return {
          id: String(item._id),
          datasetId: String(item.datasetId),
          collectionId: String(item.collectionId),
          sourceName: collection?.name || '',
          sourceId: collection?.fileId || collection?.rawLink,
          q: item.q,
          a: item.a,
          chunkIndex: item.chunkIndex,
          indexes: item.indexes,
          score: [{ type: SearchScoreTypeEnum.fullText, value: item.score, index }]
        };
      }),
      tokenLen: 0
    };
  };
  // Re-rank candidates against the raw query; returns [] on any error so a
  // re-rank outage degrades gracefully instead of failing the whole search.
  const reRankSearchResult = async ({
    data,
    query
  }: {
    data: SearchDataResponseItemType[];
    query: string;
  }): Promise<SearchDataResponseItemType[]> => {
    try {
      const results = await reRankRecall({
        query,
        inputs: data.map((item) => ({
          id: item.id,
          text: `${item.q}\n${item.a}`
        }))
      });

      if (!Array.isArray(results)) return [];

      // add new score to data
      const mergeResult = results
        .map((item, index) => {
          const target = data.find((dataItem) => dataItem.id === item.id);
          if (!target) return null;
          const score = item.score || 0;

          return {
            ...target,
            score: [{ type: SearchScoreTypeEnum.reRank, value: score, index }]
          };
        })
        .filter(Boolean) as SearchDataResponseItemType[];

      return mergeResult;
    } catch (error) {
      return [];
    }
  };
  // Trim the ordered result list to the token budget (with 500-token slack);
  // always returns at least one item.
  const filterResultsByMaxTokens = (list: SearchDataResponseItemType[], maxTokens: number) => {
    const results: SearchDataResponseItemType[] = [];
    let totalTokens = 0;

    for (let i = 0; i < list.length; i++) {
      const item = list[i];
      totalTokens += countPromptTokens(item.q + item.a);
      if (totalTokens > maxTokens + 500) {
        break;
      }
      results.push(item);
      if (totalTokens > maxTokens) {
        break;
      }
    }

    return results.length === 0 ? list.slice(0, 1) : results;
  };
  // Run both recall channels for every query. Currently only the FIRST query's
  // result lists are returned (token usage is summed across all queries).
  const multiQueryRecall = async ({
    embeddingLimit,
    fullTextLimit
  }: {
    embeddingLimit: number;
    fullTextLimit: number;
  }) => {
    // In a group n recall, as long as one of the data appears minAmount of times, it is retained
    // NOTE(review): getIntersection is defined but never used in this function.
    const getIntersection = (resultList: SearchDataResponseItemType[][], minAmount = 1) => {
      minAmount = Math.min(resultList.length, minAmount);

      const map: Record<
        string,
        {
          amount: number;
          data: SearchDataResponseItemType;
        }
      > = {};

      for (const list of resultList) {
        for (const item of list) {
          map[item.id] = map[item.id]
            ? {
                amount: map[item.id].amount + 1,
                data: item
              }
            : {
                amount: 1,
                data: item
              };
        }
      }

      return Object.values(map)
        .filter((item) => item.amount >= minAmount)
        .map((item) => item.data);
    };

    // multi query recall
    const embeddingRecallResList: SearchDataResponseItemType[][] = [];
    const fullTextRecallResList: SearchDataResponseItemType[][] = [];
    let embTokens = 0;
    // NOTE(review): `queries` is a plain array — `for await` works but a plain
    // `for..of` would express the same sequential iteration.
    for await (const query of queries) {
      const [{ tokens, embeddingRecallResults }, { fullTextRecallResults }] = await Promise.all([
        embeddingRecall({
          query,
          limit: embeddingLimit
        }),
        fullTextRecall({
          query,
          limit: fullTextLimit
        })
      ]);
      embTokens += tokens;

      embeddingRecallResList.push(embeddingRecallResults);
      fullTextRecallResList.push(fullTextRecallResults);
    }

    return {
      tokens: embTokens,
      embeddingRecallResults: embeddingRecallResList[0],
      fullTextRecallResults: fullTextRecallResList[0]
    };
  };
  // Reciprocal Rank Fusion: fuse multiple ranked lists; each item's fused score
  // is the sum of 1/(k+rank) over every list it appears in.
  const rrfConcat = (
    arr: { k: number; list: SearchDataResponseItemType[] }[]
  ): SearchDataResponseItemType[] => {
    const map = new Map<string, SearchDataResponseItemType & { rrfScore: number }>();

    // rrf
    arr.forEach((item) => {
      const k = item.k;

      item.list.forEach((data, index) => {
        const rank = index + 1;
        const score = 1 / (k + rank);

        const record = map.get(data.id);
        if (record) {
          // merge the two score lists; for scores of the same type, keep the max value
          const concatScore = [...record.score];
          for (const dataItem of data.score) {
            const sameScore = concatScore.find((item) => item.type === dataItem.type);
            if (sameScore) {
              sameScore.value = Math.max(sameScore.value, dataItem.value);
            } else {
              concatScore.push(dataItem);
            }
          }

          map.set(data.id, {
            ...record,
            score: concatScore,
            rrfScore: record.rrfScore + score
          });
        } else {
          map.set(data.id, {
            ...data,
            rrfScore: score
          });
        }
      });
    });

    // sort
    const mapArray = Array.from(map.values());
    const results = mapArray.sort((a, b) => b.rrfScore - a.rrfScore);

    // expose the fused score as an rrf-typed entry, then drop the temp field
    return results.map((item, index) => {
      item.score.push({
        type: SearchScoreTypeEnum.rrf,
        value: item.rrfScore,
        index
      });
      // @ts-ignore
      delete item.rrfScore;
      return item;
    });
  };

  /* main step */
  // count limit
  const { embeddingLimit, fullTextLimit } = countRecallLimit();
  // recall
  const { embeddingRecallResults, fullTextRecallResults, tokens } = await multiQueryRecall({
    embeddingLimit,
    fullTextLimit
  });

  // ReRank results
  const reRankResults = await (async () => {
    if (!usingReRank) return [];

    // union of both channels, embedding results first, without id duplicates
    set = new Set<string>(embeddingRecallResults.map((item) => item.id));
    const concatRecallResults = embeddingRecallResults.concat(
      fullTextRecallResults.filter((item) => !set.has(item.id))
    );

    // remove same q and a data
    set = new Set<string>();
    const filterSameDataResults = concatRecallResults.filter((item) => {
      // strip all punctuation/whitespace etc. and compare only the text content
      const str = hashStr(`${item.q}${item.a}`.replace(/[^\p{L}\p{N}]/gu, ''));
      if (set.has(str)) return false;
      set.add(str);
      return true;
    });
    return reRankSearchResult({
      query: rawQuery,
      data: filterSameDataResults
    });
  })();

  // embedding recall and fullText recall rrf concat
  const rrfConcatResults = rrfConcat([
    { k: 60, list: embeddingRecallResults },
    { k: 60, list: fullTextRecallResults },
    { k: 60, list: reRankResults }
  ]);

  // remove same q and a data
  set = new Set<string>();
  const filterSameDataResults = rrfConcatResults.filter((item) => {
    // strip all punctuation/whitespace etc. and compare only the text content
    const str = hashStr(`${item.q}${item.a}`.replace(/[^\p{L}\p{N}]/gu, ''));
    if (set.has(str)) return false;
    set.add(str);
    return true;
  });

  // score filter
  const scoreFilter = (() => {
    // when re-ranking, the similarity threshold applies to the reRank score
    if (usingReRank) {
      usingSimilarityFilter = true;
      return filterSameDataResults.filter((item) => {
        const reRankScore = item.score.find((item) => item.type === SearchScoreTypeEnum.reRank);
        if (reRankScore && reRankScore.value < similarity) return false;
        return true;
      });
    }
    // embedding-only mode: threshold applies to the embedding score
    if (searchMode === DatasetSearchModeEnum.embedding) {
      return filterSameDataResults.filter((item) => {
        usingSimilarityFilter = true;
        const embeddingScore = item.score.find(
          (item) => item.type === SearchScoreTypeEnum.embedding
        );
        if (embeddingScore && embeddingScore.value < similarity) return false;
        return true;
      });
    }

    return filterSameDataResults;
  })();

  return {
    searchRes: filterResultsByMaxTokens(scoreFilter, maxTokens),
    tokens,
    usingSimilarityFilter
  };
}

View File

@@ -1,478 +0,0 @@
import { DatasetSearchModeEnum, PgDatasetTableName } from '@fastgpt/global/core/dataset/constant';
import type {
DatasetDataSchemaType,
SearchDataResponseItemType
} from '@fastgpt/global/core/dataset/type.d';
import { PgClient } from '@fastgpt/service/common/pg';
import { getVectorsByText } from '@/service/core/ai/vector';
import { delay } from '@fastgpt/global/common/system/utils';
import { PgSearchRawType } from '@fastgpt/global/core/dataset/api';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { jiebaSplit } from '../utils';
import { reRankRecall } from '../../ai/rerank';
import { countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import { hashStr } from '@fastgpt/global/common/string/tools';
/**
 * Embed a dataset record's text and persist the resulting vector into the pg
 * vector table, linking it back to its mongo data record.
 * The whole embed + insert is retried on failure (default 3 extra attempts),
 * pausing 500ms between attempts.
 */
export async function insertData2Pg(props: {
  mongoDataId: string;
  input: string;
  model: string;
  teamId: string;
  tmbId: string;
  datasetId: string;
  collectionId: string;
  retry?: number;
}): Promise<{ insertId: string; vectors: number[][]; tokenLen: number }> {
  const { mongoDataId, input, model, teamId, tmbId, datasetId, collectionId, retry = 3 } = props;
  try {
    // embed the input text with the configured vector model
    const { vectors, tokenLen } = await getVectorsByText({
      model,
      input: [input]
    });

    // single row: the vector plus owner/location ids pointing back to mongo
    const row = [
      { key: 'vector', value: `[${vectors[0]}]` },
      { key: 'team_id', value: String(teamId) },
      { key: 'tmb_id', value: String(tmbId) },
      { key: 'dataset_id', value: datasetId },
      { key: 'collection_id', value: collectionId },
      { key: 'data_id', value: String(mongoDataId) }
    ];
    const insertResult = await PgClient.insert(PgDatasetTableName, {
      values: [row]
    });

    return {
      insertId: insertResult.rows[0].id,
      vectors,
      tokenLen
    };
  } catch (error) {
    // attempts exhausted — surface the last error to the caller
    if (retry <= 0) {
      return Promise.reject(error);
    }
    await delay(500);
    return insertData2Pg({
      ...props,
      retry: retry - 1
    });
  }
}
/**
 * Re-embed the given text and overwrite the stored vector for a pg row.
 * Makes up to 3 attempts in total (one try plus two retries), waiting 500ms
 * between failed attempts.
 */
export async function updatePgDataById({
  id,
  input,
  model
}: {
  id: string;
  input: string;
  model: string;
}) {
  const maxAttempts = 3;
  let lastError: unknown;

  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    try {
      // embed the new text
      const { vectors, tokenLen } = await getVectorsByText({
        model,
        input: [input]
      });
      // overwrite the vector column for this row
      await PgClient.update(PgDatasetTableName, {
        where: [['id', id]],
        values: [{ key: 'vector', value: `[${vectors[0]}]` }]
      });
      return {
        vectors,
        tokenLen
      };
    } catch (error) {
      lastError = error;
      // no delay after the final failure — reject immediately below
      if (attempt < maxAttempts - 1) {
        await delay(500);
      }
    }
  }
  return Promise.reject(lastError);
}
// ------------------ search start ------------------
// Parameters shared by the dataset search entry point.
type SearchProps = {
  model: string; // vector model id used to embed the queries
  similarity?: number; // min distance — recall results scoring below this are filtered out
  limit: number; // max Token limit for the combined returned chunks
  datasetIds: string[]; // datasets to search across
  searchMode?: `${DatasetSearchModeEnum}`; // embedding | embeddingReRank | embFullTextReRank
};
/**
 * Search dataset chunks for a set of queries.
 *
 * Pipeline: per-query embedding recall (+ optional full-text recall) ->
 * cross-query intersection -> merge & de-duplicate by content hash ->
 * optional reRank against the raw query -> similarity filter ->
 * token-budget truncation.
 *
 * @param props.rawQuery the user's original question (used for reRank scoring)
 * @param props.queries  expanded query list; each query is recalled independently
 * @returns searchRes: final chunk list within the token budget;
 *          tokenLen: embedding tokens consumed during recall
 */
export async function searchDatasetData(
  props: SearchProps & { rawQuery: string; queries: string[] }
) {
  let {
    rawQuery,
    queries,
    model,
    similarity = 0,
    limit: maxTokens,
    searchMode = DatasetSearchModeEnum.embedding,
    datasetIds = []
  } = props;

  /* init params */
  // reRank / full-text modes require the plugin service; otherwise force pure embedding
  searchMode = global.systemEnv?.pluginBaseUrl ? searchMode : DatasetSearchModeEnum.embedding;

  // Compatible with topk limit: tiny values are legacy "top k" settings, not token budgets
  if (maxTokens < 50) {
    maxTokens = 1500;
  }
  // reRank is active only when a reRank model is configured AND the mode requests it
  const rerank =
    global.reRankModels?.[0] &&
    (searchMode === DatasetSearchModeEnum.embeddingReRank ||
      searchMode === DatasetSearchModeEnum.embFullTextReRank);

  let set = new Set<string>();

  /* function */
  // Estimate how many rows to recall from the token budget (assumes ~50 tokens/chunk)
  const countRecallLimit = () => {
    const oneChunkToken = 50;
    const estimatedLen = Math.max(20, Math.ceil(maxTokens / oneChunkToken));

    // Increase search range, reduce hnsw loss. 20 ~ 100
    if (searchMode === DatasetSearchModeEnum.embedding) {
      return {
        embeddingLimit: Math.min(estimatedLen, 100),
        fullTextLimit: 0
      };
    }
    // 50 < 2*limit < value < 100
    if (searchMode === DatasetSearchModeEnum.embeddingReRank) {
      return {
        embeddingLimit: Math.min(100, Math.max(50, estimatedLen * 2)),
        fullTextLimit: 0
      };
    }
    // 50 < 2*limit < embedding < 80
    // 20 < limit < fullTextLimit < 40
    return {
      embeddingLimit: Math.min(80, Math.max(50, estimatedLen * 2)),
      fullTextLimit: Math.min(40, Math.max(20, estimatedLen))
    };
  };

  // Vector similarity recall against pg (inner product, hnsw index)
  const embeddingRecall = async ({ query, limit }: { query: string; limit: number }) => {
    const { vectors, tokenLen } = await getVectorsByText({
      model,
      input: [query]
    });

    // NOTE(review): datasetIds and the vector are interpolated directly into SQL.
    // They are expected to be ObjectIds / numbers from trusted code — confirm upstream.
    const results: any = await PgClient.query(
      `BEGIN;
    SET LOCAL hnsw.ef_search = ${global.systemEnv.pgHNSWEfSearch || 100};
    select id, collection_id, data_id, (vector <#> '[${vectors[0]}]') * -1 AS score 
      from ${PgDatasetTableName} 
      where dataset_id IN (${datasetIds.map((id) => `'${String(id)}'`).join(',')})
          ${rerank ? '' : `AND vector <#> '[${vectors[0]}]' < -${similarity}`}
      order by score desc limit ${limit};
    COMMIT;`
    );

    // statement 2 of the transaction is the SELECT; presumably always present — verify
    const rows = results?.[2]?.rows as PgSearchRawType[];

    // concat same data_id: keep only the best-scored row per mongo data record
    const filterRows: PgSearchRawType[] = [];
    let set = new Set<string>();
    for (const row of rows) {
      if (!set.has(row.data_id)) {
        filterRows.push(row);
        set.add(row.data_id);
      }
    }

    // get q and a from mongo for the surviving rows
    const [collections, dataList] = await Promise.all([
      MongoDatasetCollection.find(
        {
          _id: { $in: filterRows.map((item) => item.collection_id) }
        },
        'name fileId rawLink'
      ).lean(),
      MongoDatasetData.find(
        {
          _id: { $in: filterRows.map((item) => item.data_id?.trim()) }
        },
        'datasetId collectionId q a chunkIndex indexes'
      ).lean()
    ]);

    const formatResult = filterRows
      .map((item) => {
        const collection = collections.find(
          (collection) => String(collection._id) === item.collection_id
        );
        const data = dataList.find((data) => String(data._id) === item.data_id);

        // if collection or data UnExist, the relational mongo data already deleted
        if (!collection || !data) return null;

        return {
          id: String(data._id),
          q: data.q,
          a: data.a,
          chunkIndex: data.chunkIndex,
          indexes: data.indexes,
          datasetId: String(data.datasetId),
          collectionId: String(data.collectionId),
          sourceName: collection.name || '',
          sourceId: collection?.fileId || collection?.rawLink,
          score: item.score
        };
      })
      .filter((item) => item !== null) as SearchDataResponseItemType[];

    return {
      embeddingRecallResults: formatResult,
      tokenLen
    };
  };

  // Mongo $text full-text recall across the selected datasets
  const fullTextRecall = async ({
    query,
    limit
  }: {
    query: string;
    limit: number;
  }): Promise<{
    fullTextRecallResults: SearchDataResponseItemType[];
    tokenLen: number;
  }> => {
    if (limit === 0) {
      return {
        fullTextRecallResults: [],
        tokenLen: 0
      };
    }

    let searchResults = (
      await Promise.all(
        datasetIds.map((id) =>
          MongoDatasetData.find(
            {
              datasetId: id,
              $text: { $search: jiebaSplit({ text: query }) }
            },
            {
              score: { $meta: 'textScore' },
              _id: 1,
              datasetId: 1,
              collectionId: 1,
              q: 1,
              a: 1,
              indexes: 1,
              chunkIndex: 1
            }
          )
            .sort({ score: { $meta: 'textScore' } })
            .limit(limit)
            .lean()
        )
      )
    ).flat() as (DatasetDataSchemaType & { score: number })[];

    // resort: each dataset returned up to `limit` rows; merge-sort by score desc
    searchResults.sort((a, b) => b.score - a.score);
    // BUG FIX: Array.prototype.slice returns a new array and does not mutate;
    // the previous code discarded the result, so the merged list was never
    // truncated to `limit`. Reassign to actually apply the cut.
    searchResults = searchResults.slice(0, limit);

    const collections = await MongoDatasetCollection.find(
      {
        _id: { $in: searchResults.map((item) => item.collectionId) }
      },
      '_id name fileId rawLink'
    );

    return {
      fullTextRecallResults: searchResults.map((item) => {
        const collection = collections.find((col) => String(col._id) === String(item.collectionId));
        return {
          id: String(item._id),
          datasetId: String(item.datasetId),
          collectionId: String(item.collectionId),
          sourceName: collection?.name || '',
          sourceId: collection?.fileId || collection?.rawLink,
          q: item.q,
          a: item.a,
          chunkIndex: item.chunkIndex,
          indexes: item.indexes,
          // @ts-ignore
          score: item.score
        };
      }),
      tokenLen: 0
    };
  };

  // Re-score `data` against `query` with the reRank model; falls back to the
  // original list (and original scores) on any failure.
  const reRankSearchResult = async ({
    data,
    query
  }: {
    data: SearchDataResponseItemType[];
    query: string;
  }): Promise<SearchDataResponseItemType[]> => {
    try {
      const results = await reRankRecall({
        query,
        inputs: data.map((item) => ({
          id: item.id,
          text: `${item.q}\n${item.a}`
        }))
      });

      if (!Array.isArray(results)) return data;

      // add new score to data; items the reRanker did not return are dropped
      const mergeResult = results
        .map((item) => {
          const target = data.find((dataItem) => dataItem.id === item.id);
          if (!target) return null;
          return {
            ...target,
            score: item.score || target.score
          };
        })
        .filter(Boolean) as SearchDataResponseItemType[];

      return mergeResult;
    } catch (error) {
      return data;
    }
  };

  // Keep results while the running token count stays within budget (+500 slack);
  // always return at least one item so the caller never gets an empty answer set.
  const filterResultsByMaxTokens = (list: SearchDataResponseItemType[], maxTokens: number) => {
    const results: SearchDataResponseItemType[] = [];
    let totalTokens = 0;

    for (let i = 0; i < list.length; i++) {
      const item = list[i];
      // NOTE(review): item.a may be undefined here, concatenating "undefined"
      // into the token count — confirm the schema guarantees a string.
      totalTokens += countPromptTokens(item.q + item.a);
      if (totalTokens > maxTokens + 500) {
        break;
      }
      results.push(item);
      if (totalTokens > maxTokens) {
        break;
      }
    }

    return results.length === 0 ? list.slice(0, 1) : results;
  };

  // Run both recalls for every query and keep only items recalled by >= 2 queries
  const multiQueryRecall = async ({
    embeddingLimit,
    fullTextLimit
  }: {
    embeddingLimit: number;
    fullTextLimit: number;
  }) => {
    // In a group n recall, as long as one of the data appears minAmount of times, it is retained
    const getIntersection = (resultList: SearchDataResponseItemType[][], minAmount = 1) => {
      minAmount = Math.min(resultList.length, minAmount);

      const map: Record<
        string,
        {
          amount: number;
          data: SearchDataResponseItemType;
        }
      > = {};

      for (const list of resultList) {
        for (const item of list) {
          map[item.id] = map[item.id]
            ? {
                amount: map[item.id].amount + 1,
                data: item
              }
            : {
                amount: 1,
                data: item
              };
        }
      }

      return Object.values(map)
        .filter((item) => item.amount >= minAmount)
        .map((item) => item.data);
    };

    // multi query recall
    const embeddingRecallResList: SearchDataResponseItemType[][] = [];
    const fullTextRecallResList: SearchDataResponseItemType[][] = [];
    let embTokens = 0;
    for await (const query of queries) {
      const [{ tokenLen, embeddingRecallResults }, { fullTextRecallResults }] = await Promise.all([
        embeddingRecall({
          query,
          limit: embeddingLimit
        }),
        fullTextRecall({
          query,
          limit: fullTextLimit
        })
      ]);
      embTokens += tokenLen;

      embeddingRecallResList.push(embeddingRecallResults);
      fullTextRecallResList.push(fullTextRecallResults);
    }

    return {
      tokens: embTokens,
      embeddingRecallResults: getIntersection(embeddingRecallResList, 2),
      fullTextRecallResults: getIntersection(fullTextRecallResList, 2)
    };
  };

  /* main step */
  // count limit
  const { embeddingLimit, fullTextLimit } = countRecallLimit();

  // recall
  const { embeddingRecallResults, fullTextRecallResults, tokens } = await multiQueryRecall({
    embeddingLimit,
    fullTextLimit
  });

  // concat recall results: embedding first, then full-text rows not already present
  set = new Set<string>(embeddingRecallResults.map((item) => item.id));
  const concatRecallResults = embeddingRecallResults.concat(
    fullTextRecallResults.filter((item) => !set.has(item.id))
  );

  // remove same q and a data
  set = new Set<string>();
  const filterSameDataResults = concatRecallResults.filter((item) => {
    // 删除所有的标点符号与空格等,只对文本进行比较
    const str = hashStr(`${item.q}${item.a}`.replace(/[^\p{L}\p{N}]/gu, ''));
    if (set.has(str)) return false;
    set.add(str);
    return true;
  });

  // Without reRank: filter by embedding score and truncate by tokens
  if (!rerank) {
    return {
      searchRes: filterResultsByMaxTokens(
        filterSameDataResults.filter((item) => item.score >= similarity),
        maxTokens
      ),
      tokenLen: tokens
    };
  }

  // ReRank results against the raw query, then apply the similarity filter
  const reRankResults = (
    await reRankSearchResult({
      query: rawQuery,
      data: filterSameDataResults
    })
  ).filter((item) => item.score > similarity);

  return {
    searchRes: filterResultsByMaxTokens(
      reRankResults.filter((item) => item.score >= similarity),
      maxTokens
    ),
    tokenLen: tokens
  };
}
// ------------------ search end ------------------

View File

@@ -1,6 +1,4 @@
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { cut } from '@node-rs/jieba';
import { stopWords } from '@fastgpt/global/common/string/jieba';
/**
* Same value judgment
@@ -24,14 +22,3 @@ export async function hasSameValue({
return Promise.reject('已经存在完全一致的数据');
}
}
/**
 * Tokenize `text` with jieba (full-cut mode) into a space-separated keyword
 * string suitable for mongo $text search. Non CJK/alphanumeric characters are
 * stripped from each token and stop words are dropped.
 */
export function jiebaSplit({ text }: { text: string }) {
  const keywords: string[] = [];
  for (const token of cut(text, true)) {
    // keep only Chinese characters, latin letters, digits and whitespace
    const cleaned = token.replace(/[^\u4e00-\u9fa5a-zA-Z0-9\s]/g, '').trim();
    if (cleaned && !stopWords.has(cleaned)) {
      keywords.push(cleaned);
    }
  }
  return keywords.join(' ') || '';
}

View File

@@ -136,7 +136,6 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
stream: false
});
const answer = chatResponse.choices?.[0].message?.content || '';
const totalTokens = chatResponse.usage?.total_tokens || 0;
const qaArr = formatSplitText(answer, text); // 格式化后的QA对
@@ -167,7 +166,8 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
pushQABill({
teamId: data.teamId,
tmbId: data.tmbId,
totalTokens,
inputTokens: chatResponse.usage?.prompt_tokens || 0,
outputTokens: chatResponse.usage?.completion_tokens || 0,
billId: data.billId,
model
});

View File

@@ -129,7 +129,7 @@ export async function generateVector(): Promise<any> {
}
// insert data to pg
const { tokenLen } = await insertData2Dataset({
const { tokens } = await insertData2Dataset({
teamId: data.teamId,
tmbId: data.tmbId,
datasetId: data.datasetId,
@@ -145,7 +145,7 @@ export async function generateVector(): Promise<any> {
pushGenerateVectorBill({
teamId: data.teamId,
tmbId: data.tmbId,
tokenLen: tokenLen,
tokens,
model: data.model,
billId: data.billId
});

View File

@@ -9,8 +9,9 @@ import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { Prompt_CQJson } from '@/global/core/prompt/agent';
import { FunctionModelItemType } from '@fastgpt/global/core/ai/model.d';
import { getCQModel } from '@/service/core/ai/model';
import { ModelTypeEnum, getCQModel } from '@/service/core/ai/model';
import { getHistories } from '../utils';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.aiModel]: string;
@@ -42,7 +43,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
const chatHistories = getHistories(history, histories);
const { arg, tokens } = await (async () => {
const { arg, inputTokens, outputTokens } = await (async () => {
if (cqModel.toolChoice) {
return toolChoice({
...props,
@@ -59,13 +60,21 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
const result = agents.find((item) => item.key === arg?.type) || agents[agents.length - 1];
const { total, modelName } = formatModelPrice2Store({
model: cqModel.model,
inputLen: inputTokens,
outputLen: outputTokens,
type: ModelTypeEnum.cq
});
return {
[result.key]: result.value,
[ModuleOutputKeyEnum.responseData]: {
price: user.openaiAccount?.key ? 0 : cqModel.price * tokens,
model: cqModel.name || '',
price: user.openaiAccount?.key ? 0 : total,
model: modelName,
query: userChatInput,
tokens,
inputTokens,
outputTokens,
cqList: agents,
cqResult: result.value,
contextTotalLen: chatHistories.length + 2
@@ -140,7 +149,8 @@ ${systemPrompt}
return {
arg,
tokens: response.usage?.total_tokens || 0
inputTokens: response.usage?.prompt_tokens || 0,
outputTokens: response.usage?.completion_tokens || 0
};
} catch (error) {
console.log(agentFunction.parameters);
@@ -150,7 +160,8 @@ ${systemPrompt}
return {
arg: {},
tokens: 0
inputTokens: 0,
outputTokens: 0
};
}
}
@@ -182,12 +193,12 @@ Human:${userChatInput}`
stream: false
});
const answer = data.choices?.[0].message?.content || '';
const totalTokens = data.usage?.total_tokens || 0;
const id = agents.find((item) => answer.includes(item.key))?.key || '';
return {
tokens: totalTokens,
inputTokens: data.usage?.prompt_tokens || 0,
outputTokens: data.usage?.completion_tokens || 0,
arg: { type: id }
};
}

View File

@@ -10,7 +10,8 @@ import { Prompt_ExtractJson } from '@/global/core/prompt/agent';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { FunctionModelItemType } from '@fastgpt/global/core/ai/model.d';
import { getHistories } from '../utils';
import { getExtractModel } from '@/service/core/ai/model';
import { ModelTypeEnum, getExtractModel } from '@/service/core/ai/model';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.history]?: ChatItemType[];
@@ -42,7 +43,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
const extractModel = getExtractModel(model);
const chatHistories = getHistories(history, histories);
const { arg, tokens } = await (async () => {
const { arg, inputTokens, outputTokens } = await (async () => {
if (extractModel.toolChoice) {
return toolChoice({
...props,
@@ -79,16 +80,24 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
}
}
const { total, modelName } = formatModelPrice2Store({
model: extractModel.model,
inputLen: inputTokens,
outputLen: outputTokens,
type: ModelTypeEnum.extract
});
return {
[ModuleOutputKeyEnum.success]: success ? true : undefined,
[ModuleOutputKeyEnum.failed]: success ? undefined : true,
[ModuleOutputKeyEnum.contextExtractFields]: JSON.stringify(arg),
...arg,
[ModuleOutputKeyEnum.responseData]: {
price: user.openaiAccount?.key ? 0 : extractModel.price * tokens,
model: extractModel.name || '',
price: user.openaiAccount?.key ? 0 : total,
model: modelName,
query: content,
tokens,
inputTokens,
outputTokens,
extractDescription: description,
extractResult: arg,
contextTotalLen: chatHistories.length + 2
@@ -181,10 +190,10 @@ ${description || '根据用户要求获取适当的 JSON 字符串。'}
}
})();
const tokens = response.usage?.total_tokens || 0;
return {
rawResponse: response?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || '',
tokens,
inputTokens: response.usage?.prompt_tokens || 0,
outputTokens: response.usage?.completion_tokens || 0,
arg
};
}
@@ -223,7 +232,8 @@ Human: ${content}`
stream: false
});
const answer = data.choices?.[0].message?.content || '';
const totalTokens = data.usage?.total_tokens || 0;
const inputTokens = data.usage?.prompt_tokens || 0;
const outputTokens = data.usage?.completion_tokens || 0;
// parse response
const start = answer.indexOf('{');
@@ -232,7 +242,8 @@ Human: ${content}`
if (start === -1 || end === -1)
return {
rawResponse: answer,
tokens: totalTokens,
inputTokens,
outputTokens,
arg: {}
};
@@ -244,13 +255,15 @@ Human: ${content}`
try {
return {
rawResponse: answer,
tokens: totalTokens,
inputTokens,
outputTokens,
arg: JSON.parse(jsonStr) as Record<string, any>
};
} catch (error) {
return {
rawResponse: answer,
tokens: totalTokens,
inputTokens,
outputTokens,
arg: {}
};
}

View File

@@ -6,7 +6,7 @@ import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant'
import { textAdaptGptResponse } from '@/utils/adapt';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import type { ChatCompletion, StreamChatType } from '@fastgpt/global/core/ai/type.d';
import { countModelPrice } from '@/service/support/wallet/bill/utils';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
import type { ChatModelItemType } from '@fastgpt/global/core/ai/model.d';
import { postTextCensor } from '@/service/common/censor';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constant';
@@ -151,7 +151,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
}
);
const { answerText, totalTokens, completeMessages } = await (async () => {
const { answerText, inputTokens, outputTokens, completeMessages } = await (async () => {
if (stream) {
// sse response
const { answer } = await streamResponse({
@@ -165,21 +165,26 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
value: answer
});
const totalTokens = countMessagesTokens({
messages: completeMessages
});
targetResponse({ res, detail, outputs });
return {
answerText: answer,
totalTokens,
inputTokens: countMessagesTokens({
messages: filterMessages
}),
outputTokens: countMessagesTokens({
messages: [
{
obj: ChatRoleEnum.AI,
value: answer
}
]
}),
completeMessages
};
} else {
const unStreamResponse = response as ChatCompletion;
const answer = unStreamResponse.choices?.[0]?.message?.content || '';
const totalTokens = unStreamResponse.usage?.total_tokens || 0;
const completeMessages = filterMessages.concat({
obj: ChatRoleEnum.AI,
@@ -188,20 +193,27 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
return {
answerText: answer,
totalTokens,
inputTokens: unStreamResponse.usage?.prompt_tokens || 0,
outputTokens: unStreamResponse.usage?.completion_tokens || 0,
completeMessages
};
}
})();
const { total, modelName } = formatModelPrice2Store({
model,
inputLen: inputTokens,
outputLen: outputTokens,
type: ModelTypeEnum.chat
});
return {
answerText,
responseData: {
price: user.openaiAccount?.key
? 0
: countModelPrice({ model, tokens: totalTokens, type: ModelTypeEnum.chat }),
model: modelConstantsData.name,
tokens: totalTokens,
price: user.openaiAccount?.key ? 0 : total,
model: modelName,
inputTokens,
outputTokens,
query: userChatInput,
maxToken: max_tokens,
quoteList: filterQuoteQA,
@@ -227,8 +239,7 @@ function filterQuote({
a: item.a,
source: item.sourceName,
sourceId: String(item.sourceId || 'UnKnow'),
index: index + 1,
score: item.score?.toFixed(4)
index: index + 1
});
}

View File

@@ -1,13 +1,12 @@
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
import { countModelPrice } from '@/service/support/wallet/bill/utils';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
import type { SelectedDatasetType } from '@fastgpt/global/core/module/api.d';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { ModelTypeEnum } from '@/service/core/ai/model';
import { searchDatasetData } from '@/service/core/dataset/data/pg';
import { searchDatasetData } from '@/service/core/dataset/data/controller';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constant';
import { searchQueryExtension } from '@fastgpt/service/core/ai/functions/queryExtension';
type DatasetSearchProps = ModuleDispatchProps<{
[ModuleInputKeyEnum.datasetSelectList]: SelectedDatasetType;
@@ -15,6 +14,7 @@ type DatasetSearchProps = ModuleDispatchProps<{
[ModuleInputKeyEnum.datasetLimit]: number;
[ModuleInputKeyEnum.datasetSearchMode]: `${DatasetSearchModeEnum}`;
[ModuleInputKeyEnum.userChatInput]: string;
[ModuleInputKeyEnum.datasetSearchUsingReRank]: boolean;
}>;
export type DatasetSearchResponse = {
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
@@ -27,7 +27,7 @@ export async function dispatchDatasetSearch(
props: DatasetSearchProps
): Promise<DatasetSearchResponse> {
const {
inputs: { datasets = [], similarity = 0.4, limit = 5, searchMode, userChatInput }
inputs: { datasets = [], similarity, limit = 1500, usingReRank, searchMode, userChatInput }
} = props as DatasetSearchProps;
if (!Array.isArray(datasets)) {
@@ -52,14 +52,21 @@ export async function dispatchDatasetSearch(
const concatQueries = [userChatInput];
// start search
const { searchRes, tokenLen } = await searchDatasetData({
const { searchRes, tokens, usingSimilarityFilter } = await searchDatasetData({
rawQuery: userChatInput,
queries: concatQueries,
model: vectorModel.model,
similarity,
limit,
datasetIds: datasets.map((item) => item.datasetId),
searchMode
searchMode,
usingReRank
});
const { total, modelName } = formatModelPrice2Store({
model: vectorModel.model,
inputLen: tokens,
type: ModelTypeEnum.vector
});
return {
@@ -67,17 +74,14 @@ export async function dispatchDatasetSearch(
unEmpty: searchRes.length > 0 ? true : undefined,
quoteQA: searchRes,
responseData: {
price: countModelPrice({
model: vectorModel.model,
tokens: tokenLen,
type: ModelTypeEnum.vector
}),
price: total,
query: concatQueries.join('\n'),
model: vectorModel.name,
tokens: tokenLen,
similarity,
model: modelName,
inputTokens: tokens,
similarity: usingSimilarityFilter ? similarity : undefined,
limit,
searchMode
searchMode,
searchUsingReRank: usingReRank
}
};
}

View File

@@ -4,7 +4,8 @@ import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/mo
import { getHistories } from '../utils';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { getExtractModel } from '@/service/core/ai/model';
import { ModelTypeEnum, getExtractModel } from '@/service/core/ai/model';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.aiModel]: string;
@@ -75,13 +76,22 @@ A: ${systemPrompt}
// );
// console.log(answer);
const tokens = result.usage?.total_tokens || 0;
const inputTokens = result.usage?.prompt_tokens || 0;
const outputTokens = result.usage?.completion_tokens || 0;
const { total, modelName } = formatModelPrice2Store({
model: extractModel.model,
inputLen: inputTokens,
outputLen: outputTokens,
type: ModelTypeEnum.extract
});
return {
[ModuleOutputKeyEnum.responseData]: {
price: extractModel.price * tokens,
model: extractModel.name || '',
tokens,
price: total,
model: modelName,
inputTokens,
outputTokens,
query: userChatInput,
textOutput: answer
},

View File

@@ -1,11 +1,11 @@
import { startQueue } from './utils/tools';
import { PRICE_SCALE } from '@fastgpt/global/support/wallet/bill/constants';
import { initPg } from '@fastgpt/service/common/pg';
import { MongoUser } from '@fastgpt/service/support/user/schema';
import { connectMongo } from '@fastgpt/service/common/mongo/init';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { createDefaultTeam } from '@fastgpt/service/support/user/team/controller';
import { exit } from 'process';
import { initVectorStore } from '@fastgpt/service/common/vectorStore/controller';
/**
* connect MongoDB and init data
@@ -14,7 +14,7 @@ export function connectToDatabase(): Promise<void> {
return connectMongo({
beforeHook: () => {},
afterHook: () => {
initPg();
initVectorStore();
// start queue
startQueue();
return initRootUser();

View File

@@ -0,0 +1,22 @@
import { ConcatBillProps, CreateBillProps } from '@fastgpt/global/support/wallet/bill/api';
import { addLog } from '@fastgpt/service/common/system/log';
import { POST } from '@fastgpt/service/common/api/plusRequest';
export function createBill(data: CreateBillProps) {
if (!global.systemEnv?.pluginBaseUrl) return;
if (data.total === 0) {
addLog.info('0 Bill', data);
}
try {
POST('/support/wallet/bill/createBill', data);
} catch (error) {}
}
export function concatBill(data: ConcatBillProps) {
if (!global.systemEnv?.pluginBaseUrl) return;
if (data.total === 0) {
addLog.info('0 Bill', data);
}
try {
POST('/support/wallet/bill/concatBill', data);
} catch (error) {}
}

View File

@@ -1,30 +1,11 @@
import { BillSourceEnum, PRICE_SCALE } from '@fastgpt/global/support/wallet/bill/constants';
import { getAudioSpeechModel, getQAModel, getVectorModel } from '@/service/core/ai/model';
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { ModelTypeEnum } from '@/service/core/ai/model';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { formatStorePrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import { addLog } from '@fastgpt/service/common/system/log';
import type { ConcatBillProps, CreateBillProps } from '@fastgpt/global/support/wallet/bill/api.d';
import { POST } from '@fastgpt/service/common/api/plusRequest';
import { PostReRankProps } from '@fastgpt/global/core/ai/api';
export function createBill(data: CreateBillProps) {
if (!global.systemEnv?.pluginBaseUrl) return;
if (data.total === 0) {
addLog.info('0 Bill', data);
}
try {
POST('/support/wallet/bill/createBill', data);
} catch (error) {}
}
export function concatBill(data: ConcatBillProps) {
if (!global.systemEnv?.pluginBaseUrl) return;
if (data.total === 0) {
addLog.info('0 Bill', data);
}
try {
POST('/support/wallet/bill/concatBill', data);
} catch (error) {}
}
import { createBill, concatBill } from './controller';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
export const pushChatBill = ({
appName,
@@ -54,14 +35,15 @@ export const pushChatBill = ({
moduleName: item.moduleName,
amount: item.price || 0,
model: item.model,
tokenLen: item.tokens
inputTokens: item.inputTokens,
outputTokens: item.outputTokens
}))
});
addLog.info(`finish completions`, {
source,
teamId,
tmbId,
price: formatPrice(total)
price: formatStorePrice2Read(total)
});
return { total };
};
@@ -70,26 +52,32 @@ export const pushQABill = async ({
teamId,
tmbId,
model,
totalTokens,
inputTokens,
outputTokens,
billId
}: {
teamId: string;
tmbId: string;
model: string;
totalTokens: number;
inputTokens: number;
outputTokens: number;
billId: string;
}) => {
// 获取模型单价格
const unitPrice = getQAModel(model).price;
// 计算价格
const total = unitPrice * totalTokens;
const { total } = formatModelPrice2Store({
model,
inputLen: inputTokens,
outputLen: outputTokens,
type: ModelTypeEnum.qa
});
concatBill({
billId,
teamId,
tmbId,
total,
tokens: totalTokens,
inputTokens,
outputTokens,
listIndex: 1
});
@@ -100,22 +88,24 @@ export const pushGenerateVectorBill = ({
billId,
teamId,
tmbId,
tokenLen,
tokens,
model,
source = BillSourceEnum.fastgpt
}: {
billId?: string;
teamId: string;
tmbId: string;
tokenLen: number;
tokens: number;
model: string;
source?: `${BillSourceEnum}`;
}) => {
// 计算价格. 至少为1
const vectorModel = getVectorModel(model);
const unitPrice = vectorModel.price || 0.2;
let total = unitPrice * tokenLen;
total = total > 1 ? total : 1;
let { total, modelName } = formatModelPrice2Store({
model,
inputLen: tokens,
type: ModelTypeEnum.vector
});
total = total < 1 ? 1 : total;
// 插入 Bill 记录
if (billId) {
@@ -124,22 +114,22 @@ export const pushGenerateVectorBill = ({
tmbId,
total,
billId,
tokens: tokenLen,
inputTokens: tokens,
listIndex: 0
});
} else {
createBill({
teamId,
tmbId,
appName: '索引生成',
appName: 'wallet.moduleName.index',
total,
source,
list: [
{
moduleName: '索引生成',
moduleName: 'wallet.moduleName.index',
amount: total,
model: vectorModel.name,
tokenLen
model: modelName,
inputTokens: tokens
}
]
});
@@ -148,28 +138,37 @@ export const pushGenerateVectorBill = ({
};
export const pushQuestionGuideBill = ({
tokens,
inputTokens,
outputTokens,
teamId,
tmbId
}: {
tokens: number;
inputTokens: number;
outputTokens: number;
teamId: string;
tmbId: string;
}) => {
const qgModel = global.qgModels[0];
const total = qgModel.price * tokens;
const { total, modelName } = formatModelPrice2Store({
inputLen: inputTokens,
outputLen: outputTokens,
model: qgModel.model,
type: ModelTypeEnum.qg
});
createBill({
teamId,
tmbId,
appName: '下一步指引',
appName: 'wallet.bill.Next Step Guide',
total,
source: BillSourceEnum.fastgpt,
list: [
{
moduleName: '下一步指引',
moduleName: 'wallet.bill.Next Step Guide',
amount: total,
model: qgModel.name,
tokenLen: tokens
model: modelName,
inputTokens,
outputTokens
}
]
});
@@ -178,20 +177,24 @@ export const pushQuestionGuideBill = ({
export function pushAudioSpeechBill({
appName = 'wallet.bill.Audio Speech',
model,
textLength,
textLen,
teamId,
tmbId,
source = BillSourceEnum.fastgpt
}: {
appName?: string;
model: string;
textLength: number;
textLen: number;
teamId: string;
tmbId: string;
source: `${BillSourceEnum}`;
}) {
const modelData = getAudioSpeechModel(model);
const total = modelData.price * textLength;
const { total, modelName } = formatModelPrice2Store({
model,
inputLen: textLen,
type: ModelTypeEnum.audioSpeech
});
createBill({
teamId,
tmbId,
@@ -202,8 +205,8 @@ export function pushAudioSpeechBill({
{
moduleName: appName,
amount: total,
model: modelData.name,
tokenLen: textLength
model: modelName,
textLen
}
]
});
@@ -218,11 +221,16 @@ export function pushWhisperBill({
tmbId: string;
duration: number;
}) {
const modelData = global.whisperModel;
const whisperModel = global.whisperModel;
if (!modelData) return;
if (!whisperModel) return;
const total = ((modelData.price * duration) / 60) * PRICE_SCALE;
const { total, modelName } = formatModelPrice2Store({
model: whisperModel.model,
inputLen: duration,
type: ModelTypeEnum.whisper,
multiple: 60
});
const name = 'wallet.bill.Whisper';
@@ -236,8 +244,8 @@ export function pushWhisperBill({
{
moduleName: name,
amount: total,
model: modelData.name,
tokenLen: duration
model: modelName,
duration
}
]
});
@@ -254,13 +262,16 @@ export function pushReRankBill({
source: `${BillSourceEnum}`;
inputs: PostReRankProps['inputs'];
}) {
const model = global.reRankModels[0];
if (!model) return { total: 0 };
const reRankModel = global.reRankModels[0];
if (!reRankModel) return { total: 0 };
const textLength = inputs.reduce((sum, item) => sum + item.text.length, 0);
const ratio = textLength / 1000;
const textLen = inputs.reduce((sum, item) => sum + item.text.length, 0);
const total = Math.ceil(model.price * PRICE_SCALE * ratio);
const { total, modelName } = formatModelPrice2Store({
model: reRankModel.model,
inputLen: textLen,
type: ModelTypeEnum.rerank
});
const name = 'wallet.bill.ReRank';
createBill({
@@ -273,8 +284,8 @@ export function pushReRankBill({
{
moduleName: name,
amount: total,
model: model.name,
tokenLen: textLength
model: modelName,
textLen
}
]
});

View File

@@ -1,6 +1,6 @@
import { ModelTypeEnum, getModelMap } from '@/service/core/ai/model';
import { AuthUserTypeEnum } from '@fastgpt/global/support/permission/constant';
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { BillSourceEnum, PRICE_SCALE } from '@fastgpt/global/support/wallet/bill/constants';
export function authType2BillSource({
authType,
@@ -17,16 +17,38 @@ export function authType2BillSource({
return BillSourceEnum.fastgpt;
}
export const countModelPrice = ({
export const formatModelPrice2Store = ({
model,
tokens,
type
inputLen = 0,
outputLen = 0,
type,
multiple = 1000
}: {
model: string;
tokens: number;
inputLen: number;
outputLen?: number;
type: `${ModelTypeEnum}`;
multiple?: number;
}) => {
const modelData = getModelMap?.[type]?.(model);
if (!modelData) return 0;
return modelData.price * tokens;
if (!modelData)
return {
inputTotal: 0,
outputTotal: 0,
total: 0,
modelName: ''
};
const inputTotal = modelData.inputPrice
? Math.ceil(modelData.inputPrice * (inputLen / multiple) * PRICE_SCALE)
: 0;
const outputTotal = modelData.outputPrice
? Math.ceil(modelData.outputPrice * (outputLen / multiple) * PRICE_SCALE)
: 0;
return {
modelName: modelData.name,
inputTotal: inputTotal,
outputTotal: outputTotal,
total: inputTotal + outputTotal
};
};

View File

@@ -33,7 +33,6 @@ declare global {
var whisperModel: WhisperModelType;
var reRankModels: ReRankModelItemType[];
var priceMd: string;
var systemVersion: string;
var simpleModeTemplates: AppSimpleEditConfigTemplateType[];

View File

@@ -68,6 +68,7 @@ export function usePagination<T = any>({
<Input
defaultValue={pageNum}
w={'50px'}
h={'30px'}
size={'xs'}
type={'number'}
min={1}

View File

@@ -9,20 +9,22 @@ import type {
LLMModelItemType,
ReRankModelItemType,
VectorModelItemType,
AudioSpeechModelType
AudioSpeechModelType,
WhisperModelType
} from '@fastgpt/global/core/ai/model.d';
export let feConfigs: FastGPTFeConfigsType = {};
export let priceMd = '';
export let systemVersion = '0.0.0';
export let chatModelList: ChatModelItemType[] = [];
export let vectorModelList: VectorModelItemType[] = [];
export let qaModelList: LLMModelItemType[] = [];
export let cqModelList: FunctionModelItemType[] = [];
export let qgModelList: LLMModelItemType[] = [];
export let extractModelList: FunctionModelItemType[] = [];
export let audioSpeechModels: AudioSpeechModelType[] = [];
export let audioSpeechModelList: AudioSpeechModelType[] = [];
export let reRankModelList: ReRankModelItemType[] = [];
export let whisperModel: WhisperModelType;
export let simpleModeTemplates: AppSimpleEditConfigTemplateType[] = [];
@@ -39,11 +41,13 @@ export const clientInitData = async (): Promise<InitDateResponse> => {
qaModelList = res.qaModels ?? qaModelList;
cqModelList = res.cqModels ?? cqModelList;
extractModelList = res.extractModels ?? extractModelList;
qgModelList = res.qgModes ?? qgModelList;
audioSpeechModels = res.audioSpeechModels ?? audioSpeechModels;
audioSpeechModelList = res.audioSpeechModels ?? audioSpeechModelList;
reRankModelList = res.reRankModels ?? reRankModelList;
priceMd = res.priceMd;
whisperModel = res.whisperModel;
systemVersion = res.systemVersion;
simpleModeTemplates = res.simpleModeTemplates;

View File

@@ -11,7 +11,12 @@ import type {
DatasetUpdateBody,
PostWebsiteSyncParams
} from '@fastgpt/global/core/dataset/api.d';
import type { SearchTestProps, SearchTestResponse } from '@/global/core/dataset/api.d';
import type {
GetTrainingQueueProps,
GetTrainingQueueResponse,
SearchTestProps,
SearchTestResponse
} from '@/global/core/dataset/api.d';
import type {
PushDatasetDataProps,
UpdateDatasetDataProps,
@@ -107,7 +112,8 @@ export const delOneDatasetDataById = (dataId: string) =>
/* ================ training ==================== */
/* get length of system training queue */
export const getTrainingQueueLen = () => GET<number>(`/core/dataset/training/getQueueLen`);
export const getTrainingQueueLen = (data: GetTrainingQueueProps) =>
GET<GetTrainingQueueResponse>(`/core/dataset/training/getQueueLen`, data);
/* ================== file ======================== */
export const getFileViewUrl = (fileId: string) =>

View File

@@ -2,6 +2,7 @@ import { create } from 'zustand';
import { devtools, persist } from 'zustand/middleware';
import { immer } from 'zustand/middleware/immer';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constant';
export type SearchTestStoreItemType = {
id: string;
@@ -10,6 +11,7 @@ export type SearchTestStoreItemType = {
time: Date;
duration: string;
results: SearchDataResponseItemType[];
searchMode: `${DatasetSearchModeEnum}`;
};
type State = {

View File

@@ -340,6 +340,10 @@ export const theme = extendTheme({
1000: '#313132'
},
myGray: {
'05': 'rgba(17, 24, 36, 0.05)',
1: 'rgba(17, 24, 36, 0.1)',
15: 'rgba(17, 24, 36, 0.15)',
25: '#FBFBFC',
50: '#F7F8FA',
100: '#F4F4F7',

View File

@@ -3,7 +3,7 @@ import { devtools, persist } from 'zustand/middleware';
import { immer } from 'zustand/middleware/immer';
import type { UserUpdateParams } from '@/types/user';
import type { UserType } from '@fastgpt/global/support/user/type.d';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { formatStorePrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import { getTokenLogin, putUserInfo } from '@/web/support/user/api';
type State = {
@@ -29,7 +29,7 @@ export const useUserStore = create<State>()(
state.userInfo = user
? {
...user,
balance: formatPrice(user.balance)
balance: formatStorePrice2Read(user.balance)
}
: null;
});