4.7-alpha2 (#1027)

* feat: stop toolCall and rename some field. (#46)

* perf: node delete tip;pay tip

* fix: toolCall cannot save child answer

* feat: stop tool

* fix: team modal

* fix feedbackModal auth bug (#47)

* 简单的支持提示词运行tool。优化workflow模板 (#49)

* remove templates

* fix: request body undefined

* feat: prompt tool run

* feat: workflow templates modal

* perf: plugin start

* 4.7 (#50)

* fix docker-compose download url (#994)

The original URL was broken and returned '404 NOT FOUND'.
Fix the docker-compose download URL by adding a 'v' prefix before the docker-compose version.

* Update ai_settings.md (#1000)

* Update configuration.md

* Update configuration.md

* Fix history in classifyQuestion and extract modules (#1012)

* Fix history in classifyQuestion and extract modules

* Add chatValue2RuntimePrompt import and update text formatting

* flow controller to packages

* fix: rerank select

* modal ui

* perf: modal code path

* point not sufficient

* feat: http url support variable

* fix http key

* perf: prompt

* perf: ai setting modal

* simple edit ui

---------

Co-authored-by: entorick <entorick11@qq.com>
Co-authored-by: liujianglc <liujianglc@163.com>
Co-authored-by: Fengrui Liu <liufengrui.work@bytedance.com>

* fix team share redirect to login (#51)

* feat: support openapi import plugins (#48)

* feat: support openapi import plugins

* feat: import from url

* fix: add body params parse

* fix build

* fix

* fix

* fix

* tool box ui (#52)

* fix: training queue

* feat: simple edit tool select

* perf: simple edit dataset prompt

* fix: chatbox tool ux

* feat: quote prompt module

* perf: plugin tools sign

* perf: model avatar

* tool selector ui

* feat: max histories

* perf: http plugin import (#53)

* perf: plugin http import

* chatBox ui

* perf: name

* fix: Node template card (#54)

* fix: ts

* setting modal

* package

* package

* feat: add plugins search (#57)

* feat: add plugins search

* perf: change http plugin header input

* Yjl (#56)

* perf: prompt tool call

* perf: chat box ux

* doc

* doc

* price tip

* perf: tool selector

* ui

* fix: vector queue

* fix: empty tool and empty response

* fix: empty msg

* perf: pg index

* perf: ui tip

* doc

* tool tip

---------

Co-authored-by: yst <77910600+yu-and-liu@users.noreply.github.com>
Co-authored-by: entorick <entorick11@qq.com>
Co-authored-by: liujianglc <liujianglc@163.com>
Co-authored-by: Fengrui Liu <liufengrui.work@bytedance.com>
Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
This commit is contained in:
Archer
2024-03-21 13:32:31 +08:00
committed by GitHub
parent 6d4b331db9
commit 9d27de154b
322 changed files with 9282 additions and 6498 deletions

View File

@@ -0,0 +1,242 @@
import React, { useMemo, useState } from 'react';
import MyModal from '@fastgpt/web/components/common/MyModal';
import { useTranslation } from 'next-i18next';
import { useForm } from 'react-hook-form';
import {
Box,
BoxProps,
Button,
Flex,
Link,
ModalBody,
ModalFooter,
Switch
} from '@chakra-ui/react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import MySlider from '@/components/Slider';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { SettingAIDataType } from '@fastgpt/global/core/module/node/type.d';
import { getDocPath } from '@/web/common/system/doc';
import AIModelSelector from '@/components/Select/AIModelSelector';
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
/**
 * Modal for editing a module's AI chat settings: model, temperature, max
 * tokens, and — when present in the form data — max histories and the
 * "AI response" switch.
 *
 * @param onClose - called when the modal is dismissed without saving.
 * @param onSuccess - called with the raw form values when the user confirms.
 * @param defaultData - initial form values; fields that are `undefined` also
 *   hide the corresponding optional controls.
 * @param llmModels - models offered in the selector; defaults to an empty list.
 */
const AIChatSettingsModal = ({
  onClose,
  onSuccess,
  defaultData,
  llmModels = []
}: {
  onClose: () => void;
  onSuccess: (e: SettingAIDataType) => void;
  defaultData: SettingAIDataType;
  llmModels?: LLMModelItemType[];
}) => {
  const { t } = useTranslation();
  // Toggled after setValue() calls to force a re-render, because the
  // getValues() reads in the JSX are not reactive on their own.
  const [refresh, setRefresh] = useState(false);
  const { feConfigs, llmModelList } = useSystemStore();
  const { handleSubmit, getValues, setValue, watch } = useForm({
    defaultValues: defaultData
  });

  const model = watch('model');
  // Optional controls render only when their field exists (is not undefined)
  // in the form data, so callers control which settings appear.
  const showResponseAnswerText = watch(ModuleInputKeyEnum.aiChatIsResponseText) !== undefined;
  const showMaxHistoriesSlider = watch('maxHistories') !== undefined;
  // NOTE(review): the selector options below come from the `llmModels` prop,
  // but `selectedModel` and `tokenLimit` are looked up in the global
  // `llmModelList` store — confirm the two lists are meant to differ, since
  // the price/context rows could describe a model that is not offered.
  const selectedModel = llmModelList.find((item) => item.model === model) || llmModelList[0];
  // Upper bound for the max-tokens slider; falls back to 4096 when the
  // selected model is not found in the store.
  const tokenLimit = useMemo(() => {
    return llmModelList.find((item) => item.model === model)?.maxResponse || 4096;
  }, [llmModelList, model]);

  // NOTE(review): the tool-support row renders hardcoded Chinese strings
  // ('支持' / '不支持') instead of t() keys — verify whether they should be
  // localized like the rest of the modal.
  const onChangeModel = (e: string) => {
    setValue('model', e);

    // update max tokens: default the response budget to half of the new
    // model's maximum response size.
    const modelData = llmModelList.find((item) => item.model === e);

    if (modelData) {
      setValue('maxToken', modelData.maxResponse / 2);
    }

    setRefresh(!refresh);
  };

  // Shared styling for the left-hand row labels.
  const LabelStyles: BoxProps = {
    display: 'flex',
    alignItems: 'center',
    fontSize: ['sm', 'md'],
    width: ['80px', '90px']
  };

  return (
    <MyModal
      isOpen
      iconSrc="/imgs/module/AI.png"
      onClose={onClose}
      title={
        <>
          {t('core.ai.AI settings')}
          {feConfigs?.docUrl && (
            <Link
              href={getDocPath('/docs/course/ai_settings/')}
              target={'_blank'}
              ml={1}
              textDecoration={'underline'}
              fontWeight={'normal'}
              fontSize={'md'}
            >
              {t('common.Read intro')}
            </Link>
          )}
        </>
      }
      w={'500px'}
    >
      <ModalBody overflowY={'auto'}>
        <Flex alignItems={'center'}>
          <Box {...LabelStyles} mr={2}>
            {t('core.ai.Model')}
          </Box>
          <Box flex={'1 0 0'}>
            <AIModelSelector
              width={'100%'}
              value={model}
              list={llmModels.map((item) => ({
                value: item.model,
                label: item.name
              }))}
              onchange={onChangeModel}
            />
          </Box>
        </Flex>
        {feConfigs && (
          <Flex mt={8}>
            <Box {...LabelStyles} mr={2}>
              {t('core.ai.Ai point price')}
            </Box>
            <Box flex={1} ml={'10px'}>
              {t('support.wallet.Ai point every thousand tokens', {
                points: selectedModel?.charsPointsPrice || 0
              })}
            </Box>
          </Flex>
        )}
        <Flex mt={8}>
          <Box {...LabelStyles} mr={2}>
            {t('core.ai.Max context')}
          </Box>
          <Box flex={1} ml={'10px'}>
            {selectedModel?.maxContext || 4096}Tokens
          </Box>
        </Flex>
        <Flex mt={8}>
          <Box {...LabelStyles} mr={2}>
            {t('core.ai.Support tool')}
            <QuestionTip ml={1} label={t('core.module.template.AI support tool tip')} />
          </Box>
          <Box flex={1} ml={'10px'}>
            {selectedModel?.usedInToolCall ? '支持' : '不支持'}
          </Box>
        </Flex>
        <Flex mt={8}>
          <Box {...LabelStyles} mr={2}>
            {t('core.app.Temperature')}
          </Box>
          <Box flex={1} ml={'10px'}>
            <MySlider
              markList={[
                { label: t('core.app.deterministic'), value: 0 },
                { label: t('core.app.Random'), value: 10 }
              ]}
              width={'95%'}
              min={0}
              max={10}
              value={getValues(ModuleInputKeyEnum.aiChatTemperature)}
              onChange={(e) => {
                setValue(ModuleInputKeyEnum.aiChatTemperature, e);
                setRefresh(!refresh);
              }}
            />
          </Box>
        </Flex>
        <Flex mt={8}>
          <Box {...LabelStyles} mr={2}>
            {t('core.app.Max tokens')}
          </Box>
          <Box flex={1} ml={'10px'}>
            <MySlider
              markList={[
                { label: '100', value: 100 },
                { label: `${tokenLimit}`, value: tokenLimit }
              ]}
              width={'95%'}
              min={100}
              max={tokenLimit}
              step={50}
              value={getValues(ModuleInputKeyEnum.aiChatMaxToken)}
              onChange={(val) => {
                setValue(ModuleInputKeyEnum.aiChatMaxToken, val);
                setRefresh(!refresh);
              }}
            />
          </Box>
        </Flex>
        {showMaxHistoriesSlider && (
          <Flex mt={8}>
            <Box {...LabelStyles} mr={2}>
              {t('core.app.Max histories')}
            </Box>
            <Box flex={1} ml={'10px'}>
              <MySlider
                markList={[
                  { label: 0, value: 0 },
                  { label: 30, value: 30 }
                ]}
                width={'95%'}
                min={0}
                max={30}
                value={getValues('maxHistories') ?? 6}
                onChange={(e) => {
                  setValue('maxHistories', e);
                  setRefresh(!refresh);
                }}
              />
            </Box>
          </Flex>
        )}
        {showResponseAnswerText && (
          <Flex mt={8} alignItems={'center'}>
            <Box {...LabelStyles}>
              {t('core.app.Ai response')}
              <MyTooltip label={t('core.module.template.AI response switch tip')}>
                <QuestionOutlineIcon ml={1} />
              </MyTooltip>
            </Box>
            <Box flex={1} ml={'10px'}>
              <Switch
                isChecked={getValues(ModuleInputKeyEnum.aiChatIsResponseText)}
                size={'lg'}
                onChange={(e) => {
                  const value = e.target.checked;
                  setValue(ModuleInputKeyEnum.aiChatIsResponseText, value);
                  setRefresh((state) => !state);
                }}
              />
            </Box>
          </Flex>
        )}
      </ModalBody>
      <ModalFooter>
        <Button variant={'whiteBase'} onClick={onClose}>
          {t('common.Close')}
        </Button>
        <Button ml={4} onClick={handleSubmit(onSuccess)}>
          {t('common.Confirm')}
        </Button>
      </ModalFooter>
    </MyModal>
  );
};

export default AIChatSettingsModal;

View File

@@ -0,0 +1,83 @@
import React, { useEffect } from 'react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { LLMModelTypeEnum, llmModelTypeFilterMap } from '@fastgpt/global/core/ai/constants';
import { Box, Button, useDisclosure } from '@chakra-ui/react';
import { SettingAIDataType } from '@fastgpt/global/core/module/node/type';
import AISettingModal from '@/components/core/ai/AISettingModal';
import Avatar from '@/components/Avatar';
import { HUGGING_FACE_ICON } from '@fastgpt/global/common/system/constants';
type Props = {
  llmModelType?: `${LLMModelTypeEnum}`;
  defaultData: SettingAIDataType;
  onChange: (e: SettingAIDataType) => void;
};

/**
 * Button showing the currently selected LLM (avatar + name); clicking it opens
 * the AI settings modal to edit the selection.
 *
 * The candidate list is the global model list filtered by `llmModelType` via
 * `llmModelTypeFilterMap`. When `defaultData.model` is unset, the first
 * candidate is auto-selected through `onChange`.
 */
const SettingLLMModel = ({ llmModelType = LLMModelTypeEnum.all, defaultData, onChange }: Props) => {
  const { llmModelList } = useSystemStore();

  const model = defaultData.model;
  // Keep only models whose capability flag for this usage type is set.
  // (Callback parameter renamed from `model` to `item` — it shadowed the
  // selected model above.)
  const modelList = llmModelList.filter((item) => {
    if (!llmModelType) return true;
    const filterField = llmModelTypeFilterMap[llmModelType];
    if (!filterField) return true;
    //@ts-ignore
    return !!item[filterField];
  });
  // May be undefined when no model passes the filter — every access below
  // must be guarded with optional chaining.
  const selectedModel = modelList.find((item) => item.model === model) || modelList[0];

  const {
    isOpen: isOpenAIChatSetting,
    onOpen: onOpenAIChatSetting,
    onClose: onCloseAIChatSetting
  } = useDisclosure();

  // Auto-select the first available model when none is chosen yet.
  // NOTE(review): `defaultData`/`onChange` in the deps can re-trigger this
  // effect if the parent passes fresh references each render — confirm callers
  // memoize them.
  useEffect(() => {
    if (!model && modelList.length > 0) {
      onChange({
        ...defaultData,
        model: modelList[0].model
      });
    }
  }, [defaultData, model, modelList, onChange]);

  return (
    <Box position={'relative'}>
      <Button
        w={'100%'}
        justifyContent={'flex-start'}
        variant={'whitePrimary'}
        _active={{
          transform: 'none'
        }}
        leftIcon={
          <Avatar
            borderRadius={'0'}
            src={selectedModel?.avatar || HUGGING_FACE_ICON}
            fallbackSrc={HUGGING_FACE_ICON}
            w={'18px'}
          />
        }
        pl={4}
        onClick={onOpenAIChatSetting}
      >
        {selectedModel?.name}
      </Button>
      {isOpenAIChatSetting && (
        <AISettingModal
          onClose={onCloseAIChatSetting}
          onSuccess={(e) => {
            onChange(e);
            onCloseAIChatSetting();
          }}
          defaultData={defaultData}
          llmModels={modelList}
        />
      )}
    </Box>
  );
};

export default React.memo(SettingLLMModel);