4.8 preview (#1288)

* Revert "lafAccount add pat & re request when token invalid (#76)" (#77)

This reverts commit 83d85dfe37adcaef4833385ea52ee79fd84720be.

* perf: workflow ux

* system config

* Newflow (#89)

* docs: Add doc for Xinference (#1266)

Signed-off-by: Carson Yang <yangchuansheng33@gmail.com>

* Revert "lafAccount add pat & re request when token invalid (#76)" (#77)

This reverts commit 83d85dfe37adcaef4833385ea52ee79fd84720be.

* perf: workflow ux

* system config

* Revert "lafAccount add pat & re request when token invalid (#76)" (#77)

This reverts commit 83d85dfe37adcaef4833385ea52ee79fd84720be.

* Revert "lafAccount add pat & re request when token invalid (#76)" (#77)

This reverts commit 83d85dfe37adcaef4833385ea52ee79fd84720be.

* Revert "lafAccount add pat & re request when token invalid (#76)" (#77)

This reverts commit 83d85dfe37adcaef4833385ea52ee79fd84720be.

* rename code

* move code

* update flow

* input type selector

* perf: workflow runtime

* feat: node adapt newflow

* feat: adapt plugin

* feat: 360 connection

* check workflow

* perf: flow performance

* change plugin input type (#81)

* change plugin input type

* plugin label mode

* perf: nodecard

* debug

* perf: debug ui

* connection ui

* change workflow ui (#82)

* feat: workflow debug

* adapt openAPI for new workflow (#83)

* adapt openAPI for new workflow

* i18n

* perf: plugin debug

* plugin input ui

* delete

* perf: global variable select

* fix rebase

* perf: workflow performance

* feat: input render type icon

* input icon

* adapt flow (#84)

* adapt newflow

* temp

* temp

* fix

* feat: app schedule trigger

* feat: app schedule trigger

* perf: schedule ui

* feat: isolated-vm run js code

* perf: workflow variable table ui

* feat: adapt simple mode

* feat: adapt input params

* output

* feat: adapt template

* fix: ts

* add if-else module (#86)

* perf: worker

* if else node

* perf: tiktoken worker

* fix: ts

* perf: tiktoken

* fix if-else node (#87)

* fix if-else node

* type

* fix

* perf: audio render

* perf: Parallel worker

* log

* perf: if else node

* adapt plugin

* prompt

* perf: reference ui

* reference ui

* handle ux

* template ui and plugin tool

* adapt v1 workflow

* adapt v1 workflow completions

* perf: time variables

* feat: workflow keyboard shortcuts

* adapt v1 workflow

* update workflow example doc (#88)

* fix: simple mode select tool

---------

Signed-off-by: Carson Yang <yangchuansheng33@gmail.com>
Co-authored-by: Carson Yang <yangchuansheng33@gmail.com>
Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>

* doc

* perf: extract node

* extra node field

* update plugin version

* doc

* variable

* change doc & fix prompt editor (#90)

* fold workflow code

* value type label

---------

Signed-off-by: Carson Yang <yangchuansheng33@gmail.com>
Co-authored-by: Carson Yang <yangchuansheng33@gmail.com>
Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
This commit is contained in:
Archer
2024-04-25 17:51:20 +08:00
committed by GitHub
parent b08d81f887
commit 439c819ff1
505 changed files with 23570 additions and 18215 deletions

View File

@@ -14,8 +14,8 @@ import {
} from '@chakra-ui/react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import MySlider from '@/components/Slider';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { SettingAIDataType } from '@fastgpt/global/core/module/node/type.d';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import type { SettingAIDataType } from '@fastgpt/global/core/app/type.d';
import { getDocPath } from '@/web/common/system/doc';
import AIModelSelector from '@/components/Select/AIModelSelector';
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
@@ -42,7 +42,7 @@ const AIChatSettingsModal = ({
defaultValues: defaultData
});
const model = watch('model');
const showResponseAnswerText = watch(ModuleInputKeyEnum.aiChatIsResponseText) !== undefined;
const showResponseAnswerText = watch(NodeInputKeyEnum.aiChatIsResponseText) !== undefined;
const showMaxHistoriesSlider = watch('maxHistories') !== undefined;
const selectedModel = llmModelList.find((item) => item.model === model) || llmModelList[0];
@@ -72,7 +72,7 @@ const AIChatSettingsModal = ({
return (
<MyModal
isOpen
iconSrc="/imgs/module/AI.png"
iconSrc="/imgs/workflow/AI.png"
onClose={onClose}
title={
<>
@@ -136,7 +136,7 @@ const AIChatSettingsModal = ({
<QuestionTip ml={1} label={t('core.module.template.AI support tool tip')} />
</Box>
<Box flex={1} ml={'10px'}>
{selectedModel?.usedInToolCall ? '支持' : '不支持'}
{selectedModel?.toolChoice || selectedModel?.functionCall ? '支持' : '不支持'}
</Box>
</Flex>
<Flex mt={8}>
@@ -152,9 +152,9 @@ const AIChatSettingsModal = ({
width={'95%'}
min={0}
max={10}
value={getValues(ModuleInputKeyEnum.aiChatTemperature)}
value={getValues(NodeInputKeyEnum.aiChatTemperature)}
onChange={(e) => {
setValue(ModuleInputKeyEnum.aiChatTemperature, e);
setValue(NodeInputKeyEnum.aiChatTemperature, e);
setRefresh(!refresh);
}}
/>
@@ -174,9 +174,9 @@ const AIChatSettingsModal = ({
min={100}
max={tokenLimit}
step={50}
value={getValues(ModuleInputKeyEnum.aiChatMaxToken)}
value={getValues(NodeInputKeyEnum.aiChatMaxToken)}
onChange={(val) => {
setValue(ModuleInputKeyEnum.aiChatMaxToken, val);
setValue(NodeInputKeyEnum.aiChatMaxToken, val);
setRefresh(!refresh);
}}
/>
@@ -215,11 +215,11 @@ const AIChatSettingsModal = ({
</Box>
<Box flex={1} ml={'10px'}>
<Switch
isChecked={getValues(ModuleInputKeyEnum.aiChatIsResponseText)}
isChecked={getValues(NodeInputKeyEnum.aiChatIsResponseText)}
size={'lg'}
onChange={(e) => {
const value = e.target.checked;
setValue(ModuleInputKeyEnum.aiChatIsResponseText, value);
setValue(NodeInputKeyEnum.aiChatIsResponseText, value);
setRefresh((state) => !state);
}}
/>

View File

@@ -1,11 +1,13 @@
import React, { useEffect } from 'react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { LLMModelTypeEnum, llmModelTypeFilterMap } from '@fastgpt/global/core/ai/constants';
import { Box, Button, useDisclosure } from '@chakra-ui/react';
import { SettingAIDataType } from '@fastgpt/global/core/module/node/type';
import { Box, Button, Flex, css, useDisclosure } from '@chakra-ui/react';
import type { SettingAIDataType } from '@fastgpt/global/core/app/type.d';
import AISettingModal from '@/components/core/ai/AISettingModal';
import Avatar from '@/components/Avatar';
import { HUGGING_FACE_ICON } from '@fastgpt/global/common/system/constants';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useTranslation } from 'next-i18next';
type Props = {
llmModelType?: `${LLMModelTypeEnum}`;
@@ -14,6 +16,7 @@ type Props = {
};
const SettingLLMModel = ({ llmModelType = LLMModelTypeEnum.all, defaultData, onChange }: Props) => {
const { t } = useTranslation();
const { llmModelList } = useSystemStore();
const model = defaultData.model;
@@ -41,30 +44,39 @@ const SettingLLMModel = ({ llmModelType = LLMModelTypeEnum.all, defaultData, onC
model: modelList[0].model
});
}
}, [defaultData, model, modelList, onChange]);
}, []);
return (
<Box position={'relative'}>
<Button
w={'100%'}
justifyContent={'flex-start'}
variant={'whitePrimary'}
_active={{
transform: 'none'
}}
leftIcon={
<Avatar
borderRadius={'0'}
src={selectedModel?.avatar || HUGGING_FACE_ICON}
fallbackSrc={HUGGING_FACE_ICON}
w={'18px'}
/>
<Box
css={css({
span: {
display: 'block'
}
pl={4}
onClick={onOpenAIChatSetting}
>
{selectedModel?.name}
</Button>
})}
position={'relative'}
>
<MyTooltip label={t('core.app.Setting ai property')}>
<Button
w={'100%'}
justifyContent={'flex-start'}
variant={'whiteFlow'}
_active={{
transform: 'none'
}}
leftIcon={
<Avatar
borderRadius={'0'}
src={selectedModel?.avatar || HUGGING_FACE_ICON}
fallbackSrc={HUGGING_FACE_ICON}
w={'18px'}
/>
}
pl={4}
onClick={onOpenAIChatSetting}
>
{selectedModel?.name}
</Button>
</MyTooltip>
{isOpenAIChatSetting && (
<AISettingModal
onClose={onCloseAIChatSetting}