perf: max_token count; feat: support reasoner output; fix: member scroll (#3681)

* perf: supplement assistant empty response

* check array

* perf: max_token count

* feat: support reasoner output

* member scroll

* update provider order

* i18n
This commit is contained in:
Archer
2025-02-01 18:04:44 +08:00
committed by archer
parent 9e0379382f
commit 54defd8a3c
46 changed files with 462 additions and 266 deletions

View File

@@ -72,6 +72,7 @@ const AIChatSettingsModal = ({
defaultValues: defaultData
});
const model = watch('model');
const reasoning = watch(NodeInputKeyEnum.aiChatReasoning);
const showResponseAnswerText = watch(NodeInputKeyEnum.aiChatIsResponseText) !== undefined;
const showVisionSwitch = watch(NodeInputKeyEnum.aiChatVision) !== undefined;
const showMaxHistoriesSlider = watch('maxHistories') !== undefined;
@@ -84,6 +85,8 @@ const AIChatSettingsModal = ({
return getWebLLMModel(model);
}, [model]);
const llmSupportVision = !!selectedModel?.vision;
const llmSupportTemperature = typeof selectedModel?.maxTemperature === 'number';
const llmSupportReasoning = !!selectedModel?.reasoning;
const tokenLimit = useMemo(() => {
return selectedModel?.maxResponse || 4096;
@@ -258,36 +261,51 @@ const AIChatSettingsModal = ({
/>
</Box>
</Flex>
<Flex {...FlexItemStyles}>
<Box {...LabelStyles}>
<Flex alignItems={'center'}>
{t('app:temperature')}
<QuestionTip label={t('app:temperature_tip')} />
</Flex>
<Switch
isChecked={temperature !== undefined}
size={'sm'}
onChange={(e) => {
setValue('temperature', e.target.checked ? 0 : undefined);
}}
/>
</Box>
<Box flex={'1 0 0'}>
<InputSlider
min={0}
max={10}
step={1}
value={temperature}
isDisabled={temperature === undefined}
onChange={(e) => {
setValue(NodeInputKeyEnum.aiChatTemperature, e);
setRefresh(!refresh);
}}
/>
</Box>
</Flex>
{llmSupportTemperature && (
<Flex {...FlexItemStyles}>
<Box {...LabelStyles}>
<Flex alignItems={'center'}>
{t('app:temperature')}
<QuestionTip label={t('app:temperature_tip')} />
</Flex>
<Switch
isChecked={temperature !== undefined}
size={'sm'}
onChange={(e) => {
setValue('temperature', e.target.checked ? 0 : undefined);
}}
/>
</Box>
<Box flex={'1 0 0'}>
<InputSlider
min={0}
max={10}
step={1}
value={temperature}
isDisabled={temperature === undefined}
onChange={(e) => {
setValue(NodeInputKeyEnum.aiChatTemperature, e);
setRefresh(!refresh);
}}
/>
</Box>
</Flex>
)}
{llmSupportReasoning && (
<Flex {...FlexItemStyles} h={'25px'}>
<Box {...LabelStyles}>
<Flex alignItems={'center'}>{t('app:reasoning_response')}</Flex>
<Switch
isChecked={reasoning || false}
size={'sm'}
onChange={(e) => {
const value = e.target.checked;
setValue(NodeInputKeyEnum.aiChatReasoning, value);
}}
/>
</Box>
</Flex>
)}
{showResponseAnswerText && (
<Flex {...FlexItemStyles} h={'25px'}>
<Box {...LabelStyles}>

View File

@@ -201,6 +201,7 @@ const ChatBox = ({
({
event,
text = '',
reasoningText,
status,
name,
tool,
@@ -247,6 +248,25 @@ const ChatBox = ({
value: item.value.slice(0, -1).concat(lastValue)
};
}
} else if (event === SseResponseEventEnum.answer && reasoningText) {
if (lastValue.type === ChatItemValueTypeEnum.reasoning && lastValue.reasoning) {
lastValue.reasoning.content += reasoningText;
return {
...item,
value: item.value.slice(0, -1).concat(lastValue)
};
} else {
const val: AIChatItemValueItemType = {
type: ChatItemValueTypeEnum.reasoning,
reasoning: {
content: reasoningText
}
};
return {
...item,
value: item.value.concat(val)
};
}
} else if (event === SseResponseEventEnum.toolCall && tool) {
const val: AIChatItemValueItemType = {
type: ChatItemValueTypeEnum.tool,

View File

@@ -6,6 +6,7 @@ import { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/t
export type generatingMessageProps = {
event: SseResponseEventEnum;
text?: string;
reasoningText?: string;
name?: string;
status?: 'running' | 'finish';
tool?: ToolModuleResponseItemType;

View File

@@ -8,6 +8,7 @@ import {
Box,
Button,
Flex,
HStack,
Textarea
} from '@chakra-ui/react';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
@@ -139,6 +140,55 @@ ${toolResponse}`}
},
(prevProps, nextProps) => isEqual(prevProps, nextProps)
);
// Collapsible panel that displays the model's reasoning text ("think" output)
// above the final answer. Memoized with React.memo because the parent chat item
// re-renders on every streamed token.
// NOTE(review): component name is misspelled ("Resoning" vs "Reasoning") — it is
// referenced elsewhere in this file, so renaming must be done at all call sites.
const RenderResoningContent = React.memo(function RenderResoningContent({
  content,
  showAnimation
}: {
  content: string; // accumulated reasoning text, rendered as markdown
  showAnimation: boolean; // true while the response is still streaming; shows a loading icon
}) {
  const { t } = useTranslation();
  // defaultIndex={0} keeps the panel expanded initially so streamed reasoning is visible;
  // allowToggle lets the user collapse it once the answer arrives.
  return (
    <Accordion allowToggle defaultIndex={0}>
      <AccordionItem borderTop={'none'} borderBottom={'none'}>
        <AccordionButton
          w={'auto'}
          bg={'white'}
          borderRadius={'md'}
          borderWidth={'1px'}
          borderColor={'myGray.200'}
          boxShadow={'1'}
          pl={3}
          pr={2.5}
          py={1}
          _hover={{
            bg: 'auto'
          }}
        >
          <HStack mr={2} spacing={1}>
            <MyIcon name={'core/chat/think'} w={'0.85rem'} />
            <Box fontSize={'sm'}>{t('chat:ai_reasoning')}</Box>
          </HStack>
          {showAnimation && <MyIcon name={'common/loading'} w={'0.85rem'} />}
          <AccordionIcon color={'myGray.600'} ml={5} />
        </AccordionButton>
        <AccordionPanel
          py={0}
          pr={0}
          pl={3}
          mt={2}
          borderLeft={'2px solid'}
          borderColor={'myGray.300'}
          color={'myGray.500'}
        >
          <Markdown source={content} />
        </AccordionPanel>
      </AccordionItem>
    </Accordion>
  );
});
const RenderUserSelectInteractive = React.memo(function RenderInteractive({
interactive
}: {
@@ -290,6 +340,8 @@ const AIResponseBox = ({ value, isLastResponseValue, isChatting }: props) => {
return (
<RenderText showAnimation={isChatting && isLastResponseValue} text={value.text.content} />
);
if (value.type === ChatItemValueTypeEnum.reasoning && value.reasoning)
return <RenderResoningContent showAnimation={isChatting} content={value.reasoning.content} />;
if (value.type === ChatItemValueTypeEnum.tool && value.tools)
return <RenderTool showAnimation={isChatting} tools={value.tools} />;
if (value.type === ChatItemValueTypeEnum.interactive && value.interactive) {

View File

@@ -803,6 +803,10 @@ const ModelEditModal = ({
<JsonEditor
value={JSON.stringify(getValues('defaultConfig'), null, 2)}
onChange={(e) => {
if (!e) {
setValue('defaultConfig', undefined);
return;
}
try {
setValue('defaultConfig', JSON.parse(e));
} catch (error) {
@@ -1009,6 +1013,10 @@ const ModelEditModal = ({
value={JSON.stringify(getValues('defaultConfig'), null, 2)}
resize
onChange={(e) => {
if (!e) {
setValue('defaultConfig', undefined);
return;
}
try {
setValue('defaultConfig', JSON.parse(e));
} catch (error) {

View File

@@ -14,7 +14,7 @@ import Avatar from '@fastgpt/web/components/common/Avatar';
import Tag from '@fastgpt/web/components/common/Tag';
import { useTranslation } from 'next-i18next';
import React, { useMemo, useState } from 'react';
import React, { useMemo, useRef, useState } from 'react';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { useContextSelector } from 'use-context-selector';
import { TeamContext } from '../context';
@@ -50,6 +50,8 @@ function GroupEditModal({ onClose, editGroupId }: { onClose: () => void; editGro
const refetchMembers = useContextSelector(TeamContext, (v) => v.refetchMembers);
const MemberScrollData = useContextSelector(TeamContext, (v) => v.MemberScrollData);
const [hoveredMemberId, setHoveredMemberId] = useState<string>();
const selectedMembersRef = useRef<HTMLDivElement>(null);
const [members, setMembers] = useState(group?.members || []);
const [searchKey, setSearchKey] = useState('');
@@ -155,7 +157,7 @@ function GroupEditModal({ onClose, editGroupId }: { onClose: () => void; editGro
setSearchKey(e.target.value);
}}
/>
<MemberScrollData mt={3} flex={'1 0 0'} h={0}>
<MemberScrollData mt={3} flexGrow="1" overflow={'auto'} maxH={'400px'}>
{filtered.map((member) => {
return (
<HStack
@@ -185,7 +187,7 @@ function GroupEditModal({ onClose, editGroupId }: { onClose: () => void; editGro
</Flex>
<Flex borderLeft="1px" borderColor="myGray.200" flexDirection="column" p="4" h={'100%'}>
<Box mt={2}>{t('common:chosen') + ': ' + members.length}</Box>
<MemberScrollData mt={3} flex={'1 0 0'} h={0}>
<MemberScrollData ScrollContainerRef={selectedMembersRef} mt={3} flex={'1 0 0'} h={0}>
{members.map((member) => {
return (
<HStack

View File

@@ -169,8 +169,8 @@ function MemberTable({ Tabs }: { Tabs: React.ReactNode }) {
</Flex>
<Box flex={'1 0 0'} overflow={'auto'}>
<TableContainer overflow={'unset'} fontSize={'sm'}>
<MemberScrollData>
<MemberScrollData>
<TableContainer overflow={'unset'} fontSize={'sm'}>
<Table overflow={'unset'}>
<Thead>
<Tr bgColor={'white !important'}>
@@ -246,9 +246,9 @@ function MemberTable({ Tabs }: { Tabs: React.ReactNode }) {
))}
</Tbody>
</Table>
</MemberScrollData>
<ConfirmRemoveMemberModal />
</TableContainer>
<ConfirmRemoveMemberModal />
</TableContainer>
</MemberScrollData>
</Box>
<ConfirmLeaveTeamModal />

View File

@@ -121,36 +121,34 @@ function OrgMemberManageModal({
setSearchKey(e.target.value);
}}
/>
<Flex flexDirection="column" mt={3} flexGrow="1" overflow={'auto'} maxH={'400px'}>
<MemberScrollData>
{filterMembers.map((member) => {
return (
<HStack
py="2"
px={3}
borderRadius={'md'}
alignItems="center"
key={member.tmbId}
cursor={'pointer'}
_hover={{
bg: 'myGray.50',
...(!isSelected(member.tmbId) ? { svg: { color: 'myGray.50' } } : {})
}}
_notLast={{ mb: 2 }}
onClick={() => handleToggleSelect(member.tmbId)}
>
<Checkbox
isChecked={!!isSelected(member.tmbId)}
icon={<CheckboxIcon name={'common/check'} />}
pointerEvents="none"
/>
<Avatar src={member.avatar} w="1.5rem" borderRadius={'50%'} />
<Box>{member.memberName}</Box>
</HStack>
);
})}
</MemberScrollData>
</Flex>
<MemberScrollData mt={3} flexGrow="1" overflow={'auto'} maxH={'400px'}>
{filterMembers.map((member) => {
return (
<HStack
py="2"
px={3}
borderRadius={'md'}
alignItems="center"
key={member.tmbId}
cursor={'pointer'}
_hover={{
bg: 'myGray.50',
...(!isSelected(member.tmbId) ? { svg: { color: 'myGray.50' } } : {})
}}
_notLast={{ mb: 2 }}
onClick={() => handleToggleSelect(member.tmbId)}
>
<Checkbox
isChecked={!!isSelected(member.tmbId)}
icon={<CheckboxIcon name={'common/check'} />}
pointerEvents="none"
/>
<Avatar src={member.avatar} w="1.5rem" borderRadius={'50%'} />
<Box>{member.memberName}</Box>
</HStack>
);
})}
</MemberScrollData>
</Flex>
<Flex borderLeft="1px" borderColor="myGray.200" flexDirection="column" p="4" h={'100%'}>
<Box mt={2}>{`${t('common:chosen')}:${selectedMembers.length}`}</Box>

View File

@@ -38,7 +38,9 @@ const SelectAiModelRender = ({ item, inputs = [], nodeId }: RenderInputProps) =>
(input) => input.key === NodeInputKeyEnum.aiChatIsResponseText
)?.value,
aiChatVision:
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatVision)?.value ?? true
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatVision)?.value ?? true,
aiChatReasoning:
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatReasoning)?.value ?? true
}),
[inputs]
);

View File

@@ -186,6 +186,12 @@ export const streamFetch = ({
text: item
});
}
const reasoningText = parseJson.choices?.[0]?.delta?.reasoning_content || '';
onMessage({
event,
reasoningText
});
} else if (event === SseResponseEventEnum.fastAnswer) {
const text = parseJson.choices?.[0]?.delta?.content || '';
pushDataToQueue({

View File

@@ -1,7 +1,7 @@
import { parseCurl } from '@fastgpt/global/common/string/http';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import { AppSchema } from '@fastgpt/global/core/app/type';
import { WorkflowIOValueTypeEnum } from '@fastgpt/global/core/workflow/constants';
import { NodeInputKeyEnum, WorkflowIOValueTypeEnum } from '@fastgpt/global/core/workflow/constants';
import {
FlowNodeInputTypeEnum,
FlowNodeOutputTypeEnum,
@@ -150,7 +150,7 @@ export const emptyTemplates: Record<
key: 'temperature',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: 0,
value: undefined,
valueType: WorkflowIOValueTypeEnum.number,
min: 0,
max: 10,
@@ -160,7 +160,7 @@ export const emptyTemplates: Record<
key: 'maxToken',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: 2000,
value: undefined,
valueType: WorkflowIOValueTypeEnum.number,
min: 100,
max: 4000,
@@ -221,6 +221,13 @@ export const emptyTemplates: Record<
debugLabel: i18nT('common:core.module.Dataset quote.label'),
description: '',
valueType: WorkflowIOValueTypeEnum.datasetQuote
},
{
key: NodeInputKeyEnum.aiChatReasoning,
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.boolean,
value: true
}
],
outputs: [