perf: isPc check; perf: dataset max token checker (#4872)

* perf: isPc check

* perf: dataset max token checker

* perf: dataset max token checker
This commit is contained in:
Archer
2025-05-22 18:40:29 +08:00
committed by GitHub
parent 50481f4ca8
commit 6a6719e93d
8 changed files with 91 additions and 90 deletions

View File

@@ -24,6 +24,8 @@ import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import ValueTypeLabel from './render/ValueTypeLabel';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { getWebLLMModel } from '@/web/common/system/utils';
import InputSlider from '@fastgpt/web/components/common/MySlider/InputSlider';
import MyNumberInput from '@fastgpt/web/components/common/Input/NumberInput';
const NodeDatasetConcat = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
const { t } = useTranslation();
@@ -32,35 +34,58 @@ const NodeDatasetConcat = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
const CustomComponent = useMemo(() => {
const quoteList = inputs.filter((item) => item.canEdit);
const tokenLimit = (() => {
let maxTokens = 16000;
const maxTokens = (() => {
let maxTokens = 0;
nodeList.forEach((item) => {
if ([FlowNodeTypeEnum.chatNode, FlowNodeTypeEnum.tools].includes(item.flowNodeType)) {
const model =
item.inputs.find((item) => item.key === NodeInputKeyEnum.aiModel)?.value || '';
const quoteMaxToken = getWebLLMModel(model)?.quoteMaxToken || 16000;
const quoteMaxToken = getWebLLMModel(model)?.quoteMaxToken || 0;
maxTokens = Math.max(maxTokens, quoteMaxToken);
}
});
return maxTokens;
return maxTokens ? maxTokens : undefined;
})();
const maxTokenStep = (() => {
if (!maxTokens || maxTokens < 8000) return 80;
return Math.ceil(maxTokens / 80 / 100) * 100;
})();
return {
[NodeInputKeyEnum.datasetMaxTokens]: (item: FlowNodeInputItemType) => (
<Box px={2}>
<MySlider
markList={[
{ label: '100', value: 100 },
{ label: tokenLimit, value: tokenLimit }
]}
width={'100%'}
[NodeInputKeyEnum.datasetMaxTokens]: (item: FlowNodeInputItemType) =>
maxTokens ? (
<Box px={2} bg={'white'} py={2} border={'base'} borderRadius={'md'}>
<InputSlider
min={100}
max={maxTokens}
step={maxTokenStep}
value={item.value}
onChange={(e) => {
onChangeNode({
nodeId,
type: 'updateInput',
key: item.key,
value: {
...item,
value: e
}
});
}}
/>
</Box>
) : (
<MyNumberInput
size={'sm'}
min={100}
max={tokenLimit}
step={50}
max={1000000}
step={100}
value={item.value}
name={NodeInputKeyEnum.datasetMaxTokens}
bg={'white'}
onChange={(e) => {
onChangeNode({
nodeId,
@@ -73,8 +98,7 @@ const NodeDatasetConcat = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
});
}}
/>
</Box>
),
),
[NodeInputKeyEnum.datasetQuoteList]: (item: FlowNodeInputItemType) => {
return (
<>

View File

@@ -12,7 +12,6 @@ import SearchParamsTip from '@/components/core/dataset/SearchParamsTip';
import { useContextSelector } from 'use-context-selector';
import { WorkflowContext } from '@/pageComponents/app/detail/WorkflowComponents/context';
import { getWebLLMModel } from '@/web/common/system/utils';
import { defaultDatasetMaxTokens } from '@fastgpt/global/core/app/constants';
import { type AppDatasetSearchParamsType } from '@fastgpt/global/core/app/type';
const SelectDatasetParam = ({ inputs = [], nodeId }: RenderInputProps) => {
@@ -36,19 +35,19 @@ const SelectDatasetParam = ({ inputs = [], nodeId }: RenderInputProps) => {
});
const tokenLimit = useMemo(() => {
let maxTokens = defaultDatasetMaxTokens;
let maxTokens = 0;
nodeList.forEach((item) => {
if ([FlowNodeTypeEnum.chatNode, FlowNodeTypeEnum.tools].includes(item.flowNodeType)) {
const model =
item.inputs.find((item) => item.key === NodeInputKeyEnum.aiModel)?.value || '';
const quoteMaxToken = getWebLLMModel(model)?.quoteMaxToken || defaultDatasetMaxTokens;
const quoteMaxToken = getWebLLMModel(model)?.quoteMaxToken ?? 0;
maxTokens = Math.max(maxTokens, quoteMaxToken);
}
});
return maxTokens;
return maxTokens ? maxTokens : undefined;
}, [nodeList]);
const { isOpen, onOpen, onClose } = useDisclosure();