v4.6.4-Outlink (#589)

This commit is contained in:
Archer
2023-12-12 14:42:20 +08:00
committed by GitHub
parent d2d7eac9e0
commit e18c79ca71
79 changed files with 1094 additions and 762 deletions

View File

@@ -6,11 +6,13 @@ import { useTranslation } from 'next-i18next';
import { updateChatUserFeedback } from '@/web/core/chat/api';
const FeedbackModal = ({
appId,
chatId,
chatItemId,
onSuccess,
onClose
}: {
appId: string;
chatId: string;
chatItemId: string;
onSuccess: (e: string) => void;
@@ -23,6 +25,7 @@ const FeedbackModal = ({
mutationFn: async () => {
const val = ref.current?.value || t('core.chat.feedback.No Content');
return updateChatUserFeedback({
appId,
chatId,
chatItemId,
userBadFeedback: val

View File

@@ -429,7 +429,7 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
>
{isChatting ? (
<MyIcon
className={styles.stopIcon}
animation={'zoomStopIcon 0.4s infinite alternate'}
width={['22px', '25px']}
height={['22px', '25px']}
cursor={'pointer'}

View File

@@ -1,7 +1,3 @@
.stopIcon {
animation: zoomStopIcon 0.4s infinite alternate;
}
.statusAnimation {
animation: statusBox 0.8s linear infinite alternate;
}

View File

@@ -102,9 +102,13 @@ type Props = {
userGuideModule?: ModuleItemType;
showFileSelector?: boolean;
active?: boolean; // can use
// not chat test params
appId?: string;
chatId?: string;
shareId?: string;
outLinkUid?: string;
onUpdateVariable?: (e: Record<string, any>) => void;
onStartChat?: (e: StartChatFnProps) => Promise<{
responseText: string;
@@ -125,6 +129,7 @@ const ChatBox = (
userGuideModule,
showFileSelector,
active = true,
appId,
chatId,
shareId,
outLinkUid,
@@ -711,7 +716,7 @@ const ChatBox = (
return;
}
return () => {
if (!item.dataId || !chatId) return;
if (!item.dataId || !chatId || !appId) return;
const isGoodFeedback = !!item.userGoodFeedback;
setChatHistory((state) =>
@@ -726,6 +731,7 @@ const ChatBox = (
);
try {
updateChatUserFeedback({
appId,
chatId,
chatItemId: item.dataId,
shareId,
@@ -738,7 +744,7 @@ const ChatBox = (
onCloseUserLike={
feedbackType === FeedbackTypeEnum.admin
? () => {
if (!item.dataId || !chatId) return;
if (!item.dataId || !chatId || !appId) return;
setChatHistory((state) =>
state.map((chatItem) =>
chatItem.dataId === item.dataId
@@ -747,6 +753,7 @@ const ChatBox = (
)
);
updateChatUserFeedback({
appId,
chatId,
chatItemId: item.dataId,
userGoodFeedback: undefined
@@ -760,7 +767,7 @@ const ChatBox = (
}
if (item.userBadFeedback) {
return () => {
if (!item.dataId || !chatId) return;
if (!item.dataId || !chatId || !appId) return;
setChatHistory((state) =>
state.map((chatItem) =>
chatItem.dataId === item.dataId
@@ -770,6 +777,7 @@ const ChatBox = (
);
try {
updateChatUserFeedback({
appId,
chatId,
chatItemId: item.dataId,
shareId,
@@ -886,8 +894,9 @@ const ChatBox = (
/>
) : null}
{/* user feedback modal */}
{!!feedbackId && chatId && (
{!!feedbackId && chatId && appId && (
<FeedbackModal
appId={appId}
chatId={chatId}
chatItemId={feedbackId}
onClose={() => setFeedbackId(undefined)}
@@ -915,8 +924,9 @@ const ChatBox = (
)
);
try {
if (!chatId) return;
if (!chatId || !appId) return;
updateChatUserFeedback({
appId,
chatId,
chatItemId: readFeedbackData.chatItemId
});
@@ -948,8 +958,9 @@ const ChatBox = (
)
);
if (readFeedbackData && chatId) {
if (readFeedbackData && chatId && appId) {
updateChatUserFeedback({
appId,
chatId,
chatItemId: readFeedbackData.chatItemId,
userBadFeedback: undefined

View File

@@ -10,6 +10,7 @@ import {
Image
} from '@chakra-ui/react';
import MyIcon from '../Icon';
import { useSystemStore } from '@/web/common/system/useSystemStore';
export interface MyModalProps extends ModalContentProps {
iconSrc?: string;
@@ -30,12 +31,13 @@ const MyModal = ({
maxW = ['90vw', '600px'],
...props
}: MyModalProps) => {
const { isPc } = useSystemStore();
return (
<Modal
isOpen={isOpen}
onClose={() => onClose && onClose()}
autoFocus={false}
isCentered={isCentered}
isCentered={isPc ? isCentered : true}
>
<ModalOverlay />
<ModalContent

View File

@@ -5,11 +5,12 @@ import { useTranslation } from 'next-i18next';
// @ts-ignore
interface Props extends GridProps {
list: { icon?: string; title: string; desc?: string; value: string | number }[];
list: { icon?: string; title: string | React.ReactNode; desc?: string; value: any }[];
iconSize?: string;
align?: 'top' | 'center';
value: string | number;
onChange: (e: string | number) => void;
value: any;
hiddenCircle?: boolean;
onChange: (e: any) => void;
}
const MyRadio = ({
@@ -17,6 +18,8 @@ const MyRadio = ({
value,
align = 'center',
iconSize = '18px',
hiddenCircle = false,
p,
onChange,
...props
}: Props) => {
@@ -32,7 +35,8 @@ const MyRadio = ({
userSelect={'none'}
py={3}
pl={'14px'}
pr={'36px'}
pr={hiddenCircle ? '14px' : '36px'}
p={p !== undefined ? `${p} !important` : undefined}
border={theme.borders.sm}
borderWidth={'1.5px'}
borderRadius={'md'}
@@ -50,6 +54,7 @@ const MyRadio = ({
})}
_after={{
content: '""',
display: hiddenCircle ? 'none' : 'block',
position: 'absolute',
right: '14px',
w: '16px',
@@ -79,8 +84,8 @@ const MyRadio = ({
)}
</>
)}
<Box pr={2}>
<Box>{t(item.title)}</Box>
<Box pr={hiddenCircle ? 0 : 2} color={'myGray.800'}>
<Box>{typeof item.title === 'string' ? t(item.title) : item.title}</Box>
{!!item.desc && (
<Box fontSize={['xs', 'sm']} color={'myGray.500'}>
{t(item.desc)}

View File

@@ -10,13 +10,13 @@ import React, {
} from 'react';
import { Box, Flex, IconButton } from '@chakra-ui/react';
import MyIcon from '@/components/Icon';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { streamFetch } from '@/web/common/api/fetch';
import MyTooltip from '@/components/MyTooltip';
import { useUserStore } from '@/web/support/user/useUserStore';
import ChatBox, { type ComponentRef, type StartChatFnProps } from '@/components/ChatBox';
import { getGuideModule } from '@fastgpt/global/core/module/utils';
import { checkChatSupportSelectFileByModules } from '@/web/core/chat/utils';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
export type ChatTestComponentRef = {
resetChatTest: () => void;
@@ -40,10 +40,18 @@ const ChatTest = (
const startChat = useCallback(
async ({ chatList, controller, generatingMessage, variables }: StartChatFnProps) => {
const historyMaxLen =
modules
?.find((item) => item.flowType === FlowNodeTypeEnum.historyNode)
?.inputs?.find((item) => item.key === 'maxContext')?.value || 0;
let historyMaxLen = 6;
modules.forEach((module) => {
module.inputs.forEach((input) => {
if (
(input.key === ModuleInputKeyEnum.history ||
input.key === ModuleInputKeyEnum.historyMaxAmount) &&
typeof input.value === 'number'
) {
historyMaxLen = Math.max(historyMaxLen, input.value);
}
});
});
const history = chatList.slice(-historyMaxLen - 2, -2);
// 流请求,获取数据

View File

@@ -28,7 +28,7 @@ import React, {
import { customAlphabet } from 'nanoid';
import { appModule2FlowEdge, appModule2FlowNode } from '@/utils/adapt';
import { useToast } from '@/web/common/hooks/useToast';
import { FlowNodeInputTypeEnum, FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { ModuleDataTypeEnum } from '@fastgpt/global/core/module/constants';
import { useTranslation } from 'next-i18next';
import { ModuleItemType } from '@fastgpt/global/core/module/type.d';
@@ -449,9 +449,9 @@ export function flowNode2Modules({
flowType: item.data.flowType,
showStatus: item.data.showStatus,
position: item.position,
inputs: item.data.inputs.map((item) => ({
...item,
connected: Boolean(item.value ?? item.connected ?? item.type !== FlowNodeInputTypeEnum.target)
inputs: item.data.inputs.map((input) => ({
...input,
connected: false
})),
outputs: item.data.outputs.map((item) => ({
...item,
@@ -462,10 +462,11 @@ export function flowNode2Modules({
// update inputs and outputs
modules.forEach((module) => {
module.inputs.forEach((input) => {
input.connected =
input.connected ||
!!edges.find((edge) => edge.target === module.moduleId && edge.targetHandle === input.key);
input.connected = !!edges.find(
(edge) => edge.target === module.moduleId && edge.targetHandle === input.key
);
});
module.outputs.forEach((output) => {
output.targets = edges
.filter(

View File

@@ -1,4 +1,4 @@
import React, { useEffect, useMemo, useState } from 'react';
import React, { useMemo, useState } from 'react';
import {
Box,
Button,
@@ -12,9 +12,7 @@ import {
Flex,
Switch,
Input,
Grid,
FormControl,
useTheme,
Image,
Table,
Thead,
@@ -39,6 +37,7 @@ import MyTooltip from '@/components/MyTooltip';
import { variableTip } from '@fastgpt/global/core/module/template/tip';
import { useTranslation } from 'next-i18next';
import { useToast } from '@/web/common/hooks/useToast';
import MyRadio from '@/components/common/MyRadio';
const VariableEdit = ({
variables,
@@ -49,26 +48,28 @@ const VariableEdit = ({
}) => {
const { t } = useTranslation();
const { toast } = useToast();
const theme = useTheme();
const [refresh, setRefresh] = useState(false);
const VariableTypeList = [
{
label: t('core.module.variable.input type'),
icon: 'core/app/variable/input',
key: VariableInputEnum.input
},
{
label: t('core.module.variable.textarea type'),
icon: 'core/app/variable/textarea',
key: VariableInputEnum.textarea
},
{
label: t('core.module.variable.select type'),
icon: 'core/app/variable/select',
key: VariableInputEnum.select
}
];
const VariableTypeList = useMemo(
() => [
{
title: t('core.module.variable.input type'),
icon: 'core/app/variable/input',
value: VariableInputEnum.input
},
{
title: t('core.module.variable.textarea type'),
icon: 'core/app/variable/textarea',
value: VariableInputEnum.textarea
},
{
title: t('core.module.variable.select type'),
icon: 'core/app/variable/select',
value: VariableInputEnum.select
}
],
[t]
);
const { isOpen: isOpenEdit, onOpen: onOpenEdit, onClose: onCloseEdit } = useDisclosure();
const {
@@ -102,9 +103,9 @@ const VariableEdit = ({
const formatVariables = useMemo(() => {
return variables.map((item) => ({
...item,
icon: VariableTypeList.find((type) => type.key === item.type)?.icon
icon: VariableTypeList.find((type) => type.value === item.type)?.icon
}));
}, [variables]);
}, [VariableTypeList, variables]);
return (
<Box>
@@ -206,38 +207,18 @@ const VariableEdit = ({
<Box mt={5} mb={2}>
{t('core.module.Field Type')}
</Box>
<Grid gridTemplateColumns={'repeat(3,1fr)'} gridGap={4}>
{VariableTypeList.map((item) => (
<Flex
key={item.key}
px={3}
py={3}
border={theme.borders.base}
borderRadius={'md'}
cursor={'pointer'}
{...(item.key === getValuesEdit('variable.type')
? {
bg: 'myBlue.100',
borderColor: 'myBlue.600',
color: 'myBlue.600',
fontWeight: 'bold'
}
: {
color: 'myGray.600',
_hover: {
boxShadow: 'md'
},
onClick: () => {
setValuesEdit('variable.type', item.key);
setRefresh(!refresh);
}
})}
>
<MyIcon name={item.icon as any} w={'16px'} />
<Box ml={2}>{item.label}</Box>
</Flex>
))}
</Grid>
<MyRadio
gridGap={4}
gridTemplateColumns={'repeat(3,1fr)'}
value={getValuesEdit('variable.type')}
list={VariableTypeList}
color={'myGray.600'}
hiddenCircle
onChange={(e) => {
setValuesEdit('variable.type', e as any);
setRefresh(!refresh);
}}
/>
{getValuesEdit('variable.type') === VariableInputEnum.input && (
<>

View File

@@ -97,7 +97,7 @@ const NodeCQNode = ({ data }: NodeProps<FlowModuleItemType>) => {
});
}}
/>
<SourceHandle handleKey={item.key} valueType={ModuleDataTypeEnum.boolean} />
<SourceHandle handleKey={item.key} valueType={ModuleDataTypeEnum.string} />
</Box>
</Box>
))}

View File

@@ -29,7 +29,7 @@ import TargetHandle from './TargetHandle';
import MyIcon from '@/components/Icon';
import { useTranslation } from 'next-i18next';
import type { AIChatModuleProps } from '@fastgpt/global/core/module/node/type.d';
import { chatModelList } from '@/web/common/system/staticData';
import { chatModelList, cqModelList } from '@/web/common/system/staticData';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import type { SelectedDatasetType } from '@fastgpt/global/core/module/api.d';
@@ -229,8 +229,11 @@ const RenderInput = ({
{item.type === FlowNodeInputTypeEnum.aiSettings && (
<AISetting inputs={sortInputs} item={item} moduleId={moduleId} />
)}
{item.type === FlowNodeInputTypeEnum.selectChatModel && (
<SelectChatModelRender inputs={sortInputs} item={item} moduleId={moduleId} />
{[
FlowNodeInputTypeEnum.selectChatModel,
FlowNodeInputTypeEnum.selectCQModel
].includes(item.type as any) && (
<SelectAIModelRender inputs={sortInputs} item={item} moduleId={moduleId} />
)}
{item.type === FlowNodeInputTypeEnum.selectDataset && (
<SelectDatasetRender item={item} moduleId={moduleId} />
@@ -446,12 +449,21 @@ const AISetting = React.memo(function AISetting({ inputs = [], moduleId }: Rende
);
});
const SelectChatModelRender = React.memo(function SelectChatModelRender({
const SelectAIModelRender = React.memo(function SelectAIModelRender({
inputs = [],
item,
moduleId
}: RenderProps) {
const modelList = chatModelList || [];
const modelList = (() => {
if (item.type === FlowNodeInputTypeEnum.selectChatModel) return chatModelList;
if (item.type === FlowNodeInputTypeEnum.selectCQModel) return cqModelList;
return [];
})().map((item) => ({
model: item.model,
name: item.name,
maxResponse: item.maxResponse,
price: item.price
}));
const onChangeModel = useCallback(
(e: string) => {

View File

@@ -44,6 +44,7 @@ export type getHistoriesProps = {
};
export type UpdateHistoryProps = {
appId: string;
chatId: string;
customTitle?: string;
top?: boolean;
@@ -52,6 +53,7 @@ export type UpdateHistoryProps = {
};
export type DelHistoryProps = {
appId: string;
chatId: string;
shareId?: string;
outLinkUid?: string;
@@ -64,6 +66,7 @@ export type ClearHistoriesProps = {
/* -------- chat item ---------- */
export type DeleteChatItemProps = {
appId: string;
chatId: string;
contentId?: string;
shareId?: string;

View File

@@ -18,46 +18,40 @@ A2:
`
};
export const Prompt_ExtractJson = `你可以从 "对话记录" 中提取指定信息,并返回一个 JSON 对象JSON 对象要求:
1. JSON 对象仅包含字段说明中的值。
2. 字段说明中的 required 决定 JSON 对象是否必须存在该字段。
3. 必须存在的字段,值可以为空字符串或根据提取要求来设置,不能随机生成值。
提取要求:
"""
export const Prompt_ExtractJson = `你可以从 <对话记录></对话记录> 中提取指定 JSON 信息,你仅需返回 JSON 字符串,无需回答问题。
<提取要求>
{{description}}
"""
</提取要求>
<字段说明>
1. 下面的 JSON 字符串均按照 JSON Schema 的规则描述。
2. key 代表字段名description 代表字段的描述required 代表字段是否必须。
3. 如果字段内容为空,你可以返回空字符串。
字段说明:
"""
{{json}}
"""
</字段说明>
对话记录:
"""
<对话记录>
{{text}}
"""
</对话记录>
`;
export const Prompt_CQJson = `我会给你几个问题类型,请参考额外的背景知识(可能为空)和对话内容,判断我本次的问题类型,并返回对应类型的 ID格式为 JSON 字符串:
"""
'{"type":"问题类型的 ID"}'
'{"问题类型":"类型的 ID"}'
"""
问题类型
"""
<问题类型>
{{typeList}}
"""
</问题类型>
额外背景知识:
"""
<背景知识>
{{systemPrompt}}
"""
</背景知识>
对话内容
"""
<对话内容>
{{text}}
"""
</对话内容>
`;
export const Prompt_QuestionGuide = `我不太清楚问你什么问题,请帮我生成 3 个问题引导我继续提问。问题的长度应小于20个字符按 JSON 格式返回: ["问题1", "问题2", "问题3"]`;

View File

@@ -71,47 +71,6 @@ function simpleChatTemplate({
}
]
},
{
moduleId: 'history',
name: '聊天记录',
avatar: '/imgs/module/history.png',
flowType: 'historyNode',
position: {
x: 452.5466249541586,
y: 1276.3930310334215
},
inputs: [
{
key: 'maxContext',
type: 'numberInput',
label: '最长记录数',
value: 10,
min: 0,
max: 50,
connected: true
},
{
key: 'history',
type: 'hidden',
label: '聊天记录',
connected: true
}
],
outputs: [
{
key: 'history',
label: '聊天记录',
valueType: 'chatHistory',
type: 'source',
targets: [
{
moduleId: 'chatModule',
key: 'history'
}
]
}
]
},
{
moduleId: 'chatModule',
name: 'AI 对话',
@@ -191,7 +150,6 @@ function simpleChatTemplate({
type: 'hidden',
label: '引用内容模板',
valueType: 'string',
value: '',
connected: true
},
{
@@ -199,7 +157,6 @@ function simpleChatTemplate({
type: 'hidden',
label: '引用内容提示词',
valueType: 'string',
value: '',
connected: true
},
{
@@ -234,7 +191,8 @@ function simpleChatTemplate({
type: 'target',
label: 'core.module.input.label.chat history',
valueType: 'chatHistory',
connected: true
connected: true,
value: 8
},
{
key: 'userChatInput',
@@ -318,47 +276,6 @@ function datasetTemplate({
}
]
},
{
moduleId: 'history',
name: '聊天记录',
avatar: '/imgs/module/history.png',
flowType: 'historyNode',
position: {
x: 452.5466249541586,
y: 1276.3930310334215
},
inputs: [
{
key: 'maxContext',
type: 'numberInput',
label: '最长记录数',
value: 6,
min: 0,
max: 50,
connected: true
},
{
key: 'history',
type: 'hidden',
label: '聊天记录',
connected: true
}
],
outputs: [
{
key: 'history',
label: '聊天记录',
valueType: 'chatHistory',
type: 'source',
targets: [
{
moduleId: 'chatModule',
key: 'history'
}
]
}
]
},
{
moduleId: 'datasetSearch',
name: '知识库搜索',
@@ -541,7 +458,6 @@ function datasetTemplate({
type: 'hidden',
label: '引用内容模板',
valueType: 'string',
value: '',
connected: true
},
{
@@ -549,7 +465,6 @@ function datasetTemplate({
type: 'hidden',
label: '引用内容提示词',
valueType: 'string',
value: '',
connected: true
},
{
@@ -584,7 +499,8 @@ function datasetTemplate({
type: 'target',
label: 'core.module.input.label.chat history',
valueType: 'chatHistory',
connected: true
connected: true,
value: 8
},
{
key: 'userChatInput',

View File

@@ -89,18 +89,19 @@ function chatModelInput(formData: AppSimpleEditFormType): FlowNodeInputItemType[
label: '触发器',
connected: formData.dataset.datasets.length > 0 && !!formData.dataset.searchEmptyText
},
{
key: 'history',
type: 'target',
label: 'core.module.input.label.chat history',
connected: true,
value: 6
},
{
key: 'quoteQA',
type: 'target',
label: '引用内容',
connected: formData.dataset.datasets.length > 0
},
{
key: 'history',
type: 'target',
label: '聊天记录',
connected: true
},
{
key: 'userChatInput',
type: 'target',
@@ -139,41 +140,6 @@ function simpleChatTemplate(formData: AppSimpleEditFormType): ModuleItemType[] {
},
moduleId: 'userChatInput'
},
{
name: '聊天记录',
flowType: FlowNodeTypeEnum.historyNode,
inputs: [
{
key: 'maxContext',
value: 6,
connected: true,
type: 'numberInput',
label: '最长记录数'
},
{
key: 'history',
type: 'hidden',
label: '聊天记录',
connected: true
}
],
outputs: [
{
key: 'history',
targets: [
{
moduleId: 'chatModule',
key: 'history'
}
]
}
],
position: {
x: 452.5466249541586,
y: 1276.3930310334215
},
moduleId: 'history'
},
{
name: 'AI 对话',
flowType: FlowNodeTypeEnum.chatNode,
@@ -238,41 +204,6 @@ function datasetTemplate(formData: AppSimpleEditFormType): ModuleItemType[] {
},
moduleId: 'userChatInput'
},
{
name: '聊天记录',
flowType: FlowNodeTypeEnum.historyNode,
inputs: [
{
key: 'maxContext',
value: 6,
connected: true,
type: 'numberInput',
label: '最长记录数'
},
{
key: 'history',
type: 'hidden',
label: '聊天记录',
connected: true
}
],
outputs: [
{
key: 'history',
targets: [
{
moduleId: 'chatModule',
key: 'history'
}
]
}
],
position: {
x: 452.5466249541586,
y: 1276.3930310334215
},
moduleId: 'history'
},
{
name: '知识库搜索',
flowType: FlowNodeTypeEnum.datasetSearchNode,

View File

@@ -64,8 +64,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
appId,
modules,
variables,
params: {
history,
histories: history,
startParams: {
userChatInput: prompt
},
stream: true,

View File

@@ -10,11 +10,12 @@ import { autChatCrud } from '@/service/support/permission/auth/chat';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { chatId, shareId, outLinkUid } = req.query as DelHistoryProps;
const { appId, chatId, shareId, outLinkUid } = req.query as DelHistoryProps;
await autChatCrud({
req,
authToken: true,
appId,
chatId,
shareId,
outLinkUid,

View File

@@ -7,7 +7,7 @@ import { autChatCrud } from '@/service/support/permission/auth/chat';
/* 初始化我的聊天框,需要身份验证 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
const { chatId, chatItemId, shareId, outLinkUid, userBadFeedback, userGoodFeedback } =
const { appId, chatId, chatItemId, shareId, outLinkUid, userBadFeedback, userGoodFeedback } =
req.body as UpdateChatFeedbackProps;
try {
@@ -16,6 +16,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await autChatCrud({
req,
authToken: true,
appId,
chatId,
shareId,
outLinkUid,

View File

@@ -8,7 +8,7 @@ import type { DeleteChatItemProps } from '@/global/core/chat/api.d';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { chatId, contentId, shareId, outLinkUid } = req.query as DeleteChatItemProps;
const { appId, chatId, contentId, shareId, outLinkUid } = req.query as DeleteChatItemProps;
if (!contentId || !chatId) {
return jsonRes(res);
@@ -17,6 +17,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await autChatCrud({
req,
authToken: true,
appId,
chatId,
shareId,
outLinkUid,

View File

@@ -9,11 +9,12 @@ import { autChatCrud } from '@/service/support/permission/auth/chat';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { chatId, shareId, outLinkUid, customTitle, top } = req.body as UpdateHistoryProps;
const { appId, chatId, shareId, outLinkUid, customTitle, top } = req.body as UpdateHistoryProps;
await autChatCrud({
req,
authToken: true,
appId,
chatId,
shareId,
outLinkUid,

View File

@@ -16,7 +16,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const [userPlugins, plusPlugins] = await Promise.all([
MongoPlugin.find({ teamId }).lean(),
global.systemEnv.pluginBaseUrl ? GET<PluginTemplateType[]>('/core/plugin/getTemplates') : []
global.systemEnv?.pluginBaseUrl ? GET<PluginTemplateType[]>('/core/plugin/getTemplates') : []
]);
const data: FlowModuleTemplateType[] = [

View File

@@ -21,6 +21,7 @@ import { SimpleModeTemplate_FastGPT_Universal } from '@/global/core/app/constant
import { getSimpleTemplatesFromPlus } from '@/service/core/app/utils';
import { PluginTypeEnum } from '@fastgpt/global/core/plugin/constants';
import { getFastGPTFeConfig } from '@fastgpt/service/common/system/config/controller';
import { connectToDatabase } from '@/service/mongo';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
await getInitConfig();
@@ -68,6 +69,7 @@ const defaultFeConfigs: FeConfigsType = {
export async function getInitConfig() {
try {
if (global.feConfigs) return;
await connectToDatabase();
initGlobal();
const filename =

View File

@@ -180,6 +180,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
req,
authToken: true,
authApiKey: true,
appId: app._id,
chatId,
shareId,
outLinkUid,
@@ -188,7 +189,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
// get and concat history
const { history } = await getChatItems({ chatId, limit: 30, field: `dataId obj value` });
const concatHistory = history.concat(chatMessages);
const concatHistories = history.concat(chatMessages);
/* start flow controller */
const { responseData, answerText } = await dispatchModules({
@@ -200,8 +201,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
teamId: user.team.teamId,
tmbId: user.team.tmbId,
variables,
params: {
history: concatHistory,
histories: concatHistories,
startParams: {
userChatInput: question.value
},
stream,

View File

@@ -1,4 +1,4 @@
import React, { useRef, useState } from 'react';
import React, { useCallback, useRef, useState } from 'react';
import { Box, Flex, IconButton, useTheme, useDisclosure } from '@chakra-ui/react';
import { SmallCloseIcon } from '@chakra-ui/icons';
import { ModuleItemType } from '@fastgpt/global/core/module/type';
@@ -14,6 +14,7 @@ import MyTooltip from '@/components/MyTooltip';
import ChatTest, { type ChatTestComponentRef } from '@/components/core/module/Flow/ChatTest';
import { flowNode2Modules, useFlowProviderStore } from '@/components/core/module/Flow/FlowProvider';
import { useAppStore } from '@/web/core/app/store/useAppStore';
import { useToast } from '@/web/common/hooks/useToast';
const ImportSettings = dynamic(() => import('@/components/core/module/Flow/ImportSettings'));
@@ -31,6 +32,7 @@ const RenderHeaderContainer = React.memo(function RenderHeaderContainer({
setTestModules: React.Dispatch<ModuleItemType[] | undefined>;
}) {
const theme = useTheme();
const { toast } = useToast();
const { t } = useTranslation();
const { copyData } = useCopyData();
const { isOpen: isOpenImport, onOpen: onOpenImport, onClose: onCloseImport } = useDisclosure();
@@ -38,8 +40,8 @@ const RenderHeaderContainer = React.memo(function RenderHeaderContainer({
const { nodes, edges, onFixView } = useFlowProviderStore();
const { mutate: onclickSave, isLoading } = useRequest({
mutationFn: () => {
const flow2ModulesAndCheck = useCallback(
(tip = false) => {
const modules = flowNode2Modules({ nodes, edges });
// check required connect
for (let i = 0; i < modules.length; i++) {
@@ -51,12 +53,24 @@ const RenderHeaderContainer = React.memo(function RenderHeaderContainer({
return false;
})
) {
return Promise.reject(`${item.name}】存在未填或未连接参数`);
const msg = `${item.name}】存在未填或未连接参数`;
tip &&
toast({
status: 'warning',
title: msg
});
return Promise.reject(msg);
}
}
return modules;
},
[edges, nodes, toast]
);
const { mutate: onclickSave, isLoading } = useRequest({
mutationFn: async () => {
return updateAppDetail(app._id, {
modules,
modules: await flow2ModulesAndCheck(),
type: AppTypeEnum.advanced,
permission: undefined
});
@@ -139,8 +153,8 @@ const RenderHeaderContainer = React.memo(function RenderHeaderContainer({
borderRadius={'lg'}
aria-label={'save'}
variant={'base'}
onClick={() => {
setTestModules(flowNode2Modules({ nodes, edges }));
onClick={async () => {
setTestModules(await flow2ModulesAndCheck(true));
}}
/>
</MyTooltip>

View File

@@ -321,6 +321,7 @@ function DetailLogsModal({
showMarkIcon
showVoiceIcon={false}
userGuideModule={chat?.app?.userGuideModule}
appId={appId}
chatId={chatId}
/>
</Box>

View File

@@ -1,9 +0,0 @@
import { OutLinkSchema } from '@fastgpt/global/support/outLink/type';
import React from 'react';
import MyModal from '@/components/MyModal';
// Placeholder embed modal. This stub is the entire deleted file
// (the commit's diff shows it removed and superseded by SelectUsingWayModal).
// It renders a permanently-open MyModal containing only the literal text "EmbModal";
// the `share` prop is accepted but never read.
const EmbModal = ({ share }: { share: OutLinkSchema }) => {
return <MyModal isOpen>EmbModal</MyModal>;
};
export default EmbModal;

View File

@@ -0,0 +1,209 @@
import { OutLinkSchema } from '@fastgpt/global/support/outLink/type';
import React, { useCallback, useState } from 'react';
import MyModal from '@/components/MyModal';
import { useTranslation } from 'next-i18next';
import { Box, Flex, FlexProps, Grid, Image, ModalBody, Switch, useTheme } from '@chakra-ui/react';
import MyRadio from '@/components/common/MyRadio';
import { useForm } from 'react-hook-form';
import MyIcon from '@/components/Icon';
import { useCopyData } from '@/web/common/hooks/useCopyData';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { fileToBase64 } from '@/web/common/file/utils';
// The three supported ways to consume a published share link:
// a plain URL, an <iframe> embed, or a floating <script> chat widget.
// String-valued so the form value round-trips cleanly through react-hook-form.
enum UsingWayEnum {
link = 'link',
iframe = 'iframe',
script = 'script'
}
/**
 * Modal that lets the user choose how to use an app's outbound share link
 * (plain link / iframe embed / script widget), configure a few options
 * (history visibility; for the script way: draggable icon, default-open,
 * custom open/close icons), and copy the generated URL or embed snippet.
 *
 * @param share   the outbound link record; only `share.shareId` is read here
 * @param onClose callback invoked when the modal is dismissed
 */
const SelectUsingWayModal = ({ share, onClose }: { share: OutLinkSchema; onClose: () => void }) => {
const { t } = useTranslation();
const theme = useTheme();
const { copyData } = useCopyData();
// Single-image file picker, reused for both the open-icon and close-icon
// uploads; `onOpen(key)` is called with the form key to fill.
const { File, onOpen } = useSelectFile({
multiple: false,
fileType: 'image/*'
});
// Radio options for the three usage ways; each option's title is an
// illustrative image rather than text (MyRadio accepts ReactNode titles).
const VariableTypeList = [
{
title: <Image src={'/imgs/outlink/link.svg'} alt={''} />,
value: UsingWayEnum.link
},
{
title: <Image src={'/imgs/outlink/iframe.svg'} alt={''} />,
value: UsingWayEnum.iframe
},
{
title: <Image src={'/imgs/outlink/script.svg'} alt={''} />,
value: UsingWayEnum.script
}
];
// Dummy state toggled by the `watch` callback below to force a re-render,
// since `getValues` reads are not reactive on their own.
const [refresh, setRefresh] = useState(false);
const { getValues, setValue, register, watch } = useForm({
defaultValues: {
usingWay: UsingWayEnum.link,
showHistory: true,
scriptIconCanDrag: true,
scriptDefaultOpen: true,
// Default widget icons, inlined as base64 SVG data URIs so the embed
// snippet works without extra hosted assets.
scriptOpenIcon:
'data:image/svg+xml;base64,PHN2ZyB0PSIxNjkwNTMyNzg1NjY0IiBjbGFzcz0iaWNvbiIgdmlld0JveD0iMCAwIDEwMjQgMTAyNCIgdmVyc2lvbj0iMS4xIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHAtaWQ9IjQxMzIiIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIj48cGF0aCBkPSJNNTEyIDMyQzI0Ny4wNCAzMiAzMiAyMjQgMzIgNDY0QTQxMC4yNCA0MTAuMjQgMCAwIDAgMTcyLjQ4IDc2OEwxNjAgOTY1LjEyYTI1LjI4IDI1LjI4IDAgMCAwIDM5LjA0IDIyLjRsMTY4LTExMkE1MjguNjQgNTI4LjY0IDAgMCAwIDUxMiA4OTZjMjY0Ljk2IDAgNDgwLTE5MiA0ODAtNDMyUzc3Ni45NiAzMiA1MTIgMzJ6IG0yNDQuOCA0MTZsLTM2MS42IDMwMS43NmExMi40OCAxMi40OCAwIDAgMS0xOS44NC0xMi40OGw1OS4yLTIzMy45MmgtMTYwYTEyLjQ4IDEyLjQ4IDAgMCAxLTcuMzYtMjMuMzZsMzYxLjYtMzAxLjc2YTEyLjQ4IDEyLjQ4IDAgMCAxIDE5Ljg0IDEyLjQ4bC01OS4yIDIzMy45MmgxNjBhMTIuNDggMTIuNDggMCAwIDEgOCAyMi4wOHoiIGZpbGw9IiM0ZTgzZmQiIHAtaWQ9IjQxMzMiPjwvcGF0aD48L3N2Zz4=',
scriptCloseIcon:
'data:image/svg+xml;base64,PHN2ZyB0PSIxNjkwNTM1NDQxNTI2IiBjbGFzcz0iaWNvbiIgdmlld0JveD0iMCAwIDEwMjQgMTAyNCIgdmVyc2lvbj0iMS4xIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHAtaWQ9IjYzNjciIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIj48cGF0aCBkPSJNNTEyIDEwMjRBNTEyIDUxMiAwIDEgMSA1MTIgMGE1MTIgNTEyIDAgMCAxIDAgMTAyNHpNMzA1Ljk1NjU3MSAzNzAuMzk1NDI5TDQ0Ny40ODggNTEyIDMwNS45NTY1NzEgNjUzLjYwNDU3MWE0NS41NjggNDUuNTY4IDAgMSAwIDY0LjQzODg1OCA2NC40Mzg4NThMNTEyIDU3Ni41MTJsMTQxLjYwNDU3MSAxNDEuNTMxNDI5YTQ1LjU2OCA0NS41NjggMCAwIDAgNjQuNDM4ODU4LTY0LjQzODg1OEw1NzYuNTEyIDUxMmwxNDEuNTMxNDI5LTE0MS42MDQ1NzFhNDUuNTY4IDQ1LjU2OCAwIDEgMC02NC40Mzg4NTgtNjQuNDM4ODU4TDUxMiA0NDcuNDg4IDM3MC4zOTU0MjkgMzA1Ljk1NjU3MWE0NS41NjggNDUuNTY4IDAgMCAwLTY0LjQzODg1OCA2NC40Mzg4NTh6IiBmaWxsPSIjNGU4M2ZkIiBwLWlkPSI2MzY4Ij48L3BhdGg+PC9zdmc+'
}
});
// Converts the chosen image file to a base64 data URI and stores it under
// the given form key (open or close icon).
const selectFile = useCallback(
async (files: File[], key: 'scriptOpenIcon' | 'scriptCloseIcon') => {
const file = files[0];
if (!file) return;
// image to base64
const base64 = await fileToBase64(file);
setValue(key, base64);
},
[setValue]
);
// Re-render on any form change so the snippet below reflects current values.
// NOTE(review): `watch(callback)` is called in the render body, which
// registers a new subscription on every render without unsubscribing —
// confirm this doesn't accumulate subscriptions (react-hook-form `watch`
// returns an unsubscribe handle normally used inside useEffect).
watch(() => {
setRefresh(!refresh);
});
// Base share-chat URL; `&showHistory=0` is appended only when history is
// toggled off (shown by default).
const linkUrl = `${location?.origin}/chat/share?shareId=${share?.shareId}${
getValues('showHistory') ? '' : '&showHistory=0'
}`;
// Per-way block title (i18n key) and the exact text the copy button emits.
const wayMap = {
[UsingWayEnum.link]: {
blockTitle: t('core.app.outLink.Link block title'),
code: linkUrl
},
[UsingWayEnum.iframe]: {
blockTitle: t('core.app.outLink.Iframe block title'),
code: `<iframe
src="${linkUrl}"
style="width: 100%; height: 100%;"
frameborder="0"
allow="microphone"
/>`
},
[UsingWayEnum.script]: {
blockTitle: t('core.app.outLink.Script block title'),
code: `<script
src="${location?.origin}/js/iframe.js"
id="chatbot-iframe"
data-bot-src="${linkUrl}"
data-default-open="${getValues('scriptDefaultOpen') ? 'true' : 'false'}"
data-drag="${getValues('scriptIconCanDrag') ? 'true' : 'false'}"
data-open-icon="${getValues('scriptOpenIcon')}"
data-close-icon="${getValues('scriptCloseIcon')}"
defer
/>`
}
};
// Shared styling for each option row in the config grid.
const gridItemStyle: FlexProps = {
alignItems: 'center',
bg: 'myWhite.600',
p: 2,
borderRadius: 'md',
border: theme.borders.sm
};
return (
<MyModal
isOpen
iconSrc="/imgs/modal/usingWay.svg"
title={t('core.app.outLink.Select Using Way')}
onClose={onClose}
maxW={['90vw', '700px']}
>
<ModalBody py={4}>
<MyRadio
gridGap={2}
gridTemplateColumns={['repeat(1,1fr)', 'repeat(3,1fr)']}
value={getValues('usingWay')}
list={VariableTypeList}
hiddenCircle
p={0}
onChange={(e) => {
setValue('usingWay', e);
}}
/>
{/* config */}
<Grid gridTemplateColumns={['repeat(3,1fr)']} gridGap={4} my={5}>
<Flex {...gridItemStyle}>
<Box flex={1}>{t('core.app.outLink.Show History')}</Box>
<Switch {...register('showHistory')} />
</Flex>
{getValues('usingWay') === UsingWayEnum.script && (
<>
<Flex {...gridItemStyle}>
<Box flex={1}>{t('core.app.outLink.Can Drag')}</Box>
<Switch {...register('scriptIconCanDrag')} />
</Flex>
<Flex {...gridItemStyle}>
<Box flex={1}>{t('core.app.outLink.Default open')}</Box>
<Switch {...register('scriptDefaultOpen')} />
</Flex>
<Flex {...gridItemStyle}>
<Box flex={1}>{t('core.app.outLink.Script Open Icon')}</Box>
<Image
src={getValues('scriptOpenIcon')}
alt={''}
w={'20px'}
h={'20px'}
cursor={'pointer'}
onClick={() => onOpen('scriptOpenIcon')}
/>
</Flex>
<Flex {...gridItemStyle}>
<Box flex={1}>{t('core.app.outLink.Script Close Icon')}</Box>
<Image
src={getValues('scriptCloseIcon')}
alt={''}
w={'20px'}
h={'20px'}
cursor={'pointer'}
onClick={() => onOpen('scriptCloseIcon')}
/>
</Flex>
</>
)}
</Grid>
{/* code */}
<Box borderRadius={'md'} bg={'myGray.100'} overflow={'hidden'}>
<Flex
p={3}
bg={'myWhite.500'}
border={theme.borders.base}
borderTopLeftRadius={'md'}
borderTopRightRadius={'md'}
>
<Box flex={1}>{wayMap[getValues('usingWay')].blockTitle}</Box>
<MyIcon
name={'copy'}
w={'16px'}
color={'myGray.600'}
cursor={'pointer'}
_hover={{ color: 'myBlue.600' }}
onClick={() => {
copyData(wayMap[getValues('usingWay')].code);
}}
/>
</Flex>
<Box whiteSpace={'pre'} p={3} overflowX={'auto'}>
{wayMap[getValues('usingWay')].code}
</Box>
</Box>
</ModalBody>
<File onSelect={selectFile} />
</MyModal>
);
};
export default SelectUsingWayModal;

View File

@@ -34,7 +34,7 @@ import { formatTimeToChatTime } from '@/utils/tools';
import { useCopyData } from '@/web/common/hooks/useCopyData';
import { useForm } from 'react-hook-form';
import { defaultOutLinkForm } from '@/constants/app';
import type { OutLinkEditType } from '@fastgpt/global/support/outLink/type.d';
import type { OutLinkEditType, OutLinkSchema } from '@fastgpt/global/support/outLink/type.d';
import { useRequest } from '@/web/common/hooks/useRequest';
import { formatPrice } from '@fastgpt/global/support/wallet/bill/tools';
import { OutLinkTypeEnum } from '@fastgpt/global/support/outLink/constant';
@@ -45,12 +45,16 @@ import MyTooltip from '@/components/MyTooltip';
import MyModal from '@/components/MyModal';
import dayjs from 'dayjs';
import { getDocPath } from '@/web/common/system/doc';
import dynamic from 'next/dynamic';
const SelectUsingWayModal = dynamic(() => import('./SelectUsingWayModal'));
const Share = ({ appId }: { appId: string }) => {
const { t } = useTranslation();
const { Loading, setIsLoading } = useLoading();
const { copyData } = useCopyData();
const [editLinkData, setEditLinkData] = useState<OutLinkEditType>();
const [selectedLinkData, setSelectedLinkData] = useState<OutLinkSchema>();
const { toast } = useToast();
const {
@@ -141,6 +145,15 @@ const Share = ({ appId }: { appId: string }) => {
<MyIcon name={'more'} w={'14px'} p={2} />
</MenuButton>
<MenuList color={'myGray.700'} minW={`120px !important`} zIndex={10}>
<MenuItem
onClick={() => {
setSelectedLinkData(item);
}}
py={[2, 3]}
>
<MyIcon name={'copy'} w={['14px', '16px']} />
<Box ml={[1, 2]}>{t('core.app.outLink.Select Mode')}</Box>
</MenuItem>
<MenuItem
onClick={() =>
setEditLinkData({
@@ -155,28 +168,6 @@ const Share = ({ appId }: { appId: string }) => {
<MyIcon name={'edit'} w={['14px', '16px']} />
<Box ml={[1, 2]}>{t('common.Edit')}</Box>
</MenuItem>
<MenuItem
onClick={() => {
const url = `${location.origin}/chat/share?shareId=${item.shareId}`;
copyData(url, '已复制分享链接,可直接分享使用');
}}
py={[2, 3]}
>
<MyIcon name={'copy'} w={['14px', '16px']} />
<Box ml={[1, 2]}>{t('common.Copy')}</Box>
</MenuItem>
<MenuItem
onClick={() => {
const url = `${location.origin}/chat/share?shareId=${item.shareId}`;
const src = `${location.origin}/js/iframe.js`;
const script = `<script src="${src}" id="fastgpt-iframe" data-src="${url}" data-color="#4e83fd"></script>`;
copyData(script, '已复制嵌入 Script可在应用 HTML 底部嵌入', 3000);
}}
py={[2, 3]}
>
<MyIcon name={'apiLight'} w={['14px', '16px']} />
<Box ml={[1, 2]}>{t('outlink.Copy IFrame')}</Box>
</MenuItem>
<MenuItem
onClick={async () => {
setIsLoading(true);
@@ -232,6 +223,12 @@ const Share = ({ appId }: { appId: string }) => {
onClose={() => setEditLinkData(undefined)}
/>
)}
{!!selectedLinkData && (
<SelectUsingWayModal
share={selectedLinkData}
onClose={() => setSelectedLinkData(undefined)}
/>
)}
<Loading loading={isFetching} fixed={false} />
</Box>
);
@@ -290,7 +287,7 @@ function EditLinkModal({
>
<ModalBody>
<Flex alignItems={'center'}>
<Box flex={'0 0 90px'}>{t('Name')}:</Box>
<Box flex={'0 0 90px'}>{t('Name')}</Box>
<Input
placeholder={t('outlink.Link Name') || 'Link Name'}
maxLength={20}
@@ -303,7 +300,7 @@ function EditLinkModal({
<>
<Flex alignItems={'center'} mt={4}>
<Flex flex={'0 0 90px'} alignItems={'center'}>
QPM:
QPM
<MyTooltip label={t('outlink.QPM Tips' || '')}>
<QuestionOutlineIcon ml={1} />
</MyTooltip>
@@ -320,7 +317,7 @@ function EditLinkModal({
</Flex>
<Flex alignItems={'center'} mt={4}>
<Flex flex={'0 0 90px'} alignItems={'center'}>
{t('common.Max credit')}:
{t('common.Max credit')}
<MyTooltip label={t('common.Max credit tips' || '')}>
<QuestionOutlineIcon ml={1} />
</MyTooltip>
@@ -336,7 +333,7 @@ function EditLinkModal({
</Flex>
<Flex alignItems={'center'} mt={4}>
<Flex flex={'0 0 90px'} alignItems={'center'}>
{t('common.Expired Time')}:
{t('common.Expired Time')}
</Flex>
<Input
type="datetime-local"
@@ -351,7 +348,7 @@ function EditLinkModal({
/>
</Flex>
<Flex alignItems={'center'} mt={4}>
<Flex flex={'0 0 90px'}>
<Flex flex={'0 0 90px'} alignItems={'center'}>
{t('outlink.token auth')}
<MyTooltip label={t('outlink.token auth Tips') || ''}>
<QuestionOutlineIcon ml={1} />
@@ -359,6 +356,7 @@ function EditLinkModal({
</Flex>
<Input
placeholder={t('outlink.token auth Tips') || ''}
fontSize={'sm'}
{...register('limit.hookUrl')}
/>
</Flex>
@@ -375,7 +373,7 @@ function EditLinkModal({
<Flex alignItems={'center'} mt={4}>
<Flex flex={'0 0 90px'} alignItems={'center'}>
{t('outlink.Response Detail')}:
{t('outlink.Response Detail')}
<MyTooltip label={t('outlink.Response Detail tips' || '')}>
<QuestionOutlineIcon ml={1} />
</MyTooltip>

View File

@@ -51,6 +51,7 @@ import { SimpleModeTemplate_FastGPT_Universal } from '@/global/core/app/constant
import QGSwitch from '@/components/core/module/Flow/components/modules/QGSwitch';
import TTSSelect from '@/components/core/module/Flow/components/modules/TTSSelect';
import VariableEdit from '@/components/core/module/Flow/components/modules/VariableEdit';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
const InfoModal = dynamic(() => import('../InfoModal'));
const DatasetSelectModal = dynamic(() => import('@/components/core/module/DatasetSelectModal'));
@@ -635,10 +636,19 @@ function ChatTest({ appId }: { appId: string }) {
const startChat = useCallback(
async ({ chatList, controller, generatingMessage, variables }: StartChatFnProps) => {
const historyMaxLen =
modules
?.find((item) => item.flowType === FlowNodeTypeEnum.historyNode)
?.inputs?.find((item) => item.key === 'maxContext')?.value || 0;
let historyMaxLen = 0;
modules.forEach((module) => {
module.inputs.forEach((input) => {
if (
(input.key === ModuleInputKeyEnum.history ||
input.key === ModuleInputKeyEnum.historyMaxAmount) &&
typeof input.value === 'number'
) {
historyMaxLen = Math.max(historyMaxLen, input.value);
}
});
});
const history = chatList.slice(-historyMaxLen - 2, -2);
// 流请求,获取数据

View File

@@ -15,6 +15,7 @@ const ChatHeader = ({
appAvatar,
chatModels,
appId,
showHistory,
onOpenSlider
}: {
history: ChatItemType[];
@@ -22,6 +23,7 @@ const ChatHeader = ({
appAvatar: string;
chatModels?: string[];
appId?: string;
showHistory?: boolean;
onOpenSlider: () => void;
}) => {
const router = useRouter();
@@ -63,7 +65,16 @@ const ChatHeader = ({
</>
) : (
<>
<MyIcon name={'menu'} w={'20px'} h={'20px'} color={'myGray.900'} onClick={onOpenSlider} />
{showHistory && (
<MyIcon
name={'menu'}
w={'20px'}
h={'20px'}
color={'myGray.900'}
onClick={onOpenSlider}
/>
)}
<Flex px={3} alignItems={'center'} flex={'1 0 0'} w={0} justifyContent={'center'}>
<Avatar src={appAvatar} w={'16px'} />
<Box

View File

@@ -1,15 +1,3 @@
.stopIcon {
animation: zoomStopIcon 0.4s infinite alternate;
}
@keyframes zoomStopIcon {
0% {
transform: scale(0.8);
}
100% {
transform: scale(1.2);
}
}
.newChat {
.modelListContainer {
height: 0;

View File

@@ -297,7 +297,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
onCloseSlider();
}
}}
onDelHistory={delOneHistory}
onDelHistory={(e) => delOneHistory({ ...e, appId })}
onClearHistory={() => {
clearHistories({ appId });
router.replace({
@@ -307,10 +307,11 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
});
}}
onSetHistoryTop={(e) => {
updateHistory(e);
updateHistory({ ...e, appId });
}}
onSetCustomTitle={async (e) => {
updateHistory({
appId,
chatId: e.chatId,
title: e.title,
customTitle: e.title
@@ -334,6 +335,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
history={chatData.history}
chatModels={chatData.app.chatModels}
onOpenSlider={onOpenSlider}
showHistory
/>
{/* chat box */}
@@ -348,7 +350,8 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
feedbackType={'user'}
onUpdateVariable={(e) => {}}
onStartChat={startChat}
onDelMessage={(e) => delOneHistoryItem({ ...e, chatId })}
onDelMessage={(e) => delOneHistoryItem({ ...e, appId, chatId })}
appId={appId}
chatId={chatId}
/>
</Box>

View File

@@ -300,7 +300,9 @@ const OutLink = ({
onCloseSlider();
}
}}
onDelHistory={({ chatId }) => delOneHistory({ chatId, shareId, outLinkUid })}
onDelHistory={({ chatId }) =>
delOneHistory({ appId: chatData.appId, chatId, shareId, outLinkUid })
}
onClearHistory={() => {
clearHistories({ shareId, outLinkUid });
router.replace({
@@ -313,12 +315,14 @@ const OutLink = ({
onSetHistoryTop={(e) => {
updateHistory({
...e,
appId: chatData.appId,
shareId,
outLinkUid
});
}}
onSetCustomTitle={async (e) => {
updateHistory({
appId: chatData.appId,
chatId: e.chatId,
title: e.title,
customTitle: e.title,
@@ -343,6 +347,7 @@ const OutLink = ({
appAvatar={chatData.app.avatar}
appName={chatData.app.name}
history={chatData.history}
showHistory={showHistory === '1'}
onOpenSlider={onOpenSlider}
/>
{/* chat box */}
@@ -357,7 +362,10 @@ const OutLink = ({
feedbackType={'user'}
onUpdateVariable={(e) => {}}
onStartChat={startChat}
onDelMessage={(e) => delOneHistoryItem({ ...e, chatId, shareId, outLinkUid })}
onDelMessage={(e) =>
delOneHistoryItem({ ...e, appId: chatData.appId, chatId, shareId, outLinkUid })
}
appId={chatData.appId}
chatId={chatId}
shareId={shareId}
outLinkUid={outLinkUid}

View File

@@ -3,7 +3,7 @@ import { GET } from '@fastgpt/service/common/api/plusRequest';
export async function getSimpleTemplatesFromPlus(): Promise<AppSimpleEditConfigTemplateType[]> {
try {
if (!global.systemEnv.pluginBaseUrl) return [];
if (!global.systemEnv?.pluginBaseUrl) return [];
return GET<AppSimpleEditConfigTemplateType[]>('/core/app/getSimpleTemplates');
} catch (error) {

View File

@@ -112,7 +112,7 @@ export async function searchDatasetData(props: SearchProps) {
limit: maxTokens,
searchMode = DatasetSearchModeEnum.embedding
} = props;
searchMode = global.systemEnv.pluginBaseUrl ? searchMode : DatasetSearchModeEnum.embedding;
searchMode = global.systemEnv?.pluginBaseUrl ? searchMode : DatasetSearchModeEnum.embedding;
// Compatible with topk limit
if (maxTokens < 50) {

View File

@@ -10,11 +10,12 @@ import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { Prompt_CQJson } from '@/global/core/prompt/agent';
import { FunctionModelItemType } from '@fastgpt/global/core/ai/model.d';
import { getCQModel } from '@/service/core/ai/model';
import { getHistories } from '../utils';
type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.aiModel]: string;
[ModuleInputKeyEnum.aiSystemPrompt]?: string;
[ModuleInputKeyEnum.history]?: ChatItemType[];
[ModuleInputKeyEnum.history]?: ChatItemType[] | number;
[ModuleInputKeyEnum.userChatInput]: string;
[ModuleInputKeyEnum.agents]: ClassifyQuestionAgentItemType[];
}>;
@@ -23,13 +24,14 @@ type CQResponse = {
[key: string]: any;
};
const agentFunName = 'agent_user_question';
const agentFunName = 'classify_question';
/* request openai chat */
export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse> => {
const {
user,
inputs: { model, agents, userChatInput }
histories,
inputs: { model, history = 6, agents, userChatInput }
} = props as Props;
if (!userChatInput) {
@@ -42,11 +44,13 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
if (cqModel.functionCall) {
return functionCall({
...props,
histories: getHistories(history, histories),
cqModel
});
}
return completions({
...props,
histories: getHistories(history, histories),
cqModel
});
})();
@@ -54,7 +58,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
const result = agents.find((item) => item.key === arg?.type) || agents[agents.length - 1];
return {
[result.key]: 1,
[result.key]: result.value,
[ModuleOutputKeyEnum.responseData]: {
price: user.openaiAccount?.key ? 0 : cqModel.price * tokens,
model: cqModel.name || '',
@@ -69,18 +73,19 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
async function functionCall({
user,
cqModel,
inputs: { agents, systemPrompt, history = [], userChatInput }
histories,
inputs: { agents, systemPrompt, userChatInput }
}: Props & { cqModel: FunctionModelItemType }) {
const messages: ChatItemType[] = [
...history,
...histories,
{
obj: ChatRoleEnum.Human,
value: systemPrompt
? `补充的背景知识:
"""
? `<背景知识>
${systemPrompt}
"""
我的问题: ${userChatInput}
</背景知识>
问题: "${userChatInput}"
`
: userChatInput
}
@@ -95,18 +100,19 @@ ${systemPrompt}
// function body
const agentFunction = {
name: agentFunName,
description: '根据对话记录及补充的背景知识,判断用户的问题类型,并返回对应的字段',
description: '根据对话记录及补充的背景知识,对问题进行分类,并返回对应的类型字段',
parameters: {
type: 'object',
properties: {
type: {
type: 'string',
description: `判断用户的问题类型,并返回对应的字段。下面是几种问题类型: ${agents
description: `问题类型。下面是几种可选的问题类型: ${agents
.map((item) => `${item.value},返回:'${item.key}'`)
.join('')}`,
enum: agents.map((item) => item.key)
}
}
},
required: ['type']
}
};
const ai = getAIApi(user.openaiAccount, 48000);
@@ -115,12 +121,19 @@ ${systemPrompt}
model: cqModel.model,
temperature: 0,
messages: [...adaptMessages],
function_call: { name: agentFunName },
functions: [agentFunction]
tools: [
{
type: 'function',
function: agentFunction
}
],
tool_choice: { type: 'function', function: { name: agentFunName } }
});
try {
const arg = JSON.parse(response.choices?.[0]?.message?.function_call?.arguments || '');
const arg = JSON.parse(
response?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || ''
);
return {
arg,
@@ -130,7 +143,7 @@ ${systemPrompt}
console.log(agentFunction.parameters);
console.log(response.choices?.[0]?.message);
console.log('Your model may not support function_call', error);
console.log('Your model may not support toll_call', error);
return {
arg: {},
@@ -142,15 +155,16 @@ ${systemPrompt}
async function completions({
cqModel,
user,
inputs: { agents, systemPrompt = '', history = [], userChatInput }
histories,
inputs: { agents, systemPrompt = '', userChatInput }
}: Props & { cqModel: FunctionModelItemType }) {
const messages: ChatItemType[] = [
{
obj: ChatRoleEnum.Human,
value: replaceVariable(cqModel.functionPrompt || Prompt_CQJson, {
systemPrompt,
typeList: agents.map((item) => `ID: "${item.key}", 问题类型:${item.value}`).join('\n'),
text: `${history.map((item) => `${item.obj}:${item.value}`).join('\n')}
typeList: agents.map((item) => `{"${item.value}": ${item.key}}`).join('\n'),
text: `${histories.map((item) => `${item.obj}:${item.value}`).join('\n')}
Human:${userChatInput}`
})
}

View File

@@ -9,6 +9,7 @@ import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { Prompt_ExtractJson } from '@/global/core/prompt/agent';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { FunctionModelItemType } from '@fastgpt/global/core/ai/model.d';
import { getHistories } from '../utils';
type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.history]?: ChatItemType[];
@@ -23,12 +24,13 @@ type Response = {
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
};
const agentFunName = 'agent_extract_data';
const agentFunName = 'extract_json_data';
export async function dispatchContentExtract(props: Props): Promise<Response> {
const {
user,
inputs: { content, description, extractKeys }
histories,
inputs: { content, history = 6, description, extractKeys }
} = props;
if (!content) {
@@ -41,11 +43,13 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
if (extractModel.functionCall) {
return functionCall({
...props,
histories: getHistories(history, histories),
extractModel
});
}
return completions({
...props,
histories: getHistories(history, histories),
extractModel
});
})();
@@ -88,13 +92,24 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
async function functionCall({
extractModel,
user,
inputs: { history = [], content, extractKeys, description }
histories,
inputs: { content, extractKeys, description }
}: Props & { extractModel: FunctionModelItemType }) {
const messages: ChatItemType[] = [
...history,
...histories,
{
obj: ChatRoleEnum.Human,
value: content
value: `<任务描述>
${description || '根据用户要求提取适当的 JSON 字符串。'}
- 如果字段为空,你返回空字符串。
- 不要换行。
- 结合历史记录和文本进行提取。
</任务描述>
<文本>
${content}
</文本>`
}
];
const filterMessages = ChatContextFilter({
@@ -120,7 +135,7 @@ async function functionCall({
// function body
const agentFunction = {
name: agentFunName,
description: `${description}\n如果内容不存在返回空字符串。`,
description,
parameters: {
type: 'object',
properties,
@@ -134,17 +149,24 @@ async function functionCall({
model: extractModel.model,
temperature: 0,
messages: [...adaptMessages],
function_call: { name: agentFunName },
functions: [agentFunction]
tools: [
{
type: 'function',
function: agentFunction
}
],
tool_choice: { type: 'function', function: { name: agentFunName } }
});
const arg: Record<string, any> = (() => {
try {
return JSON.parse(response.choices?.[0]?.message?.function_call?.arguments || '{}');
return JSON.parse(
response?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || '{}'
);
} catch (error) {
console.log(agentFunction.parameters);
console.log(response.choices?.[0]?.message);
console.log('Your model may not support function_call', error);
console.log(response.choices?.[0]?.message?.tool_calls?.[0]?.function);
console.log('Your model may not support tool_call', error);
return {};
}
})();
@@ -159,7 +181,8 @@ async function functionCall({
async function completions({
extractModel,
user,
inputs: { history = [], content, extractKeys, description }
histories,
inputs: { content, extractKeys, description }
}: Props & { extractModel: FunctionModelItemType }) {
const messages: ChatItemType[] = [
{
@@ -169,12 +192,10 @@ async function completions({
json: extractKeys
.map(
(item) =>
`key="${item.key}",描述="${item.desc}"required="${
item.required ? 'true' : 'false'
}"`
`{"key":"${item.key}", "description":"${item.required}", "required":${item.required}}}`
)
.join('\n'),
text: `${history.map((item) => `${item.obj}:${item.value}`).join('\n')}
text: `${histories.map((item) => `${item.obj}:${item.value}`).join('\n')}
Human: ${content}`
})
}

View File

@@ -22,11 +22,12 @@ import { getChatModel, ModelTypeEnum } from '@/service/core/ai/model';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import { formatStr2ChatContent } from '@fastgpt/service/core/chat/utils';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { getHistories } from '../utils';
export type ChatProps = ModuleDispatchProps<
AIChatModuleProps & {
[ModuleInputKeyEnum.userChatInput]: string;
[ModuleInputKeyEnum.history]?: ChatItemType[];
[ModuleInputKeyEnum.history]?: ChatItemType[] | number;
[ModuleInputKeyEnum.aiChatDatasetQuote]?: SearchDataResponseItemType[];
}
>;
@@ -43,12 +44,13 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
stream = false,
detail = false,
user,
histories,
outputs,
inputs: {
model,
temperature = 0,
maxToken = 4000,
history = [],
history = 6,
quoteQA = [],
userChatInput,
isResponseAnswerText = true,
@@ -63,6 +65,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
stream = stream && isResponseAnswerText;
const chatHistories = getHistories(history, histories);
// temperature adapt
const modelConstantsData = getChatModel(model);
@@ -88,7 +92,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
const { messages, filterMessages } = getChatMessages({
model: modelConstantsData,
history,
histories: chatHistories,
quoteText,
quotePrompt,
userChatInput,
@@ -265,14 +269,14 @@ function filterQuote({
function getChatMessages({
quotePrompt,
quoteText,
history = [],
histories = [],
systemPrompt,
userChatInput,
model
}: {
quotePrompt?: string;
quoteText: string;
history: ChatProps['inputs']['history'];
histories: ChatItemType[];
systemPrompt: string;
userChatInput: string;
model: ChatModelItemType;
@@ -293,7 +297,7 @@ function getChatMessages({
}
]
: []),
...history,
...histories,
{
obj: ChatRoleEnum.Human,
value: question
@@ -319,7 +323,7 @@ function getMaxTokens({
}: {
maxToken: number;
model: ChatModelItemType;
filterMessages: ChatProps['inputs']['history'];
filterMessages: ChatItemType[];
}) {
const tokensLimit = model.maxContext;

View File

@@ -3,8 +3,8 @@ import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { RunningModuleItemType } from '@/types/app';
import { ModuleDispatchProps } from '@/types/core/chat/type';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import type { ChatHistoryItemResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { FlowNodeInputTypeEnum, FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { ModuleItemType } from '@fastgpt/global/core/module/type';
import { UserType } from '@fastgpt/global/support/user/type';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
@@ -25,7 +25,6 @@ import { dispatchAppRequest } from './tools/runApp';
import { dispatchRunPlugin } from './plugin/run';
import { dispatchPluginInput } from './plugin/runInput';
import { dispatchPluginOutput } from './plugin/runOutput';
import { AuthUserTypeEnum } from '@fastgpt/global/support/permission/constant';
/* running */
export async function dispatchModules({
@@ -36,7 +35,8 @@ export async function dispatchModules({
appId,
modules,
chatId,
params = {},
histories = [],
startParams = {},
variables = {},
stream = false,
detail = false
@@ -48,7 +48,8 @@ export async function dispatchModules({
appId: string;
modules: ModuleItemType[];
chatId?: string;
params?: Record<string, any>;
histories: ChatItemType[];
startParams?: Record<string, any>;
variables?: Record<string, any>;
stream?: boolean;
detail?: boolean;
@@ -185,6 +186,7 @@ export async function dispatchModules({
stream,
detail,
variables,
histories,
outputs: module.outputs,
inputs: params
};
@@ -230,7 +232,12 @@ export async function dispatchModules({
// start process width initInput
const initModules = runningModules.filter((item) => initRunningModuleType[item.flowType]);
initModules.map((module) => moduleInput(module, params));
initModules.map((module) =>
moduleInput(module, {
...startParams,
history: [] // abandon history field. History module will get histories from other fields.
})
);
await checkModulesCanRun(initModules);
// focus try to run pluginOutput
@@ -252,45 +259,54 @@ function loadModules(
modules: ModuleItemType[],
variables: Record<string, any>
): RunningModuleItemType[] {
return modules.map((module) => {
return {
moduleId: module.moduleId,
name: module.name,
flowType: module.flowType,
showStatus: module.showStatus,
inputs: module.inputs
.filter((item) => item.connected || item.value !== undefined) // filter unconnected target input
.map((item) => {
if (typeof item.value !== 'string') {
return modules
.filter((item) => {
return ![FlowNodeTypeEnum.userGuide].includes(item.moduleId as any);
})
.map((module) => {
return {
moduleId: module.moduleId,
name: module.name,
flowType: module.flowType,
showStatus: module.showStatus,
inputs: module.inputs
.filter(
(item) =>
item.type === FlowNodeInputTypeEnum.systemInput ||
item.connected ||
item.value !== undefined
) // filter unconnected target input
.map((item) => {
if (typeof item.value !== 'string') {
return {
key: item.key,
value: item.value
};
}
// variables replace
const replacedVal = replaceVariable(item.value, variables);
return {
key: item.key,
value: item.value
value: replacedVal
};
}
// variables replace
const replacedVal = replaceVariable(item.value, variables);
return {
}),
outputs: module.outputs
.map((item) => ({
key: item.key,
value: replacedVal
};
}),
outputs: module.outputs
.map((item) => ({
key: item.key,
answer: item.key === ModuleOutputKeyEnum.answerText,
value: undefined,
targets: item.targets
}))
.sort((a, b) => {
// finish output always at last
if (a.key === ModuleOutputKeyEnum.finish) return 1;
if (b.key === ModuleOutputKeyEnum.finish) return -1;
return 0;
})
};
});
answer: item.key === ModuleOutputKeyEnum.answerText,
value: undefined,
targets: item.targets
}))
.sort((a, b) => {
// finish output always at last
if (a.key === ModuleOutputKeyEnum.finish) return 1;
if (b.key === ModuleOutputKeyEnum.finish) return -1;
return 0;
})
};
});
}
/* sse response modules staus */

View File

@@ -1,17 +1,19 @@
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { getHistories } from '../utils';
export type HistoryProps = ModuleDispatchProps<{
maxContext: number;
maxContext?: number;
[ModuleInputKeyEnum.history]: ChatItemType[];
}>;
export const dispatchHistory = (props: Record<string, any>) => {
const {
inputs: { maxContext = 5, history = [] }
histories,
inputs: { maxContext }
} = props as HistoryProps;
return {
history: maxContext > 0 ? history.slice(-maxContext) : []
history: getHistories(maxContext, histories)
};
};

View File

@@ -35,7 +35,7 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
...module,
showStatus: false
})),
params: data
startParams: data
});
const output = responseData.find((item) => item.moduleType === FlowNodeTypeEnum.pluginOutput);

View File

@@ -56,8 +56,8 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
...props,
appId: app.id,
modules: appData.modules,
params: {
history,
histories: history,
startParams: {
userChatInput
}
});

View File

@@ -0,0 +1,9 @@
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';

/**
 * Resolve a module's history input into a concrete chat-history array.
 *
 * The `history` input can be configured in two ways:
 * - a number N: take the latest N items from the runtime `histories` list;
 * - an explicit ChatItemType[]: use it verbatim (legacy module format).
 *
 * @param history - history config: an item count, an explicit list, or undefined.
 * @param histories - the full chat history supplied by the dispatcher (defaults to []).
 * @returns the effective history slice; empty when unset, zero, or invalid.
 */
export const getHistories = (history?: ChatItemType[] | number, histories: ChatItemType[] = []) => {
  // Unset (undefined/null) or 0 both mean "no history".
  if (!history) return [];
  // Guard non-positive counts: `slice(-n)` with a negative `history` would
  // flip into `slice(+n)` and drop the OLDEST items instead of selecting
  // the most recent ones.
  if (typeof history === 'number') return history > 0 ? histories.slice(-history) : [];
  if (Array.isArray(history)) return history;
  return [];
};

View File

@@ -1,7 +1,7 @@
import { GET } from '@fastgpt/service/common/api/plusRequest';
export const authTeamBalance = async (teamId: string) => {
if (global.systemEnv.pluginBaseUrl) {
if (global.systemEnv?.pluginBaseUrl) {
return GET('/support/permission/authBalance', { teamId });
}
return true;

View File

@@ -11,12 +11,14 @@ import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
token: team owner and chat owner have all permissions
*/
export async function autChatCrud({
appId,
chatId,
shareId,
outLinkUid,
per = 'owner',
...props
}: AuthModeType & {
appId: string;
chatId?: string;
shareId?: string;
outLinkUid?: string;
@@ -28,7 +30,7 @@ export async function autChatCrud({
const isOutLink = Boolean(shareId && outLinkUid);
if (!chatId) return { isOutLink, uid: outLinkUid };
const chat = await MongoChat.findOne({ chatId }).lean();
const chat = await MongoChat.findOne({ appId, chatId }).lean();
if (!chat) return { isOutLink, uid: outLinkUid };

View File

@@ -2,6 +2,6 @@ import { POST } from '@fastgpt/service/common/api/plusRequest';
import { SendInformProps } from '@fastgpt/global/support/user/inform/type';
export function sendOneInform(data: SendInformProps) {
if (!global.systemEnv.pluginBaseUrl) return;
if (!global.systemEnv?.pluginBaseUrl) return;
return POST('/support/user/inform/create', data);
}

View File

@@ -8,7 +8,7 @@ import { defaultQGModels } from '@fastgpt/global/core/ai/model';
import { POST } from '@fastgpt/service/common/api/plusRequest';
export function createBill(data: CreateBillProps) {
if (!global.systemEnv.pluginBaseUrl) return;
if (!global.systemEnv?.pluginBaseUrl) return;
if (data.total === 0) {
addLog.info('0 Bill', data);
}
@@ -17,7 +17,7 @@ export function createBill(data: CreateBillProps) {
} catch (error) {}
}
export function concatBill(data: ConcatBillProps) {
if (!global.systemEnv.pluginBaseUrl) return;
if (!global.systemEnv?.pluginBaseUrl) return;
if (data.total === 0) {
addLog.info('0 Bill', data);
}

View File

@@ -2,6 +2,7 @@ import type { NextApiResponse } from 'next';
import { RunningModuleItemType } from '@/types/app';
import type { UserType } from '@fastgpt/global/support/user/type';
import { AuthUserTypeEnum } from '@fastgpt/global/support/permission/constant';
import { ChatItemType } from '@fastgpt/global/core/chat/type';
// module dispatch props type
export type ModuleDispatchProps<T> = {
@@ -14,6 +15,7 @@ export type ModuleDispatchProps<T> = {
stream: boolean;
detail: boolean; // response detail
variables: Record<string, any>;
histories: ChatItemType[];
outputs: RunningModuleItemType['outputs'];
inputs: T;
};

View File

@@ -12,9 +12,10 @@ export const useSelectFile = (props?: {
const { fileType = '*', multiple = false, maxCount = 10 } = props || {};
const { toast } = useToast();
const SelectFileDom = useRef<HTMLInputElement>(null);
const openSign = useRef<any>();
const File = useCallback(
({ onSelect }: { onSelect: (e: File[]) => void }) => (
({ onSelect }: { onSelect: (e: File[], sign?: any) => void }) => (
<Box position={'absolute'} w={0} h={0} overflow={'hidden'}>
<input
ref={SelectFileDom}
@@ -29,7 +30,7 @@ export const useSelectFile = (props?: {
title: t('file.Select a maximum of 10 files')
});
}
onSelect(Array.from(e.target.files));
onSelect(Array.from(e.target.files), openSign.current);
}}
/>
</Box>
@@ -37,7 +38,8 @@ export const useSelectFile = (props?: {
[fileType, maxCount, multiple]
);
const onOpen = useCallback(() => {
const onOpen = useCallback((sign?: any) => {
openSign.current = sign;
SelectFileDom.current && SelectFileDom.current.click();
}, []);

View File

@@ -233,10 +233,10 @@ export const fileDownload = ({
};
export const fileToBase64 = (file: File) => {
return new Promise((resolve, reject) => {
return new Promise<string>((resolve, reject) => {
const reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = () => resolve(reader.result);
reader.onload = () => resolve(reader.result as string);
reader.onerror = (error) => reject(error);
});
};

View File

@@ -27,7 +27,6 @@ export const appTemplates: (AppItemType & {
key: 'welcomeText',
type: 'input',
label: '开场白',
value: '',
connected: true
}
],
@@ -64,46 +63,6 @@ export const appTemplates: (AppItemType & {
}
]
},
{
moduleId: 'history',
name: '聊天记录',
flowType: 'historyNode',
position: {
x: 452.5466249541586,
y: 1276.3930310334215
},
inputs: [
{
key: 'maxContext',
type: 'numberInput',
label: '最长记录数',
value: 6,
min: 0,
max: 50,
connected: true
},
{
key: 'history',
type: 'hidden',
label: '聊天记录',
connected: true
}
],
outputs: [
{
key: 'history',
label: '聊天记录',
valueType: 'chatHistory',
type: 'source',
targets: [
{
moduleId: 'chatModule',
key: 'history'
}
]
}
]
},
{
moduleId: 'chatModule',
name: 'AI 对话',
@@ -171,7 +130,6 @@ export const appTemplates: (AppItemType & {
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。可使用变量,例如 {{language}}',
placeholder:
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。可使用变量,例如 {{language}}',
value: '',
connected: true
},
{
@@ -193,7 +151,8 @@ export const appTemplates: (AppItemType & {
type: 'target',
label: '聊天记录',
valueType: 'chatHistory',
connected: true
connected: true,
value: 6
},
{
key: 'userChatInput',
@@ -285,46 +244,6 @@ export const appTemplates: (AppItemType & {
}
]
},
{
moduleId: 'history',
name: '聊天记录',
flowType: 'historyNode',
position: {
x: 452.5466249541586,
y: 1276.3930310334215
},
inputs: [
{
key: 'maxContext',
type: 'numberInput',
label: '最长记录数',
value: 6,
min: 0,
max: 50,
connected: true
},
{
key: 'history',
type: 'hidden',
label: '聊天记录',
connected: true
}
],
outputs: [
{
key: 'history',
label: '聊天记录',
valueType: 'chatHistory',
type: 'source',
targets: [
{
moduleId: 'chatModule',
key: 'history'
}
]
}
]
},
{
moduleId: 'datasetSearch',
name: '知识库搜索',
@@ -495,7 +414,6 @@ export const appTemplates: (AppItemType & {
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。可使用变量,例如 {{language}}',
placeholder:
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。可使用变量,例如 {{language}}',
value: '',
connected: true
},
{
@@ -517,7 +435,8 @@ export const appTemplates: (AppItemType & {
type: 'target',
label: '聊天记录',
valueType: 'chatHistory',
connected: true
connected: true,
value: 6
},
{
key: 'userChatInput',
@@ -678,46 +597,6 @@ export const appTemplates: (AppItemType & {
}
]
},
{
moduleId: 'history',
name: '聊天记录',
flowType: 'historyNode',
position: {
x: 452.5466249541586,
y: 1276.3930310334215
},
inputs: [
{
key: 'maxContext',
type: 'numberInput',
label: '最长记录数',
value: 2,
min: 0,
max: 50,
connected: true
},
{
key: 'history',
type: 'hidden',
label: '聊天记录',
connected: true
}
],
outputs: [
{
key: 'history',
label: '聊天记录',
valueType: 'chatHistory',
type: 'source',
targets: [
{
moduleId: 'chatModule',
key: 'history'
}
]
}
]
},
{
moduleId: 'chatModule',
name: 'AI 对话',
@@ -793,7 +672,6 @@ export const appTemplates: (AppItemType & {
type: 'hidden',
label: '引用内容模板',
valueType: 'string',
value: '',
connected: true
},
{
@@ -801,7 +679,6 @@ export const appTemplates: (AppItemType & {
type: 'hidden',
label: '引用内容提示词',
valueType: 'string',
value: '',
connected: true
},
{
@@ -824,7 +701,8 @@ export const appTemplates: (AppItemType & {
type: 'target',
label: '聊天记录',
valueType: 'chatHistory',
connected: true
connected: true,
value: 6
},
{
key: 'userChatInput',
@@ -906,46 +784,6 @@ export const appTemplates: (AppItemType & {
}
]
},
{
moduleId: 'xj0c9p',
name: '聊天记录',
flowType: 'historyNode',
position: {
x: 1770.497690708367,
y: 1820.2355054321215
},
inputs: [
{
key: 'maxContext',
type: 'numberInput',
label: '最长记录数',
value: 6,
min: 0,
max: 50,
connected: true
},
{
key: 'history',
type: 'hidden',
label: '聊天记录',
connected: true
}
],
outputs: [
{
key: 'history',
label: '聊天记录',
valueType: 'chatHistory',
type: 'source',
targets: [
{
moduleId: 'nlfwkc',
key: 'history'
}
]
}
]
},
{
moduleId: 'remuj3',
name: '问题分类',
@@ -980,7 +818,8 @@ export const appTemplates: (AppItemType & {
type: 'target',
label: '聊天记录',
valueType: 'chatHistory',
connected: true
connected: true,
value: 6
},
{
key: 'userChatInput',
@@ -997,15 +836,15 @@ export const appTemplates: (AppItemType & {
value: [
{
value: '打招呼、问候等问题',
key: 'fasw'
key: 'wqre'
},
{
value: '“laf” 的问题',
key: 'fqsw'
value: '关于 xxx 的问题',
key: 'sdfa'
},
{
value: '商务问题',
key: 'fesw'
key: 'agex'
},
{
value: '其他问题',
@@ -1017,7 +856,7 @@ export const appTemplates: (AppItemType & {
],
outputs: [
{
key: 'fasw',
key: 'wqre',
label: '',
type: 'hidden',
targets: [
@@ -1028,7 +867,7 @@ export const appTemplates: (AppItemType & {
]
},
{
key: 'fqsw',
key: 'sdfa',
label: '',
type: 'hidden',
targets: [
@@ -1039,7 +878,7 @@ export const appTemplates: (AppItemType & {
]
},
{
key: 'fesw',
key: 'agex',
label: '',
type: 'hidden',
targets: [
@@ -1255,7 +1094,6 @@ export const appTemplates: (AppItemType & {
type: 'hidden',
label: '引用内容模板',
valueType: 'string',
value: '',
connected: true
},
{
@@ -1263,7 +1101,6 @@ export const appTemplates: (AppItemType & {
type: 'hidden',
label: '引用内容提示词',
valueType: 'string',
value: '',
connected: true
},
{
@@ -1286,7 +1123,8 @@ export const appTemplates: (AppItemType & {
type: 'target',
label: '聊天记录',
valueType: 'chatHistory',
connected: true
connected: true,
value: 6
},
{
key: 'userChatInput',
@@ -1324,46 +1162,6 @@ export const appTemplates: (AppItemType & {
}
]
},
{
moduleId: 's4v9su',
name: '聊天记录',
flowType: 'historyNode',
position: {
x: 193.3803955457983,
y: 1316.251200765746
},
inputs: [
{
key: 'maxContext',
type: 'numberInput',
label: '最长记录数',
value: 2,
min: 0,
max: 50,
connected: true
},
{
key: 'history',
type: 'hidden',
label: '聊天记录',
connected: true
}
],
outputs: [
{
key: 'history',
label: '聊天记录',
valueType: 'chatHistory',
type: 'source',
targets: [
{
moduleId: 'remuj3',
key: 'history'
}
]
}
]
},
{
moduleId: 'fljhzy',
name: '知识库搜索',

View File

@@ -21,7 +21,6 @@ import { ModuleTemplateTypeEnum } from '@fastgpt/global/core/module/constants';
export const appSystemModuleTemplates: FlowModuleTemplateType[] = [
UserGuideModule,
UserInputModule,
HistoryModule,
AiChatModule,
AssignedAnswerModule,
DatasetSearchModule,
@@ -33,7 +32,6 @@ export const appSystemModuleTemplates: FlowModuleTemplateType[] = [
export const pluginSystemModuleTemplates: FlowModuleTemplateType[] = [
PluginInputModule,
PluginOutputModule,
HistoryModule,
AiChatModule,
AssignedAnswerModule,
DatasetSearchModule,