fix: variable input and update chat time

This commit is contained in:
archer
2023-08-31 18:09:12 +08:00
parent 3420f677b6
commit b22c878cf9
15 changed files with 30 additions and 48 deletions

View File

@@ -104,7 +104,6 @@ export async function pushDataToKb({
// count q token
const token = modelToolMap.countTokens({
model: 'gpt-3.5-turbo',
messages: [{ obj: 'System', value: item.q }]
});

View File

@@ -69,7 +69,7 @@ export async function getVector({
.then(async (res) => {
if (!res.data?.data?.[0]?.embedding) {
// @ts-ignore
return Promise.reject(res.data?.error?.message || 'Embedding Error');
return Promise.reject(res.data?.error?.message || 'Embedding API Error');
}
return {
tokenLen: res.data.usage.total_tokens || 0,

View File

@@ -4,13 +4,10 @@ import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import type { ChatItemType } from '@/types/chat';
import { countOpenAIToken } from '@/utils/plugin/openai';
import { OpenAiChatEnum } from '@/constants/model';
type ModelType = `${OpenAiChatEnum}`;
type Props = {
messages: ChatItemType[];
model: ModelType;
model: string;
maxLen: number;
};
type Response = ChatItemType[];
@@ -28,7 +25,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
return jsonRes<Response>(res, {
data: gpt_chatItemTokenSlice({
messages,
model,
maxToken: maxLen
})
});
@@ -42,11 +38,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
export function gpt_chatItemTokenSlice({
messages,
model = 'gpt-3.5-turbo',
maxToken
}: {
messages: ChatItemType[];
model?: string;
maxToken: number;
}) {
let result: ChatItemType[] = [];
@@ -54,7 +48,7 @@ export function gpt_chatItemTokenSlice({
for (let i = 0; i < messages.length; i++) {
const msgs = [...result, messages[i]];
const tokens = countOpenAIToken({ messages: msgs, model });
const tokens = countOpenAIToken({ messages: msgs });
if (tokens < maxToken) {
result = msgs;

View File

@@ -35,7 +35,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
// token check
const token = modelToolMap.countTokens({
model: 'gpt-3.5-turbo',
messages: [{ obj: 'System', value: q }]
});

View File

@@ -10,7 +10,7 @@ import type { VariableItemType } from '@/types/app';
import MyIcon from '@/components/Icon';
import { customAlphabet } from 'nanoid';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
import VariableEditModal from '../../../VariableEditModal';
import VariableEditModal, { addVariable } from '../../../VariableEditModal';
export const defaultVariable: VariableItemType = {
id: nanoid(),
@@ -105,7 +105,7 @@ const NodeUserGuide = ({ data }: NodeProps<FlowModuleItemType>) => {
variant={'base'}
leftIcon={<AddIcon fontSize={'10px'} />}
onClick={() => {
const newVariable = { ...defaultVariable, id: nanoid() };
const newVariable = addVariable();
updateVariables(variables.concat(newVariable));
setEditVariable(newVariable);
}}

View File

@@ -532,6 +532,13 @@ const Settings = ({ appId }: { appId: string }) => {
variables.map((item) => (item.id === variable.id ? variable : item))
);
} else {
// check for duplicate variable key
if (variables.find((item) => item.key === variable.key)) {
return toast({
status: 'warning',
title: t('app.Variable Key Repeat Tip')
});
}
appendVariable(variable);
}

View File

@@ -204,6 +204,6 @@ export const defaultVariable: VariableItemType = {
enums: [{ value: '' }]
};
export const addVariable = () => {
const newVariable = { ...defaultVariable, id: nanoid() };
const newVariable = { ...defaultVariable, key: nanoid(), id: nanoid() };
return newVariable;
};