perf: qa default value

This commit is contained in:
archer
2023-09-07 17:30:04 +08:00
parent 1b7f87752a
commit 971c9cb291
6 changed files with 37 additions and 10 deletions

View File

@@ -48,7 +48,7 @@ FastGPT 是一个基于 LLM 大语言模型的知识库问答系统,提供开
 - [x] 支持 url 读取、 CSV 批量导入
 - [x] 支持知识库单独设置向量模型
 - [x] 源文件存储
-- [x] 文件学习 Agent
+- [ ] 文件学习 Agent
 3. 多种效果测试渠道
 - [x] 知识库单点搜索测试
 - [x] 对话时反馈引用并可修改与删除

View File

@@ -68,6 +68,7 @@ export async function getVector({
   )
   .then(async (res) => {
     if (!res.data?.data?.[0]?.embedding) {
+      console.log(res.data);
       // @ts-ignore
       return Promise.reject(res.data?.error?.message || 'Embedding API Error');
     }

View File

@@ -20,7 +20,7 @@ const CreateFileModal = ({
   });
   return (
-    <MyModal title={t('file.Create File')} isOpen onClose={onClose} w={'600px'} top={'15vh'}>
+    <MyModal title={t('file.Create File')} isOpen onClose={() => {}} w={'600px'} top={'15vh'}>
     <ModalBody>
       <Box mb={1} fontSize={'sm'}>

View File

@@ -10,6 +10,7 @@ import { ChatCompletionRequestMessage } from 'openai';
 import { modelToolMap } from '@/utils/plugin';
 import { gptMessage2ChatType } from '@/utils/adapt';
 import { addLog } from '../utils/tools';
+import { splitText2Chunks } from '@/utils/file';

 const reduceQueue = () => {
   global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
@@ -212,5 +213,16 @@ function formatSplitText(text: string) {
     }
   }
+  // empty result. direct split chunk
+  if (result.length === 0) {
+    const splitRes = splitText2Chunks({ text: text, maxLen: 500 });
+    splitRes.chunks.forEach((item) => {
+      result.push({
+        q: item,
+        a: ''
+      });
+    });
+  }
   return result;
 }

View File

@@ -44,7 +44,7 @@ export const jsonRes = <T = any>(
   if (typeof error === 'string') {
     msg = error;
   } else if (proxyError[error?.code]) {
-    msg = '接口连接异常';
+    msg = '网络连接异常';
   } else if (error?.response?.data?.error?.message) {
     msg = error?.response?.data?.error?.message;
   } else if (openaiAccountError[error?.response?.data?.error?.code]) {
@@ -85,7 +85,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
   if (typeof error === 'string') {
     msg = error;
   } else if (proxyError[error?.code]) {
-    msg = '接口连接异常';
+    msg = '网络连接异常';
   } else if (error?.response?.data?.error?.message) {
     msg = error?.response?.data?.error?.message;
   } else if (openaiAccountError[error?.response?.data?.error?.code]) {

View File

@@ -51,9 +51,18 @@ export function countOpenAIToken({ messages }: { messages: ChatItemType[] }) {
   const adaptMessages = adaptChatItem_openAI({ messages, reserveId: true });
   const token = adaptMessages.reduce((sum, item) => {
     const text = `${item.role}\n${item.content}`;
-    const enc = getOpenAiEncMap();
-    const encodeText = enc.encode(text);
-    const tokens = encodeText.length + 3; // 补充估算值
+    /* use textLen as tokens if encode error */
+    const tokens = (() => {
+      try {
+        const enc = getOpenAiEncMap();
+        const encodeText = enc.encode(text);
+        return encodeText.length + 3; // 补充估算值
+      } catch (error) {
+        return text.length;
+      }
+    })();
     return sum + tokens;
   }, 0);
@@ -62,9 +71,14 @@ export function countOpenAIToken({ messages }: { messages: ChatItemType[] }) {
 export const openAiSliceTextByToken = ({ text, length }: { text: string; length: number }) => {
   const enc = getOpenAiEncMap();
-  const encodeText = enc.encode(text);
-  const decoder = new TextDecoder();
-  return decoder.decode(enc.decode(encodeText.slice(0, length)));
+  try {
+    const encodeText = enc.encode(text);
+    const decoder = new TextDecoder();
+    return decoder.decode(enc.decode(encodeText.slice(0, length)));
+  } catch (error) {
+    return text.slice(0, length);
+  }
 };