V4.6.9-first commit (#899)

* perf: insert mongo dataset data session

* perf: dataset data index

* remove delay

* rename bill schema

* rename bill record

* perf: bill table

* perf: prompt

* perf: sub plan

* change the usage count

* feat: usage bill

* publish usages

* doc

* 新增团队聊天功能 (#20)

* perf: doc

* feat 添加标签部分

feat 信息团队标签配置

feat 新增团队同步管理

feat team分享页面

feat 完成team分享页面

feat 实现模糊搜索

style 格式化

fix 修复迷糊匹配

style 样式修改

fix 团队标签功能修复

* fix 修复鉴权功能

* merge 合并代码

* fix 修复引用错误

* fix 修复pr问题

* fix 修复ts格式问题

---------

Co-authored-by: archer <545436317@qq.com>
Co-authored-by: liuxingwan <liuxingwan.lxw@alibaba-inc.com>

* update extra plan

* fix: ts

* format

* perf: bill field

* feat: standard plan

* fix: ts

* feat 个人账号页面修改 (#22)

* feat 添加标签部分

feat 信息团队标签配置

feat 新增团队同步管理

feat team分享页面

feat 完成team分享页面

feat 实现模糊搜索

style 格式化

fix 修复迷糊匹配

style 样式修改

fix 团队标签功能修复

* fix 修复鉴权功能

* merge 合并代码

* fix 修复引用错误

* fix 修复pr问题

* fix 修复ts格式问题

* feat 修改个人账号页

---------

Co-authored-by: liuxingwan <liuxingwan.lxw@alibaba-inc.com>

* sub plan page (#23)

* fix chunk index; error page text

* feat: dataset process Integral prediction

* feat: stand plan field

* feat: sub plan limit

* perf: index

* query extension

* perf: share link push app name

* perf: plan point unit

* perf: get sub plan

* perf: account page

* feat 新增套餐详情弹窗代码 (#24)

* merge 合并代码

* fix 新增套餐详情弹框

* fix 修复pr问题

* feat: change http node input to prompt editor (#21)

* feat: change http node input to prompt editor

* fix

* split PromptEditor to HttpInput

* Team plans (#25)

* perf: pay check

* perf: team plan test

* plan limit check

* replace sensitive text

* perf: fix some null

* collection null check

* perf: plans modal

* perf: http module

* pacakge (#26)

* individuation page and pay modal amount (#27)

* feat: individuation page

* team chat config

* pay modal

* plan count and replace invalid chars (#29)

* fix: user oneapi

* fix: training queue

* fix: qa queue

* perf: remove space chars

* replace invalid chars

* change httpinput dropdown menu (#28)

* perf: http

* reseet free plan

* perf: plan code to packages

* remove llm config to package

* perf: code

* perf: faq

* fix: get team plan

---------

Co-authored-by: yst <77910600+yu-and-liu@users.noreply.github.com>
Co-authored-by: liuxingwan <liuxingwan.lxw@alibaba-inc.com>
Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
This commit is contained in:
Archer
2024-02-28 13:19:15 +08:00
committed by GitHub
parent 32686f9e3e
commit 064c64e74c
282 changed files with 7223 additions and 4731 deletions

View File

@@ -1,6 +1,5 @@
import { initSystemConfig } from '@/pages/api/common/system/getInitData';
import { generateQA } from '@/service/events/generateQA';
import { generateVector } from '@/service/events/generateVector';
import { startQueue } from '@/service/utils/tools';
import { setCron } from '@fastgpt/service/common/system/cron';
export const startCron = () => {
@@ -17,7 +16,6 @@ export const setUpdateSystemConfigCron = () => {
export const setTrainingQueueCron = () => {
setCron('*/1 * * * *', () => {
generateVector();
generateQA();
startQueue();
});
};

View File

@@ -1,42 +0,0 @@
export const getLLMModel = (model?: string) => {
return global.llmModels.find((item) => item.model === model) ?? global.llmModels[0];
};
export const getDatasetModel = (model?: string) => {
return (
global.llmModels?.filter((item) => item.datasetProcess)?.find((item) => item.model === model) ??
global.llmModels[0]
);
};
export const getVectorModel = (model?: string) => {
return global.vectorModels.find((item) => item.model === model) || global.vectorModels[0];
};
export function getAudioSpeechModel(model?: string) {
return (
global.audioSpeechModels.find((item) => item.model === model) || global.audioSpeechModels[0]
);
}
export function getWhisperModel(model?: string) {
return global.whisperModel;
}
export function getReRankModel(model?: string) {
return global.reRankModels.find((item) => item.model === model);
}
export enum ModelTypeEnum {
llm = 'llm',
vector = 'vector',
audioSpeech = 'audioSpeech',
whisper = 'whisper',
rerank = 'rerank'
}
export const getModelMap = {
[ModelTypeEnum.llm]: getLLMModel,
[ModelTypeEnum.vector]: getVectorModel,
[ModelTypeEnum.audioSpeech]: getAudioSpeechModel,
[ModelTypeEnum.whisper]: getWhisperModel,
[ModelTypeEnum.rerank]: getReRankModel
};

View File

@@ -6,11 +6,9 @@ import {
} from '@fastgpt/global/core/dataset/controller';
import {
insertDatasetDataVector,
recallFromVectorStore,
updateDatasetDataVector
recallFromVectorStore
} from '@fastgpt/service/common/vectorStore/controller';
import {
DatasetDataIndexTypeEnum,
DatasetSearchModeEnum,
DatasetSearchModeMap,
SearchScoreTypeEnum
@@ -22,6 +20,7 @@ import { deleteDatasetDataVector } from '@fastgpt/service/common/vectorStore/con
import { getVectorsByText } from '@fastgpt/service/core/ai/embedding';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import {
DatasetDataItemType,
DatasetDataSchemaType,
DatasetDataWithCollectionType,
SearchDataResponseItemType
@@ -34,8 +33,9 @@ import type {
PushDatasetDataResponse
} from '@fastgpt/global/core/dataset/api.d';
import { pushDataListToTrainingQueue } from '@fastgpt/service/core/dataset/training/controller';
import { getVectorModel } from '../../ai/model';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { startQueue } from '@/service/utils/tools';
export async function pushDataToTrainingQueue(
props: {
@@ -49,6 +49,8 @@ export async function pushDataToTrainingQueue(
datasetModelList: global.llmModels
});
startQueue();
return result;
}
@@ -78,7 +80,7 @@ export async function insertData2Dataset({
return Promise.reject("teamId and tmbId can't be the same");
}
const qaStr = `${q}\n${a}`.trim();
const qaStr = getDefaultIndex({ q, a }).text;
// empty indexes check, if empty, create default index
indexes =
@@ -86,10 +88,16 @@ export async function insertData2Dataset({
? indexes.map((index) => ({
...index,
dataId: undefined,
defaultIndex: indexes?.length === 1 && index.text === qaStr ? true : index.defaultIndex
defaultIndex: index.text.trim() === qaStr
}))
: [getDefaultIndex({ q, a })];
if (!indexes.find((index) => index.defaultIndex)) {
indexes.unshift(getDefaultIndex({ q, a }));
}
indexes = indexes.slice(0, 6);
// insert to vector store
const result = await Promise.all(
indexes.map((item) =>
@@ -113,7 +121,7 @@ export async function insertData2Dataset({
a,
fullTextToken: jiebaSplit({ text: qaStr }),
chunkIndex,
indexes: indexes.map((item, i) => ({
indexes: indexes?.map((item, i) => ({
...item,
dataId: result[i].insertId
}))
@@ -128,8 +136,10 @@ export async function insertData2Dataset({
/**
* update data
* 1. compare indexes
* 2. update pg data
* 3. update mongo data
* 2. insert new pg data
* session run:
* 3. update mongo data(session run)
* 4. delete old pg data
*/
export async function updateData2Dataset({
dataId,
@@ -141,31 +151,30 @@ export async function updateData2Dataset({
if (!Array.isArray(indexes)) {
return Promise.reject('indexes is required');
}
const qaStr = `${q}\n${a}`.trim();
const qaStr = getDefaultIndex({ q, a }).text;
// patch index and update pg
const mongoData = await MongoDatasetData.findById(dataId);
if (!mongoData) return Promise.reject('core.dataset.error.Data not found');
// make sure have one index
if (indexes.length === 0) {
const databaseDefaultIndex = mongoData.indexes.find((index) => index.defaultIndex);
indexes = [
getDefaultIndex({
q,
a,
dataId: databaseDefaultIndex ? String(databaseDefaultIndex.dataId) : undefined
})
];
// remove defaultIndex
let formatIndexes = indexes.map((index) => ({
...index,
text: index.text.trim(),
defaultIndex: index.text.trim() === qaStr
}));
if (!formatIndexes.find((index) => index.defaultIndex)) {
const defaultIndex = mongoData.indexes.find((index) => index.defaultIndex);
formatIndexes.unshift(defaultIndex ? defaultIndex : getDefaultIndex({ q, a }));
}
formatIndexes = formatIndexes.slice(0, 6);
// patch indexes, create, update, delete
const patchResult: PatchIndexesProps[] = [];
// find database indexes in new Indexes, if have not, delete it
for (const item of mongoData.indexes) {
const index = indexes.find((index) => index.dataId === item.dataId);
const index = formatIndexes.find((index) => index.dataId === item.dataId);
if (!index) {
patchResult.push({
type: 'delete',
@@ -173,35 +182,34 @@ export async function updateData2Dataset({
});
}
}
for (const item of indexes) {
for (const item of formatIndexes) {
const index = mongoData.indexes.find((index) => index.dataId === item.dataId);
// in database, update
if (index) {
// manual update index
// default index update
if (index.defaultIndex && index.text !== qaStr) {
patchResult.push({
type: 'update',
index: {
//@ts-ignore
...index.toObject(),
text: qaStr
}
});
continue;
}
// custom index update
if (index.text !== item.text) {
patchResult.push({
type: 'update',
index: item
});
} else if (index.defaultIndex && index.text !== qaStr) {
// update default index
patchResult.push({
type: 'update',
index: {
...item,
type:
item.type === DatasetDataIndexTypeEnum.qa && !a
? DatasetDataIndexTypeEnum.chunk
: item.type,
text: qaStr
}
});
} else {
patchResult.push({
type: 'unChange',
index: item
});
continue;
}
patchResult.push({
type: 'unChange',
index: item
});
} else {
// not in database, create
patchResult.push({
@@ -215,10 +223,12 @@ export async function updateData2Dataset({
mongoData.updateTime = new Date();
await mongoData.save();
// update vector
const result = await Promise.all(
patchResult.map(async (item) => {
if (item.type === 'create') {
// insert vector
const clonePatchResult2Insert: PatchIndexesProps[] = JSON.parse(JSON.stringify(patchResult));
const insertResult = await Promise.all(
clonePatchResult2Insert.map(async (item) => {
// insert new vector and update dateId
if (item.type === 'create' || item.type === 'update') {
const result = await insertDatasetDataVector({
query: item.index.text,
model: getVectorModel(model),
@@ -229,50 +239,54 @@ export async function updateData2Dataset({
item.index.dataId = result.insertId;
return result;
}
if (item.type === 'update' && item.index.dataId) {
const result = await updateDatasetDataVector({
teamId: mongoData.teamId,
datasetId: mongoData.datasetId,
collectionId: mongoData.collectionId,
id: item.index.dataId,
query: item.index.text,
model: getVectorModel(model)
});
item.index.dataId = result.insertId;
return result;
}
if (item.type === 'delete' && item.index.dataId) {
await deleteDatasetDataVector({
teamId: mongoData.teamId,
id: item.index.dataId
});
return {
charsLength: 0
};
}
return {
charsLength: 0
};
})
);
const charsLength = insertResult.reduce((acc, cur) => acc + cur.charsLength, 0);
// console.log(clonePatchResult2Insert);
await mongoSessionRun(async (session) => {
// update mongo
const newIndexes = clonePatchResult2Insert
.filter((item) => item.type !== 'delete')
.map((item) => item.index);
// update mongo other data
mongoData.q = q || mongoData.q;
mongoData.a = a ?? mongoData.a;
mongoData.fullTextToken = jiebaSplit({ text: mongoData.q + mongoData.a });
// @ts-ignore
mongoData.indexes = newIndexes;
await mongoData.save({ session });
const charsLength = result.reduce((acc, cur) => acc + cur.charsLength, 0);
const newIndexes = patchResult.filter((item) => item.type !== 'delete').map((item) => item.index);
// update mongo other data
mongoData.q = q || mongoData.q;
mongoData.a = a ?? mongoData.a;
mongoData.fullTextToken = jiebaSplit({ text: mongoData.q + mongoData.a });
// @ts-ignore
mongoData.indexes = newIndexes;
await mongoData.save();
// delete vector
const deleteIdList = patchResult
.filter((item) => item.type === 'delete' || item.type === 'update')
.map((item) => item.index.dataId)
.filter(Boolean);
if (deleteIdList.length > 0) {
await deleteDatasetDataVector({
teamId: mongoData.teamId,
idList: deleteIdList as string[]
});
}
});
return {
charsLength
};
}
export const deleteDatasetData = async (data: DatasetDataItemType) => {
await mongoSessionRun(async (session) => {
await MongoDatasetData.findByIdAndDelete(data.id, { session });
await deleteDatasetDataVector({
teamId: data.teamId,
idList: data.indexes.map((item) => item.dataId)
});
});
};
type SearchDatasetDataProps = {
teamId: string;
model: string;
@@ -371,14 +385,18 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
const formatResult = concatResults
.map((data, index) => {
if (!data.collectionId) {
console.log('Collection is not found', data);
}
const result: SearchDataResponseItemType = {
id: String(data._id),
q: data.q,
a: data.a,
chunkIndex: data.chunkIndex,
datasetId: String(data.datasetId),
collectionId: String(data.collectionId._id),
sourceName: data.collectionId.name || '',
collectionId: String(data.collectionId?._id),
sourceName: data.collectionId?.name || '',
sourceId: data.collectionId?.fileId || data.collectionId?.rawLink,
score: [{ type: SearchScoreTypeEnum.embedding, value: data.score, index }]
};
@@ -481,7 +499,7 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
}))
});
if (!Array.isArray(results)) {
if (results.length === 0) {
usingReRank = false;
return [];
}

View File

@@ -1,3 +0,0 @@
export function getLikeSql(searchText?: string) {
return searchText ? `AND (index ILIKE '%${searchText}%' OR content ILIKE '%${searchText}%')` : '';
}

View File

@@ -1,25 +1,22 @@
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { pushQABill } from '@/service/support/wallet/bill/push';
import { DatasetDataIndexTypeEnum, TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { sendOneInform } from '../support/user/inform/api';
import { pushQAUsage } from '@/service/support/wallet/usage/push';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import type { ChatMessageItemType } from '@fastgpt/global/core/ai/type.d';
import { addLog } from '@fastgpt/service/common/system/log';
import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { Prompt_AgentQA } from '@/global/core/prompt/agent';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { authTeamBalance } from '../support/permission/auth/bill';
import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api.d';
import { UserErrEnum } from '@fastgpt/global/common/error/code/user';
import { lockTrainingDataByTeamId } from '@fastgpt/service/core/dataset/training/controller';
import { pushDataToTrainingQueue } from '@/service/core/dataset/data/controller';
import { getLLMModel } from '../core/ai/model';
import { getLLMModel } from '@fastgpt/service/core/ai/model';
import { checkInvalidChunkAndLock, checkTeamAiPointsAndLock } from './utils';
import { countGptMessagesChars } from '@fastgpt/service/core/chat/utils';
const reduceQueue = () => {
global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
return global.vectorQueueLen === 0;
return global.qaQueueLen === 0;
};
export async function generateQA(): Promise<any> {
@@ -86,26 +83,11 @@ export async function generateQA(): Promise<any> {
reduceQueue();
return generateQA();
}
console.log('Start QA Training');
// auth balance
try {
await authTeamBalance(data.teamId);
} catch (error: any) {
if (error?.statusText === UserErrEnum.balanceNotEnough) {
// send inform and lock data
try {
sendOneInform({
type: 'system',
title: '文本训练任务中止',
content:
'该团队账号余额不足,文本训练任务中止,重新充值后将会继续。暂停的任务将在 7 天后被删除。',
tmbId: data.tmbId
});
console.log('余额不足暂停【QA】生成任务');
lockTrainingDataByTeamId(data.teamId);
} catch (error) {}
}
if (!(await checkTeamAiPointsAndLock(data.teamId, data.tmbId))) {
console.log('balance not enough');
reduceQueue();
return generateQA();
}
@@ -137,6 +119,12 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
const qaArr = formatSplitText(answer, text); // 格式化后的QA对
addLog.info(`QA Training Finish`, {
time: `${(Date.now() - startTime) / 1000}s`,
splitLength: qaArr.length,
usage: chatResponse.usage
});
// get vector and insert
const { insertLen } = await pushDataToTrainingQueue({
teamId: data.teamId,
@@ -153,18 +141,12 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
// delete data from training
await MongoDatasetTraining.findByIdAndDelete(data._id);
addLog.info(`QA Training Finish`, {
time: `${(Date.now() - startTime) / 1000}s`,
splitLength: qaArr.length,
usage: chatResponse.usage
});
// add bill
if (insertLen > 0) {
pushQABill({
pushQAUsage({
teamId: data.teamId,
tmbId: data.tmbId,
charsLength: `${prompt}${answer}`.length,
charsLength: countGptMessagesChars(messages).length,
billId: data.billId,
model
});
@@ -176,32 +158,8 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
generateQA();
} catch (err: any) {
reduceQueue();
// log
if (err?.response) {
addLog.info('openai error: 生成QA错误', {
status: err.response?.status,
stateusText: err.response?.statusText,
data: err.response?.data
});
} else {
console.log(err);
addLog.error(getErrText(err, '生成 QA 错误'));
}
// message error or openai account error
if (
err?.message === 'invalid message format' ||
err.response?.data?.error?.type === 'invalid_request_error' ||
err?.code === 500
) {
addLog.info('invalid message format', {
text
});
try {
await MongoDatasetTraining.findByIdAndUpdate(data._id, {
lockTime: new Date('2998/5/5')
});
} catch (error) {}
if (await checkInvalidChunkAndLock({ err, data, errText: 'QA模型调用失败' })) {
return generateQA();
}
@@ -230,7 +188,6 @@ function formatSplitText(text: string, rawText: string) {
indexes: [
{
defaultIndex: true,
type: DatasetDataIndexTypeEnum.qa,
text: `${q}\n${a.trim().replace(/\n\s*/g, '\n')}`
}
]
@@ -248,7 +205,6 @@ function formatSplitText(text: string, rawText: string) {
indexes: [
{
defaultIndex: true,
type: DatasetDataIndexTypeEnum.chunk,
text: chunk
}
]

View File

@@ -1,13 +1,9 @@
import { insertData2Dataset } from '@/service/core/dataset/data/controller';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { sendOneInform } from '../support/user/inform/api';
import { addLog } from '@fastgpt/service/common/system/log';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { authTeamBalance } from '@/service/support/permission/auth/bill';
import { pushGenerateVectorBill } from '@/service/support/wallet/bill/push';
import { UserErrEnum } from '@fastgpt/global/common/error/code/user';
import { lockTrainingDataByTeamId } from '@fastgpt/service/core/dataset/training/controller';
import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
import { checkInvalidChunkAndLock, checkTeamAiPointsAndLock } from './utils';
import { delay } from '@fastgpt/global/common/system/utils';
const reduceQueue = () => {
global.vectorQueueLen = global.vectorQueueLen > 0 ? global.vectorQueueLen - 1 : 0;
@@ -19,7 +15,6 @@ const reduceQueue = () => {
export async function generateVector(): Promise<any> {
if (global.vectorQueueLen >= global.systemEnv.vectorMaxProcess) return;
global.vectorQueueLen++;
const start = Date.now();
// get training data
@@ -92,24 +87,7 @@ export async function generateVector(): Promise<any> {
}
// auth balance
try {
await authTeamBalance(data.teamId);
} catch (error: any) {
if (error?.statusText === UserErrEnum.balanceNotEnough) {
// send inform and lock data
try {
sendOneInform({
type: 'system',
title: '文本训练任务中止',
content:
'该团队账号余额不足,文本训练任务中止,重新充值后将会继续。暂停的任务将在 7 天后被删除。',
tmbId: data.tmbId
});
console.log('余额不足,暂停【向量】生成任务');
lockTrainingDataByTeamId(data.teamId);
} catch (error) {}
}
if (!(await checkTeamAiPointsAndLock(data.teamId, data.tmbId))) {
reduceQueue();
return generateVector();
}
@@ -124,7 +102,7 @@ export async function generateVector(): Promise<any> {
return;
}
// insert data to pg
// insert to dataset
const { charsLength } = await insertData2Dataset({
teamId: data.teamId,
tmbId: data.tmbId,
@@ -137,8 +115,8 @@ export async function generateVector(): Promise<any> {
model: data.model
});
// push bill
pushGenerateVectorBill({
// push usage
pushGenerateVectorUsage({
teamId: data.teamId,
tmbId: data.tmbId,
charsLength,
@@ -154,34 +132,8 @@ export async function generateVector(): Promise<any> {
console.log(`embedding finished, time: ${Date.now() - start}ms`);
} catch (err: any) {
reduceQueue();
// log
if (err?.response) {
addLog.info('openai error: 生成向量错误', {
status: err.response?.status,
stateusText: err.response?.statusText,
data: err.response?.data
});
} else {
console.log(err);
addLog.error(getErrText(err, '生成向量错误'));
}
// message error or openai account error
if (
err?.message === 'invalid message format' ||
err.response?.data?.error?.type === 'invalid_request_error' ||
err?.code === 500
) {
addLog.info('Lock training data');
console.log(err?.code);
console.log(err.response?.data?.error?.type);
console.log(err?.message);
try {
await MongoDatasetTraining.findByIdAndUpdate(data._id, {
lockTime: new Date('2998/5/5')
});
} catch (error) {}
if (await checkInvalidChunkAndLock({ err, data, errText: '向量模型调用失败' })) {
return generateVector();
}

View File

@@ -0,0 +1,69 @@
import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';
import { checkTeamAIPoints } from '@fastgpt/service/support/permission/teamLimit';
import { sendOneInform } from '../support/user/inform/api';
import { lockTrainingDataByTeamId } from '@fastgpt/service/core/dataset/training/controller';
import { DatasetTrainingSchemaType } from '@fastgpt/global/core/dataset/type';
import { addLog } from '@fastgpt/service/common/system/log';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { getErrText } from '@fastgpt/global/common/error/utils';
export const checkTeamAiPointsAndLock = async (teamId: string, tmbId: string) => {
try {
await checkTeamAIPoints(teamId);
return true;
} catch (error: any) {
if (error === TeamErrEnum.aiPointsNotEnough) {
// send inform and lock data
try {
sendOneInform({
type: 'system',
title: '文本训练任务中止',
content:
'该团队账号AI积分不足文本训练任务中止重新充值后将会继续。暂停的任务将在 7 天后被删除。',
tmbId: tmbId
});
console.log('余额不足,暂停【向量】生成任务');
lockTrainingDataByTeamId(teamId);
} catch (error) {}
}
return false;
}
};
export const checkInvalidChunkAndLock = async ({
err,
errText,
data
}: {
err: any;
errText: string;
data: DatasetTrainingSchemaType;
}) => {
if (err?.response) {
addLog.info(`openai error: ${errText}`, {
status: err.response?.status,
stateusText: err.response?.statusText,
data: err.response?.data
});
} else {
console.log(err);
addLog.error(getErrText(err, errText));
}
if (
err?.message === 'invalid message format' ||
err?.type === 'invalid_request_error' ||
err?.code === 500
) {
addLog.info('Lock training data');
console.log(err);
try {
await MongoDatasetTraining.findByIdAndUpdate(data._id, {
lockTime: new Date('2998/5/5')
});
} catch (error) {}
return true;
}
return false;
};

View File

@@ -1,17 +1,20 @@
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
import { ChatContextFilter, countMessagesChars } from '@fastgpt/service/core/chat/utils';
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import type { ClassifyQuestionAgentItemType } from '@fastgpt/global/core/module/type.d';
import type {
ClassifyQuestionAgentItemType,
ModuleDispatchResponse
} from '@fastgpt/global/core/module/type.d';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { Prompt_CQJson } from '@/global/core/prompt/agent';
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { ModelTypeEnum, getLLMModel } from '@/service/core/ai/model';
import { ModelTypeEnum, getLLMModel } from '@fastgpt/service/core/ai/model';
import { getHistories } from '../utils';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.aiModel]: string;
@@ -20,10 +23,9 @@ type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.userChatInput]: string;
[ModuleInputKeyEnum.agents]: ClassifyQuestionAgentItemType[];
}>;
type CQResponse = {
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
type CQResponse = ModuleDispatchResponse<{
[key: string]: any;
};
}>;
const agentFunName = 'classify_question';
@@ -31,6 +33,7 @@ const agentFunName = 'classify_question';
export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse> => {
const {
user,
module: { name },
histories,
params: { model, history = 6, agents, userChatInput }
} = props as Props;
@@ -43,7 +46,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
const chatHistories = getHistories(history, histories);
const { arg, inputTokens, outputTokens } = await (async () => {
const { arg, charsLength } = await (async () => {
if (cqModel.toolChoice) {
return toolChoice({
...props,
@@ -60,25 +63,31 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
const result = agents.find((item) => item.key === arg?.type) || agents[agents.length - 1];
const { total, modelName } = formatModelPrice2Store({
const { totalPoints, modelName } = formatModelChars2Points({
model: cqModel.model,
inputLen: inputTokens,
outputLen: outputTokens,
type: ModelTypeEnum.llm
charsLength,
modelType: ModelTypeEnum.llm
});
return {
[result.key]: true,
[ModuleOutputKeyEnum.responseData]: {
price: user.openaiAccount?.key ? 0 : total,
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
query: userChatInput,
inputTokens,
outputTokens,
charsLength,
cqList: agents,
cqResult: result.value,
contextTotalLen: chatHistories.length + 2
}
},
[ModuleOutputKeyEnum.moduleDispatchBills]: [
{
moduleName: name,
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
charsLength
}
]
};
};
@@ -149,11 +158,13 @@ ${systemPrompt}
const arg = JSON.parse(
response?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || ''
);
const functionChars =
agentFunction.description.length +
agentFunction.parameters.properties.type.description.length;
return {
arg,
inputTokens: response.usage?.prompt_tokens || 0,
outputTokens: response.usage?.completion_tokens || 0
charsLength: countMessagesChars(messages) + functionChars
};
} catch (error) {
console.log(agentFunction.parameters);
@@ -163,8 +174,7 @@ ${systemPrompt}
return {
arg: {},
inputTokens: 0,
outputTokens: 0
charsLength: 0
};
}
}
@@ -206,8 +216,7 @@ async function completions({
agents.find((item) => answer.includes(item.key) || answer.includes(item.value))?.key || '';
return {
inputTokens: data.usage?.prompt_tokens || 0,
outputTokens: data.usage?.completion_tokens || 0,
charsLength: countMessagesChars(messages),
arg: { type: id }
};
}

View File

@@ -1,17 +1,20 @@
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
import { ChatContextFilter, countMessagesChars } from '@fastgpt/service/core/chat/utils';
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import type { ContextExtractAgentItemType } from '@fastgpt/global/core/module/type';
import type {
ContextExtractAgentItemType,
ModuleDispatchResponse
} from '@fastgpt/global/core/module/type';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { Prompt_ExtractJson } from '@/global/core/prompt/agent';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { getHistories } from '../utils';
import { ModelTypeEnum, getLLMModel } from '@/service/core/ai/model';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
import { ModelTypeEnum, getLLMModel } from '@fastgpt/service/core/ai/model';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.history]?: ChatItemType[];
@@ -20,18 +23,18 @@ type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.description]: string;
[ModuleInputKeyEnum.aiModel]: string;
}>;
type Response = {
type Response = ModuleDispatchResponse<{
[ModuleOutputKeyEnum.success]?: boolean;
[ModuleOutputKeyEnum.failed]?: boolean;
[ModuleOutputKeyEnum.contextExtractFields]: string;
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
};
}>;
const agentFunName = 'extract_json_data';
export async function dispatchContentExtract(props: Props): Promise<Response> {
const {
user,
module: { name },
histories,
params: { content, history = 6, model, description, extractKeys }
} = props;
@@ -43,7 +46,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
const extractModel = getLLMModel(model);
const chatHistories = getHistories(history, histories);
const { arg, inputTokens, outputTokens } = await (async () => {
const { arg, charsLength } = await (async () => {
if (extractModel.toolChoice) {
return toolChoice({
...props,
@@ -80,11 +83,10 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
}
}
const { total, modelName } = formatModelPrice2Store({
const { totalPoints, modelName } = formatModelChars2Points({
model: extractModel.model,
inputLen: inputTokens,
outputLen: outputTokens,
type: ModelTypeEnum.llm
charsLength,
modelType: ModelTypeEnum.llm
});
return {
@@ -93,15 +95,22 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
[ModuleOutputKeyEnum.contextExtractFields]: JSON.stringify(arg),
...arg,
[ModuleOutputKeyEnum.responseData]: {
price: user.openaiAccount?.key ? 0 : total,
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
query: content,
inputTokens,
outputTokens,
charsLength,
extractDescription: description,
extractResult: arg,
contextTotalLen: chatHistories.length + 2
}
},
[ModuleOutputKeyEnum.moduleDispatchBills]: [
{
moduleName: name,
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
charsLength
}
]
};
}
@@ -193,10 +202,12 @@ ${description || '根据用户要求获取适当的 JSON 字符串。'}
}
})();
const functionChars =
description.length + extractKeys.reduce((sum, item) => sum + item.desc.length, 0);
return {
rawResponse: response?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || '',
inputTokens: response.usage?.prompt_tokens || 0,
outputTokens: response.usage?.completion_tokens || 0,
charsLength: countMessagesChars(messages) + functionChars,
arg
};
}
@@ -238,8 +249,6 @@ Human: ${content}`
stream: false
});
const answer = data.choices?.[0].message?.content || '';
const inputTokens = data.usage?.prompt_tokens || 0;
const outputTokens = data.usage?.completion_tokens || 0;
// parse response
const start = answer.indexOf('{');
@@ -248,8 +257,7 @@ Human: ${content}`
if (start === -1 || end === -1)
return {
rawResponse: answer,
inputTokens,
outputTokens,
charsLength: countMessagesChars(messages),
arg: {}
};
@@ -261,15 +269,14 @@ Human: ${content}`
try {
return {
rawResponse: answer,
inputTokens,
outputTokens,
charsLength: countMessagesChars(messages),
arg: JSON.parse(jsonStr) as Record<string, any>
};
} catch (error) {
return {
rawResponse: answer,
inputTokens,
outputTokens,
charsLength: countMessagesChars(messages),
arg: {}
};
}

View File

@@ -1,16 +1,16 @@
import type { NextApiResponse } from 'next';
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
import { ChatContextFilter, countMessagesChars } from '@fastgpt/service/core/chat/utils';
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
import { textAdaptGptResponse } from '@/utils/adapt';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import type { ChatCompletion, StreamChatType } from '@fastgpt/global/core/ai/type.d';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { postTextCensor } from '@/service/common/censor';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constant';
import type { ModuleItemType } from '@fastgpt/global/core/module/type.d';
import type { ModuleDispatchResponse, ModuleItemType } from '@fastgpt/global/core/module/type.d';
import { countMessagesTokens, sliceMessagesTB } from '@fastgpt/global/common/string/tiktoken';
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
import { Prompt_QuotePromptList, Prompt_QuoteTemplateList } from '@/global/core/prompt/AIChat';
@@ -18,7 +18,7 @@ import type { AIChatModuleProps } from '@fastgpt/global/core/module/node/type.d'
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { responseWrite, responseWriteController } from '@fastgpt/service/common/response';
import { getLLMModel, ModelTypeEnum } from '@/service/core/ai/model';
import { getLLMModel, ModelTypeEnum } from '@fastgpt/service/core/ai/model';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import { formatStr2ChatContent } from '@fastgpt/service/core/chat/utils';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
@@ -32,11 +32,10 @@ export type ChatProps = ModuleDispatchProps<
[ModuleInputKeyEnum.aiChatDatasetQuote]?: SearchDataResponseItemType[];
}
>;
export type ChatResponse = {
export type ChatResponse = ModuleDispatchResponse<{
[ModuleOutputKeyEnum.answerText]: string;
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
[ModuleOutputKeyEnum.history]: ChatItemType[];
};
}>;
/* request openai chat */
export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResponse> => {
@@ -46,7 +45,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
detail = false,
user,
histories,
outputs,
module: { name, outputs },
params: {
model,
temperature = 0,
@@ -154,7 +153,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
}
);
const { answerText, inputTokens, outputTokens, completeMessages } = await (async () => {
const { answerText, completeMessages } = await (async () => {
if (stream) {
// sse response
const { answer } = await streamResponse({
@@ -172,17 +171,6 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
return {
answerText: answer,
inputTokens: countMessagesTokens({
messages: filterMessages
}),
outputTokens: countMessagesTokens({
messages: [
{
obj: ChatRoleEnum.AI,
value: answer
}
]
}),
completeMessages
};
} else {
@@ -196,33 +184,38 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
return {
answerText: answer,
inputTokens: unStreamResponse.usage?.prompt_tokens || 0,
outputTokens: unStreamResponse.usage?.completion_tokens || 0,
completeMessages
};
}
})();
const { total, modelName } = formatModelPrice2Store({
const charsLength = countMessagesChars(completeMessages);
const { totalPoints, modelName } = formatModelChars2Points({
model,
inputLen: inputTokens,
outputLen: outputTokens,
type: ModelTypeEnum.llm
charsLength,
modelType: ModelTypeEnum.llm
});
return {
answerText,
responseData: {
price: user.openaiAccount?.key ? 0 : total,
[ModuleOutputKeyEnum.responseData]: {
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
inputTokens,
outputTokens,
charsLength,
query: `${userChatInput}`,
maxToken: max_tokens,
quoteList: filterQuoteQA,
historyPreview: getHistoryPreview(completeMessages),
contextTotalLen: completeMessages.length
},
[ModuleOutputKeyEnum.moduleDispatchBills]: [
{
moduleName: name,
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
charsLength
}
],
history: completeMessages
};
};
@@ -249,30 +242,13 @@ function filterQuote({
// slice filterSearch
const filterQuoteQA = filterSearchResultsByMaxChars(quoteQA, model.quoteMaxToken);
// filterQuoteQA按collectionId聚合在一起后再按chunkIndex从小到大排序
const sortQuoteQAMap: Record<string, SearchDataResponseItemType[]> = {};
filterQuoteQA.forEach((item) => {
if (sortQuoteQAMap[item.collectionId]) {
sortQuoteQAMap[item.collectionId].push(item);
} else {
sortQuoteQAMap[item.collectionId] = [item];
}
});
const sortQuoteQAList = Object.values(sortQuoteQAMap);
sortQuoteQAList.forEach((qaList) => {
qaList.sort((a, b) => a.chunkIndex - b.chunkIndex);
});
const flatQuoteList = sortQuoteQAList.flat();
const quoteText =
flatQuoteList.length > 0
? `${flatQuoteList.map((item, index) => getValue(item, index)).join('\n')}`
filterQuoteQA.length > 0
? `${filterQuoteQA.map((item, index) => getValue(item, index).trim()).join('\n------\n')}`
: '';
return {
filterQuoteQA: flatQuoteList,
filterQuoteQA: filterQuoteQA,
quoteText
};
}

View File

@@ -1,15 +1,19 @@
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
import type { SelectedDatasetType } from '@fastgpt/global/core/module/api.d';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { ModelTypeEnum, getLLMModel, getVectorModel } from '@/service/core/ai/model';
import type {
ModuleDispatchProps,
ModuleDispatchResponse
} from '@fastgpt/global/core/module/type.d';
import { ModelTypeEnum, getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { searchDatasetData } from '@/service/core/dataset/data/controller';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constants';
import { queryExtension } from '@fastgpt/service/core/ai/functions/queryExtension';
import { getHistories } from '../utils';
import { datasetSearchQueryExtension } from '@fastgpt/service/core/dataset/search/utils';
import { ChatModuleBillType } from '@fastgpt/global/support/wallet/bill/type';
import { checkTeamReRankPermission } from '@fastgpt/service/support/permission/teamLimit';
type DatasetSearchProps = ModuleDispatchProps<{
[ModuleInputKeyEnum.datasetSelectList]: SelectedDatasetType;
@@ -22,12 +26,11 @@ type DatasetSearchProps = ModuleDispatchProps<{
[ModuleInputKeyEnum.datasetSearchExtensionModel]: string;
[ModuleInputKeyEnum.datasetSearchExtensionBg]: string;
}>;
export type DatasetSearchResponse = {
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
export type DatasetSearchResponse = ModuleDispatchResponse<{
[ModuleOutputKeyEnum.datasetIsEmpty]?: boolean;
[ModuleOutputKeyEnum.datasetUnEmpty]?: boolean;
[ModuleOutputKeyEnum.datasetQuoteQA]: SearchDataResponseItemType[];
};
}>;
export async function dispatchDatasetSearch(
props: DatasetSearchProps
@@ -35,6 +38,7 @@ export async function dispatchDatasetSearch(
const {
teamId,
histories,
module,
params: {
datasets = [],
similarity,
@@ -73,6 +77,8 @@ export async function dispatchDatasetSearch(
histories: getHistories(6, histories)
});
// console.log(concatQueries, rewriteQuery, aiExtensionResult);
// get vector
const vectorModel = getVectorModel(datasets[0]?.vectorModel?.model);
@@ -91,18 +97,18 @@ export async function dispatchDatasetSearch(
limit,
datasetIds: datasets.map((item) => item.datasetId),
searchMode,
usingReRank
usingReRank: usingReRank && (await checkTeamReRankPermission(teamId))
});
// count bill results
// vector
const { total, modelName } = formatModelPrice2Store({
const { totalPoints, modelName } = formatModelChars2Points({
model: vectorModel.model,
inputLen: charsLength,
type: ModelTypeEnum.vector
charsLength,
modelType: ModelTypeEnum.vector
});
const responseData: moduleDispatchResType & { price: number } = {
price: total,
const responseData: moduleDispatchResType & { totalPoints: number } = {
totalPoints,
query: concatQueries.join('\n'),
model: modelName,
charsLength,
@@ -111,28 +117,42 @@ export async function dispatchDatasetSearch(
searchMode,
searchUsingReRank: searchUsingReRank
};
const moduleDispatchBills: ChatModuleBillType[] = [
{
totalPoints,
moduleName: module.name,
model: modelName,
charsLength
}
];
if (aiExtensionResult) {
const { total, modelName } = formatModelPrice2Store({
const { totalPoints, modelName } = formatModelChars2Points({
model: aiExtensionResult.model,
inputLen: aiExtensionResult.inputTokens,
outputLen: aiExtensionResult.outputTokens,
type: ModelTypeEnum.llm
charsLength: aiExtensionResult.charsLength,
modelType: ModelTypeEnum.llm
});
responseData.price += total;
responseData.inputTokens = aiExtensionResult.inputTokens;
responseData.outputTokens = aiExtensionResult.outputTokens;
responseData.totalPoints += totalPoints;
responseData.charsLength = aiExtensionResult.charsLength;
responseData.extensionModel = modelName;
responseData.extensionResult =
aiExtensionResult.extensionQueries?.join('\n') ||
JSON.stringify(aiExtensionResult.extensionQueries);
moduleDispatchBills.push({
totalPoints,
moduleName: 'core.module.template.Query extension',
model: modelName,
charsLength: aiExtensionResult.charsLength
});
}
return {
isEmpty: searchRes.length === 0 ? true : undefined,
unEmpty: searchRes.length > 0 ? true : undefined,
quoteQA: searchRes,
responseData
responseData,
moduleDispatchBills
};
}

View File

@@ -23,11 +23,12 @@ import { dispatchContentExtract } from './agent/extract';
import { dispatchHttpRequest } from './tools/http';
import { dispatchHttp468Request } from './tools/http468';
import { dispatchAppRequest } from './tools/runApp';
import { dispatchCFR } from './tools/cfr';
import { dispatchQueryExtension } from './tools/queryExternsion';
import { dispatchRunPlugin } from './plugin/run';
import { dispatchPluginInput } from './plugin/runInput';
import { dispatchPluginOutput } from './plugin/runOutput';
import { valueTypeFormat } from './utils';
import { ChatModuleBillType } from '@fastgpt/global/support/wallet/bill/type';
const callbackMap: Record<`${FlowNodeTypeEnum}`, Function> = {
[FlowNodeTypeEnum.historyNode]: dispatchHistory,
@@ -44,7 +45,7 @@ const callbackMap: Record<`${FlowNodeTypeEnum}`, Function> = {
[FlowNodeTypeEnum.pluginModule]: dispatchRunPlugin,
[FlowNodeTypeEnum.pluginInput]: dispatchPluginInput,
[FlowNodeTypeEnum.pluginOutput]: dispatchPluginOutput,
[FlowNodeTypeEnum.cfr]: dispatchCFR,
[FlowNodeTypeEnum.queryExtension]: dispatchQueryExtension,
// none
[FlowNodeTypeEnum.userGuide]: () => Promise.resolve()
@@ -82,16 +83,19 @@ export async function dispatchModules({
// let storeData: Record<string, any> = {}; // after module used
let chatResponse: ChatHistoryItemResType[] = []; // response request and save to database
let chatAnswerText = ''; // AI answer
let chatModuleBills: ChatModuleBillType[] = [];
let runningTime = Date.now();
function pushStore(
{ inputs = [] }: RunningModuleItemType,
{
answerText = '',
responseData
responseData,
moduleDispatchBills
}: {
answerText?: string;
responseData?: ChatHistoryItemResType | ChatHistoryItemResType[];
moduleDispatchBills?: ChatModuleBillType[];
}
) {
const time = Date.now();
@@ -105,6 +109,9 @@ export async function dispatchModules({
});
}
}
if (moduleDispatchBills) {
chatModuleBills = chatModuleBills.concat(moduleDispatchBills);
}
runningTime = time;
const isResponseAnswerText =
@@ -158,6 +165,7 @@ export async function dispatchModules({
const filterModules = nextRunModules.filter((module) => {
if (set.has(module.moduleId)) return false;
set.add(module.moduleId);
``;
return true;
});
@@ -199,8 +207,7 @@ export async function dispatchModules({
user,
stream,
detail,
outputs: module.outputs,
inputs: module.inputs,
module,
params
};
@@ -237,10 +244,11 @@ export async function dispatchModules({
? params[ModuleOutputKeyEnum.userChatInput]
: undefined,
...dispatchRes,
[ModuleOutputKeyEnum.responseData]: formatResponseData
[ModuleOutputKeyEnum.responseData]: formatResponseData,
[ModuleOutputKeyEnum.moduleDispatchBills]:
dispatchRes[ModuleOutputKeyEnum.moduleDispatchBills]
});
}
// start process width initInput
const initModules = runningModules.filter((item) => initRunningModuleType[item.flowType]);
@@ -266,7 +274,8 @@ export async function dispatchModules({
return {
[ModuleOutputKeyEnum.answerText]: chatAnswerText,
[ModuleOutputKeyEnum.responseData]: chatResponse
[ModuleOutputKeyEnum.responseData]: chatResponse,
[ModuleOutputKeyEnum.moduleDispatchBills]: chatModuleBills
};
}

View File

@@ -1,4 +1,7 @@
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import type {
ModuleDispatchProps,
ModuleDispatchResponse
} from '@fastgpt/global/core/module/type.d';
import { dispatchModules } from '../index';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import {
@@ -6,7 +9,6 @@ import {
ModuleInputKeyEnum,
ModuleOutputKeyEnum
} from '@fastgpt/global/core/module/constants';
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
import { getPluginRuntimeById } from '@fastgpt/service/core/plugin/controller';
import { authPluginCanUse } from '@fastgpt/service/support/permission/auth/plugin';
@@ -14,10 +16,9 @@ type RunPluginProps = ModuleDispatchProps<{
[ModuleInputKeyEnum.pluginId]: string;
[key: string]: any;
}>;
type RunPluginResponse = {
type RunPluginResponse = ModuleDispatchResponse<{
[ModuleOutputKeyEnum.answerText]: string;
[ModuleOutputKeyEnum.responseData]?: moduleDispatchResType;
};
}>;
export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPluginResponse> => {
const {
@@ -58,7 +59,7 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
return params;
})();
const { responseData, answerText } = await dispatchModules({
const { responseData, moduleDispatchBills, answerText } = await dispatchModules({
...props,
modules: plugin.modules.map((module) => ({
...module,
@@ -76,9 +77,9 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
return {
answerText,
// responseData, // debug
responseData: {
[ModuleOutputKeyEnum.responseData]: {
moduleLogo: plugin.avatar,
price: responseData.reduce((sum, item) => sum + (item.price || 0), 0),
totalPoints: responseData.reduce((sum, item) => sum + (item.totalPoints || 0), 0),
runningTime: responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0),
pluginOutput: output?.pluginOutput,
pluginDetail:
@@ -89,6 +90,14 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
})
: undefined
},
[ModuleOutputKeyEnum.moduleDispatchBills]: [
{
moduleName: plugin.name,
totalPoints: moduleDispatchBills.reduce((sum, item) => sum + (item.totalPoints || 0), 0),
model: plugin.name,
charsLength: 0
}
],
...(output ? output.pluginOutput : {})
};
};

View File

@@ -14,7 +14,7 @@ export const dispatchPluginOutput = (props: PluginOutputProps): PluginOutputResp
return {
responseData: {
price: 0,
totalPoints: 0,
pluginOutput: params
}
};

View File

@@ -1,64 +0,0 @@
import type { ChatItemType, moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { ModelTypeEnum, getLLMModel } from '@/service/core/ai/model';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
import { queryCfr } from '@fastgpt/service/core/ai/functions/cfr';
import { getHistories } from '../utils';
type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.aiModel]: string;
[ModuleInputKeyEnum.aiSystemPrompt]?: string;
[ModuleInputKeyEnum.history]?: ChatItemType[] | number;
[ModuleInputKeyEnum.userChatInput]: string;
}>;
type Response = {
[ModuleOutputKeyEnum.text]: string;
[ModuleOutputKeyEnum.responseData]?: moduleDispatchResType;
};
export const dispatchCFR = async ({
histories,
params: { model, systemPrompt, history, userChatInput }
}: Props): Promise<Response> => {
if (!userChatInput) {
return Promise.reject('Question is empty');
}
// none
// first chat and no system prompt
if (systemPrompt === 'none' || (histories.length === 0 && !systemPrompt)) {
return {
[ModuleOutputKeyEnum.text]: userChatInput
};
}
const cfrModel = getLLMModel(model);
const chatHistories = getHistories(history, histories);
const { cfrQuery, inputTokens, outputTokens } = await queryCfr({
chatBg: systemPrompt,
query: userChatInput,
histories: chatHistories,
model: cfrModel.model
});
const { total, modelName } = formatModelPrice2Store({
model: cfrModel.model,
inputLen: inputTokens,
outputLen: outputTokens,
type: ModelTypeEnum.llm
});
return {
[ModuleOutputKeyEnum.responseData]: {
price: total,
model: modelName,
inputTokens,
outputTokens,
query: userChatInput,
textOutput: cfrQuery
},
[ModuleOutputKeyEnum.text]: cfrQuery
};
};

View File

@@ -1,5 +1,8 @@
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import type {
ModuleDispatchProps,
ModuleDispatchResponse
} from '@fastgpt/global/core/module/type.d';
import {
DYNAMIC_INPUT_KEY,
ModuleInputKeyEnum,
@@ -16,11 +19,10 @@ type HttpRequestProps = ModuleDispatchProps<{
[ModuleInputKeyEnum.httpHeaders]: string;
[key: string]: any;
}>;
type HttpResponse = {
type HttpResponse = ModuleDispatchResponse<{
[ModuleOutputKeyEnum.failed]?: boolean;
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
[key: string]: any;
};
}>;
const flatDynamicParams = (params: Record<string, any>) => {
const dynamicParams = params[DYNAMIC_INPUT_KEY];
@@ -38,7 +40,7 @@ export const dispatchHttpRequest = async (props: HttpRequestProps): Promise<Http
chatId,
responseChatItemId,
variables,
outputs,
module: { outputs },
params: {
system_httpMethod: httpMethod = 'POST',
system_httpReqUrl: httpReqUrl,
@@ -97,8 +99,8 @@ export const dispatchHttpRequest = async (props: HttpRequestProps): Promise<Http
}
return {
responseData: {
price: 0,
[ModuleOutputKeyEnum.responseData]: {
totalPoints: 0,
body: formatBody,
httpResult: response
},
@@ -109,8 +111,8 @@ export const dispatchHttpRequest = async (props: HttpRequestProps): Promise<Http
return {
[ModuleOutputKeyEnum.failed]: true,
responseData: {
price: 0,
[ModuleOutputKeyEnum.responseData]: {
totalPoints: 0,
body: formatBody,
httpResult: { error }
}
@@ -139,6 +141,7 @@ async function fetchData({
'Content-Type': 'application/json',
...headers
},
timeout: 360000,
params: method === 'GET' ? query : {},
data: method === 'POST' ? body : {}
});

View File

@@ -1,5 +1,7 @@
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import type {
ModuleDispatchProps,
ModuleDispatchResponse
} from '@fastgpt/global/core/module/type.d';
import {
DYNAMIC_INPUT_KEY,
ModuleInputKeyEnum,
@@ -24,11 +26,10 @@ type HttpRequestProps = ModuleDispatchProps<{
[DYNAMIC_INPUT_KEY]: Record<string, any>;
[key: string]: any;
}>;
type HttpResponse = {
type HttpResponse = ModuleDispatchResponse<{
[ModuleOutputKeyEnum.failed]?: boolean;
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
[key: string]: any;
};
}>;
const UNDEFINED_SIGN = 'UNDEFINED_SIGN';
@@ -38,7 +39,7 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
chatId,
responseChatItemId,
variables,
outputs,
module: { outputs },
histories,
params: {
system_httpMethod: httpMethod = 'POST',
@@ -119,8 +120,8 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
}
return {
responseData: {
price: 0,
[ModuleOutputKeyEnum.responseData]: {
totalPoints: 0,
params: Object.keys(params).length > 0 ? params : undefined,
body: Object.keys(requestBody).length > 0 ? requestBody : undefined,
headers: Object.keys(headers).length > 0 ? headers : undefined,
@@ -131,8 +132,8 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
} catch (error) {
return {
[ModuleOutputKeyEnum.failed]: true,
responseData: {
price: 0,
[ModuleOutputKeyEnum.responseData]: {
totalPoints: 0,
params: Object.keys(params).length > 0 ? params : undefined,
body: Object.keys(requestBody).length > 0 ? requestBody : undefined,
headers: Object.keys(headers).length > 0 ? headers : undefined,

View File

@@ -0,0 +1,77 @@
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type {
ModuleDispatchProps,
ModuleDispatchResponse
} from '@fastgpt/global/core/module/type.d';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { ModelTypeEnum, getLLMModel } from '@fastgpt/service/core/ai/model';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
import { queryExtension } from '@fastgpt/service/core/ai/functions/queryExtension';
import { getHistories } from '../utils';
import { hashStr } from '@fastgpt/global/common/string/tools';
type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.aiModel]: string;
[ModuleInputKeyEnum.aiSystemPrompt]?: string;
[ModuleInputKeyEnum.history]?: ChatItemType[] | number;
[ModuleInputKeyEnum.userChatInput]: string;
}>;
type Response = ModuleDispatchResponse<{
[ModuleOutputKeyEnum.text]: string;
}>;
export const dispatchQueryExtension = async ({
histories,
module,
params: { model, systemPrompt, history, userChatInput }
}: Props): Promise<Response> => {
if (!userChatInput) {
return Promise.reject('Question is empty');
}
const queryExtensionModel = getLLMModel(model);
const chatHistories = getHistories(history, histories);
const { extensionQueries, charsLength } = await queryExtension({
chatBg: systemPrompt,
query: userChatInput,
histories: chatHistories,
model: queryExtensionModel.model
});
extensionQueries.unshift(userChatInput);
const { totalPoints, modelName } = formatModelChars2Points({
model: queryExtensionModel.model,
charsLength,
modelType: ModelTypeEnum.llm
});
const set = new Set<string>();
const filterSameQueries = extensionQueries.filter((item) => {
// 删除所有的标点符号与空格等,只对文本进行比较
const str = hashStr(item.replace(/[^\p{L}\p{N}]/gu, ''));
if (set.has(str)) return false;
set.add(str);
return true;
});
return {
[ModuleOutputKeyEnum.responseData]: {
totalPoints,
model: modelName,
charsLength,
query: userChatInput,
textOutput: JSON.stringify(filterSameQueries)
},
[ModuleOutputKeyEnum.moduleDispatchBills]: [
{
moduleName: module.name,
totalPoints,
model: modelName,
charsLength
}
],
[ModuleOutputKeyEnum.text]: JSON.stringify(filterSameQueries)
};
};

View File

@@ -1,5 +1,8 @@
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import type {
ModuleDispatchProps,
ModuleDispatchResponse
} from '@fastgpt/global/core/module/type.d';
import { SelectAppItemType } from '@fastgpt/global/core/module/type';
import { dispatchModules } from '../index';
import { MongoApp } from '@fastgpt/service/core/app/schema';
@@ -15,21 +18,21 @@ type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.history]?: ChatItemType[] | number;
app: SelectAppItemType;
}>;
type Response = {
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType[];
type Response = ModuleDispatchResponse<{
[ModuleOutputKeyEnum.answerText]: string;
[ModuleOutputKeyEnum.history]: ChatItemType[];
};
}>;
export const dispatchAppRequest = async (props: Props): Promise<Response> => {
const {
res,
user,
teamId,
stream,
detail,
histories,
params: { userChatInput, history, app }
} = props;
let start = Date.now();
if (!userChatInput) {
return Promise.reject('Input is empty');
@@ -37,7 +40,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
const appData = await MongoApp.findOne({
_id: app.id,
teamId: user.team.teamId
teamId
});
if (!appData) {
@@ -56,7 +59,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
const chatHistories = getHistories(history, histories);
const { responseData, answerText } = await dispatchModules({
const { responseData, moduleDispatchBills, answerText } = await dispatchModules({
...props,
appId: app.id,
modules: appData.modules,
@@ -78,7 +81,18 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
]);
return {
responseData,
[ModuleOutputKeyEnum.responseData]: {
moduleLogo: appData.avatar,
query: userChatInput,
textOutput: answerText,
totalPoints: responseData.reduce((sum, item) => sum + (item.totalPoints || 0), 0)
},
[ModuleOutputKeyEnum.moduleDispatchBills]: [
{
moduleName: appData.name,
totalPoints: responseData.reduce((sum, item) => sum + (item.totalPoints || 0), 0)
}
],
answerText: answerText,
history: completeMessages
};

View File

@@ -1,5 +1,5 @@
import { startQueue } from './utils/tools';
import { PRICE_SCALE } from '@fastgpt/global/support/wallet/bill/constants';
import { PRICE_SCALE } from '@fastgpt/global/support/wallet/constants';
import { MongoUser } from '@fastgpt/service/support/user/schema';
import { connectMongo } from '@fastgpt/service/common/mongo/init';
import { hashStr } from '@fastgpt/global/common/string/tools';

View File

@@ -1,9 +0,0 @@
import { GET } from '@fastgpt/service/common/api/plusRequest';
import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';
export const authTeamBalance = async (teamId: string) => {
if (FastGPTProUrl) {
return GET('/support/permission/authBalance', { teamId });
}
return true;
};

View File

@@ -6,7 +6,6 @@ import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
import { authUserRole } from '@fastgpt/service/support/permission/auth/user';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { AuthResponseType } from '@fastgpt/global/support/permission/type';
/*
outLink: Must be the owner
token: team owner and chat owner have all permissions
@@ -15,12 +14,14 @@ export async function autChatCrud({
appId,
chatId,
shareId,
shareTeamId,
outLinkUid,
per = 'owner',
...props
}: AuthModeType & {
appId: string;
chatId?: string;
shareTeamId?: string;
shareId?: string;
outLinkUid?: string;
}): Promise<{
@@ -28,7 +29,7 @@ export async function autChatCrud({
isOutLink: boolean;
uid?: string;
}> {
const isOutLink = Boolean(shareId && outLinkUid);
const isOutLink = Boolean((shareId || shareTeamId) && outLinkUid);
if (!chatId) return { isOutLink, uid: outLinkUid };
const chat = await MongoChat.findOne({ appId, chatId }).lean();
@@ -46,6 +47,11 @@ export async function autChatCrud({
}
return Promise.reject(ChatErrEnum.unAuthChat);
}
if (shareTeamId && outLinkUid) {
if (chat.teamId == shareTeamId && chat.outLinkUid === outLinkUid) {
return { uid: outLinkUid };
}
}
// req auth
const { teamId, tmbId, role } = await authUserRole(props);

View File

@@ -24,6 +24,7 @@ export async function authDatasetData({
const data: DatasetDataItemType = {
id: String(datasetData._id),
teamId: datasetData.teamId,
q: datasetData.q,
a: datasetData.a,
chunkIndex: datasetData.chunkIndex,

View File

@@ -6,7 +6,7 @@ import type {
AuthOutLinkResponse
} from '@fastgpt/global/support/outLink/api.d';
import { authOutLinkValid } from '@fastgpt/service/support/permission/auth/outLink';
import { getUserAndAuthBalance } from '@fastgpt/service/support/user/controller';
import { getUserChatInfoAndAuthTeamPoints } from '@/service/support/permission/auth/team';
import { AuthUserTypeEnum } from '@fastgpt/global/support/permission/constant';
import { OutLinkErrEnum } from '@fastgpt/global/common/error/code/outLink';
import { OutLinkSchema } from '@fastgpt/global/support/outLink/type';
@@ -58,13 +58,15 @@ export async function authOutLinkChatStart({
// get outLink and app
const { shareChat, appId } = await authOutLinkValid({ shareId });
// check balance and chat limit
const [user, { uid }] = await Promise.all([
getUserAndAuthBalance({ tmbId: shareChat.tmbId, minBalance: 0 }),
// check ai points and chat limit
const [{ user }, { uid }] = await Promise.all([
getUserChatInfoAndAuthTeamPoints(shareChat.tmbId),
authOutLinkChatLimit({ outLink: shareChat, ip, outLinkUid, question })
]);
return {
teamId: shareChat.teamId,
tmbId: shareChat.tmbId,
authType: AuthUserTypeEnum.token,
responseDetail: shareChat.responseDetail,
user,

View File

@@ -0,0 +1,43 @@
import { UserErrEnum } from '@fastgpt/global/common/error/code/user';
import { TeamMemberWithUserSchema } from '@fastgpt/global/support/user/team/type';
import { MongoTeamMember } from '@fastgpt/service/support/user/team/teamMemberSchema';
import { MongoTeam } from '@fastgpt/service/support/user/team/teamSchema';
import { checkTeamAIPoints } from '@fastgpt/service/support/permission/teamLimit';
import axios from 'axios';
export async function getUserChatInfoAndAuthTeamPoints(tmbId: string) {
const tmb = (await MongoTeamMember.findById(tmbId, 'teamId userId').populate(
'userId',
'timezone openaiAccount'
)) as TeamMemberWithUserSchema;
if (!tmb) return Promise.reject(UserErrEnum.unAuthUser);
await checkTeamAIPoints(tmb.teamId);
return {
user: tmb.userId
};
}
type UserInfoType = {
data: {
uid: string;
tags: string[];
};
};
export async function getShareTeamUid(shareTeamId: string, authToken: string) {
try {
const teamInfo = await MongoTeam.findById(shareTeamId);
const tagsUrl = teamInfo?.tagsUrl;
const { data: userInfo } = await axios.post(tagsUrl + `/getUserInfo`, { autoken: authToken });
const uid = userInfo?.data?.uid;
if (uid) {
throw new Error('uid null');
}
return uid;
} catch (err) {
return '';
}
}

View File

@@ -0,0 +1,36 @@
import { POST } from '@fastgpt/service/common/api/plusRequest';
import type { AuthOutLinkChatProps } from '@fastgpt/global/support/outLink/api.d';
import type { chatAppListSchema } from '@fastgpt/global/core/chat/type.d';
import { getUserChatInfoAndAuthTeamPoints } from './team';
import { MongoTeam } from '@fastgpt/service/support/user/team/teamSchema';
import { MongoTeamMember } from '@fastgpt/service/support/user/team/teamMemberSchema';
export function authChatTeamInfo(data: { shareTeamId: string; authToken: string }) {
return POST<chatAppListSchema>('/core/chat/init', data);
}
export async function authTeamShareChatStart({
teamId,
ip,
outLinkUid,
question
}: AuthOutLinkChatProps & {
teamId: string;
}) {
// get outLink and app
const { teamInfo, uid } = await authChatTeamInfo({ shareTeamId: teamId, authToken: outLinkUid });
// check balance and chat limit
const tmb = await MongoTeamMember.findOne({ teamId, userId: String(teamInfo.ownerId) });
if (!tmb) {
throw new Error('can not find it');
}
const { user } = await getUserChatInfoAndAuthTeamPoints(String(tmb._id));
return {
user,
tmbId: String(tmb._id),
uid: uid
};
}

View File

@@ -1,23 +0,0 @@
import { ConcatBillProps, CreateBillProps } from '@fastgpt/global/support/wallet/bill/api';
import { addLog } from '@fastgpt/service/common/system/log';
import { POST } from '@fastgpt/service/common/api/plusRequest';
import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';
/**
 * Fire-and-forget creation of a bill record on the pro service.
 * No-op when FastGPTProUrl is not configured.
 */
export function createBill(data: CreateBillProps) {
  if (!FastGPTProUrl) return;
  if (data.total === 0) {
    addLog.info('0 Bill', data);
  }
  // Bug fix: the original wrapped this in a sync try/catch, which can never
  // intercept the async rejection of the un-awaited POST — failures became
  // unhandled promise rejections. Attach a catch handler instead.
  POST('/support/wallet/bill/createBill', data).catch((error) => {
    addLog.error('createBill error', error);
  });
}
/**
 * Fire-and-forget append onto an existing bill record on the pro service.
 * No-op when FastGPTProUrl is not configured.
 */
export function concatBill(data: ConcatBillProps) {
  if (!FastGPTProUrl) return;
  if (data.total === 0) {
    addLog.info('0 Bill', data);
  }
  // Bug fix: a sync try/catch cannot catch the rejection of the un-awaited
  // POST; handle it on the promise so it never becomes an unhandled rejection.
  POST('/support/wallet/bill/concatBill', data).catch((error) => {
    addLog.error('concatBill error', error);
  });
}

View File

@@ -1,327 +0,0 @@
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { ModelTypeEnum } from '@/service/core/ai/model';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { formatStorePrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import { addLog } from '@fastgpt/service/common/system/log';
import { PostReRankProps } from '@fastgpt/global/core/ai/api';
import { createBill, concatBill } from './controller';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
/**
 * Creates a bill for one chat completion: one list row per dispatched
 * module, totalled over all module prices. Returns the computed total.
 */
export const pushChatBill = ({
  appName,
  appId,
  teamId,
  tmbId,
  source,
  response
}: {
  appName: string;
  appId: string;
  teamId: string;
  tmbId: string;
  source: `${BillSourceEnum}`;
  response: ChatHistoryItemResType[];
}) => {
  // Sum every module's price; a missing price counts as 0.
  let total = 0;
  for (const item of response) {
    total += item.price || 0;
  }

  const list = response.map((item) => ({
    moduleName: item.moduleName,
    amount: item.price || 0,
    model: item.model,
    inputTokens: item.inputTokens,
    outputTokens: item.outputTokens,
    charsLength: item.charsLength
  }));

  createBill({ teamId, tmbId, appName, appId, total, source, list });

  addLog.info(`finish completions`, {
    source,
    teamId,
    tmbId,
    price: formatStorePrice2Read(total)
  });

  return { total };
};
/**
 * Appends the cost of one QA generation onto an existing bill (slot 1).
 * The character count is priced as LLM input length.
 */
export const pushQABill = async ({
  teamId,
  tmbId,
  model,
  charsLength,
  billId
}: {
  teamId: string;
  tmbId: string;
  model: string;
  charsLength: number;
  billId: string;
}) => {
  // Compute the price for this generation.
  const priced = formatModelPrice2Store({
    model,
    inputLen: charsLength,
    type: ModelTypeEnum.llm
  });

  // Concatenate onto the existing bill rather than creating a new record.
  concatBill({
    billId,
    teamId,
    tmbId,
    total: priced.total,
    charsLength,
    listIndex: 1
  });

  return { total: priced.total };
};
// Bills one vector (embedding) generation, optionally together with the
// query-extension LLM call that preceded it. When `billId` is given, the
// vector cost is appended onto an existing bill (listIndex 0); otherwise a
// new standalone bill with an itemized list is created.
export const pushGenerateVectorBill = ({
  billId,
  teamId,
  tmbId,
  charsLength,
  model,
  source = BillSourceEnum.fastgpt,
  extensionModel,
  extensionInputTokens,
  extensionOutputTokens
}: {
  billId?: string;
  teamId: string;
  tmbId: string;
  charsLength: number;
  model: string;
  source?: `${BillSourceEnum}`;
  extensionModel?: string;
  extensionInputTokens?: number;
  extensionOutputTokens?: number;
}) => {
  // Vector model cost: chars priced as input length.
  const { total: totalVector, modelName: vectorModelName } = formatModelPrice2Store({
    model,
    inputLen: charsLength,
    type: ModelTypeEnum.vector
  });
  // Optional query-extension cost.
  // NOTE(review): this guard treats a token count of 0 as "no extension",
  // while the list spread below only checks `extensionModel !== undefined`;
  // with a model set but 0 tokens, a zero-amount row with an empty model
  // name is still appended. Confirm this is intended.
  const { extensionTotal, extensionModelName } = (() => {
    if (!extensionModel || !extensionInputTokens || !extensionOutputTokens)
      return {
        extensionTotal: 0,
        extensionModelName: ''
      };
    const { total, modelName } = formatModelPrice2Store({
      model: extensionModel,
      inputLen: extensionInputTokens,
      outputLen: extensionOutputTokens,
      type: ModelTypeEnum.llm
    });
    return {
      extensionTotal: total,
      extensionModelName: modelName
    };
  })();
  const total = totalVector + extensionTotal;
  // Insert the bill record.
  if (billId) {
    // NOTE(review): only the vector part is concatenated here — the
    // extension cost is not recorded when appending to an existing bill,
    // even though the returned `total` includes it. Confirm intended.
    concatBill({
      teamId,
      tmbId,
      total: totalVector,
      billId,
      charsLength,
      listIndex: 0
    });
  } else {
    createBill({
      teamId,
      tmbId,
      appName: 'wallet.moduleName.index',
      total,
      source,
      list: [
        {
          moduleName: 'wallet.moduleName.index',
          amount: totalVector,
          model: vectorModelName,
          charsLength
        },
        ...(extensionModel !== undefined
          ? [
              {
                moduleName: 'core.module.template.Query extension',
                amount: extensionTotal,
                model: extensionModelName,
                inputTokens: extensionInputTokens,
                outputTokens: extensionOutputTokens
              }
            ]
          : [])
      ]
    });
  }
  return { total };
};
/**
 * Bills one "Next Step Guide" generation against the first configured LLM.
 * No-op when no LLM model is configured.
 */
export const pushQuestionGuideBill = ({
  inputTokens,
  outputTokens,
  teamId,
  tmbId
}: {
  inputTokens: number;
  outputTokens: number;
  teamId: string;
  tmbId: string;
}) => {
  const qgModel = global.llmModels[0];
  // Fix: guard an empty model list — consistent with the whisperModel /
  // reRankModel guards in the sibling functions; previously this crashed
  // on `qgModel.model` when `global.llmModels` was empty.
  if (!qgModel) return;

  const { total, modelName } = formatModelPrice2Store({
    inputLen: inputTokens,
    outputLen: outputTokens,
    model: qgModel.model,
    type: ModelTypeEnum.llm
  });

  createBill({
    teamId,
    tmbId,
    appName: 'wallet.bill.Next Step Guide',
    total,
    source: BillSourceEnum.fastgpt,
    list: [
      {
        moduleName: 'wallet.bill.Next Step Guide',
        amount: total,
        model: modelName,
        inputTokens,
        outputTokens
      }
    ]
  });
};
/**
 * Bills one audio-speech (TTS) call; cost is driven by the number of
 * characters synthesized.
 */
export function pushAudioSpeechBill({
  appName = 'wallet.bill.Audio Speech',
  model,
  charsLength,
  teamId,
  tmbId,
  source = BillSourceEnum.fastgpt
}: {
  appName?: string;
  model: string;
  charsLength: number;
  teamId: string;
  tmbId: string;
  source: `${BillSourceEnum}`;
}) {
  const priced = formatModelPrice2Store({
    model,
    inputLen: charsLength,
    type: ModelTypeEnum.audioSpeech
  });

  const billItem = {
    moduleName: appName,
    amount: priced.total,
    model: priced.modelName,
    charsLength
  };

  createBill({
    teamId,
    tmbId,
    appName,
    total: priced.total,
    source,
    list: [billItem]
  });
}
/**
 * Bills one whisper transcription. The duration is priced in units of 60
 * (multiple: 60 in the price formula). No-op when no whisper model is
 * configured.
 */
export function pushWhisperBill({
  teamId,
  tmbId,
  duration
}: {
  teamId: string;
  tmbId: string;
  duration: number;
}) {
  const whisperModel = global.whisperModel;
  if (!whisperModel) return;

  const { total, modelName } = formatModelPrice2Store({
    model: whisperModel.model,
    inputLen: duration,
    type: ModelTypeEnum.whisper,
    multiple: 60
  });

  const billName = 'wallet.bill.Whisper';
  createBill({
    teamId,
    tmbId,
    appName: billName,
    total,
    source: BillSourceEnum.fastgpt,
    list: [{ moduleName: billName, amount: total, model: modelName, duration }]
  });
}
/**
 * Bills one rerank call against the first configured rerank model; the cost
 * is driven by the total character count of all candidate texts.
 * Returns { total: 0 } when no rerank model is configured.
 */
export function pushReRankBill({
  teamId,
  tmbId,
  source,
  inputs
}: {
  teamId: string;
  tmbId: string;
  source: `${BillSourceEnum}`;
  inputs: PostReRankProps['inputs'];
}) {
  const reRankModel = global.reRankModels[0];
  if (!reRankModel) return { total: 0 };

  // Total characters across all rerank candidates.
  let charsLength = 0;
  for (const input of inputs) {
    charsLength += input.text.length;
  }

  const { total, modelName } = formatModelPrice2Store({
    model: reRankModel.model,
    inputLen: charsLength,
    type: ModelTypeEnum.rerank
  });

  const billName = 'wallet.bill.ReRank';
  createBill({
    teamId,
    tmbId,
    appName: billName,
    total,
    source,
    list: [{ moduleName: billName, amount: total, model: modelName, charsLength }]
  });

  return { total };
}

View File

@@ -1,54 +0,0 @@
import { ModelTypeEnum, getModelMap } from '@/service/core/ai/model';
import { AuthUserTypeEnum } from '@fastgpt/global/support/permission/constant';
import { BillSourceEnum, PRICE_SCALE } from '@fastgpt/global/support/wallet/bill/constants';
/**
 * Maps auth context to a bill source. An explicitly supplied source always
 * wins; otherwise share links map to shareLink, api-key auth to api, and
 * everything else to fastgpt.
 */
export function authType2BillSource({
  authType,
  shareId,
  source
}: {
  authType?: `${AuthUserTypeEnum}`;
  shareId?: string;
  source?: `${BillSourceEnum}`;
}) {
  if (source) return source;
  return shareId
    ? BillSourceEnum.shareLink
    : authType === AuthUserTypeEnum.apikey
      ? BillSourceEnum.api
      : BillSourceEnum.fastgpt;
}
/**
 * Converts an input/output length into integer storage-price units for the
 * given model. Prices are per `multiple` length units and scaled by
 * PRICE_SCALE; an unknown model yields all-zero totals and an empty name.
 */
export const formatModelPrice2Store = ({
  model,
  inputLen = 0,
  outputLen = 0,
  type,
  multiple = 1000
}: {
  model: string;
  inputLen: number;
  outputLen?: number;
  type: `${ModelTypeEnum}`;
  multiple?: number;
}) => {
  const modelData = getModelMap?.[type]?.(model);
  if (!modelData) {
    return { inputTotal: 0, outputTotal: 0, total: 0, modelName: '' };
  }

  // Round each side up to a whole storage unit; a missing price costs 0.
  const toStore = (len: number, price?: number) =>
    price ? Math.ceil(price * (len / multiple) * PRICE_SCALE) : 0;

  const inputTotal = toStore(inputLen, modelData.inputPrice);
  const outputTotal = toStore(outputLen, modelData.outputPrice);

  return {
    modelName: modelData.name,
    inputTotal,
    outputTotal,
    total: inputTotal + outputTotal
  };
};

View File

@@ -0,0 +1,23 @@
import { ConcatUsageProps, CreateUsageProps } from '@fastgpt/global/support/wallet/usage/api';
import { addLog } from '@fastgpt/service/common/system/log';
import { POST } from '@fastgpt/service/common/api/plusRequest';
import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';
/**
 * Fire-and-forget creation of a usage record on the pro service.
 * No-op when FastGPTProUrl is not configured.
 */
export function createUsage(data: CreateUsageProps) {
  if (!FastGPTProUrl) return;
  if (data.totalPoints === 0) {
    addLog.info('0 totalPoints', data);
  }
  // Bug fix: a sync try/catch cannot catch the rejection of the un-awaited
  // POST; handle it on the promise so failures never become unhandled
  // promise rejections.
  POST('/support/wallet/usage/createUsage', data).catch((error) => {
    addLog.error('createUsage error', error);
  });
}
/**
 * Fire-and-forget append onto an existing usage record on the pro service.
 * No-op when FastGPTProUrl is not configured.
 */
export function concatUsage(data: ConcatUsageProps) {
  if (!FastGPTProUrl) return;
  if (data.totalPoints === 0) {
    addLog.info('0 totalPoints', data);
  }
  // Bug fix: handle the async rejection on the promise itself; the original
  // sync try/catch could never catch it.
  POST('/support/wallet/usage/concatUsage', data).catch((error) => {
    addLog.error('concatUsage error', error);
  });
}

View File

@@ -0,0 +1,274 @@
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { ModelTypeEnum } from '@fastgpt/service/core/ai/model';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { addLog } from '@fastgpt/service/common/system/log';
import { createUsage, concatUsage } from './controller';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
import { ChatModuleBillType } from '@fastgpt/global/support/wallet/bill/type';
/**
 * Records the usage of one chat completion: one list row per dispatched
 * module, totalled over all module points. Returns the computed totalPoints.
 */
export const pushChatUsage = ({
  appName,
  appId,
  teamId,
  tmbId,
  source,
  moduleDispatchBills
}: {
  appName: string;
  appId: string;
  teamId: string;
  tmbId: string;
  source: `${UsageSourceEnum}`;
  moduleDispatchBills: ChatModuleBillType[];
}) => {
  // Sum every module's points; a missing value counts as 0.
  let totalPoints = 0;
  for (const bill of moduleDispatchBills) {
    totalPoints += bill.totalPoints || 0;
  }

  const list = moduleDispatchBills.map((bill) => ({
    moduleName: bill.moduleName,
    amount: bill.totalPoints || 0,
    model: bill.model,
    charsLength: bill.charsLength
  }));

  createUsage({ teamId, tmbId, appName, appId, totalPoints, source, list });

  addLog.info(`finish completions`, { source, teamId, tmbId, totalPoints });

  return { totalPoints };
};
/**
 * Appends the points of one QA generation onto an existing usage record
 * (slot 1). The character count is priced as LLM usage.
 */
export const pushQAUsage = async ({
  teamId,
  tmbId,
  model,
  charsLength,
  billId
}: {
  teamId: string;
  tmbId: string;
  model: string;
  charsLength: number;
  billId: string;
}) => {
  // Compute the point cost for this generation.
  const priced = formatModelChars2Points({
    model,
    modelType: ModelTypeEnum.llm,
    charsLength
  });

  // Concatenate onto the existing record rather than creating a new one.
  concatUsage({
    billId,
    teamId,
    tmbId,
    totalPoints: priced.totalPoints,
    charsLength,
    listIndex: 1
  });

  return { totalPoints: priced.totalPoints };
};
// Records the usage of one vector (embedding) generation, optionally plus
// the query-extension LLM call that preceded it. When `billId` is given the
// cost is appended onto an existing record (listIndex 0); otherwise a new
// standalone usage record with an itemized list is created.
export const pushGenerateVectorUsage = ({
  billId,
  teamId,
  tmbId,
  charsLength,
  model,
  source = UsageSourceEnum.fastgpt,
  extensionModel,
  extensionCharsLength
}: {
  billId?: string;
  teamId: string;
  tmbId: string;
  charsLength: number;
  model: string;
  source?: `${UsageSourceEnum}`;
  extensionModel?: string;
  extensionCharsLength?: number;
}) => {
  // Vector model cost for the embedded characters.
  const { totalPoints: totalVector, modelName: vectorModelName } = formatModelChars2Points({
    modelType: ModelTypeEnum.vector,
    model,
    charsLength
  });
  // Optional query-extension cost.
  // NOTE(review): `!extensionCharsLength` treats a length of 0 as "no
  // extension", but the list spread below only checks
  // `extensionModel !== undefined` — with a model set and 0 chars, a
  // zero-amount row with an empty model name is still appended. Confirm.
  const { extensionTotalPoints, extensionModelName } = (() => {
    if (!extensionModel || !extensionCharsLength)
      return {
        extensionTotalPoints: 0,
        extensionModelName: ''
      };
    const { totalPoints, modelName } = formatModelChars2Points({
      modelType: ModelTypeEnum.llm,
      model: extensionModel,
      charsLength: extensionCharsLength
    });
    return {
      extensionTotalPoints: totalPoints,
      extensionModelName: modelName
    };
  })();
  const totalPoints = totalVector + extensionTotalPoints;
  // Insert the usage record.
  if (billId) {
    // NOTE(review): the combined total (vector + extension) is concatenated
    // into the single slot at listIndex 0; the extension cost is not
    // itemized separately on the concat path. Confirm intended.
    concatUsage({
      teamId,
      tmbId,
      totalPoints,
      billId,
      charsLength,
      listIndex: 0
    });
  } else {
    createUsage({
      teamId,
      tmbId,
      appName: 'support.wallet.moduleName.index',
      totalPoints,
      source,
      list: [
        {
          moduleName: 'support.wallet.moduleName.index',
          amount: totalVector,
          model: vectorModelName,
          charsLength
        },
        ...(extensionModel !== undefined
          ? [
              {
                moduleName: 'core.module.template.Query extension',
                amount: extensionTotalPoints,
                model: extensionModelName,
                charsLength: extensionCharsLength
              }
            ]
          : [])
      ]
    });
  }
  return { totalPoints };
};
/**
 * Records the usage of one "Next Step Guide" generation against the first
 * configured LLM. No-op when no LLM model is configured.
 */
export const pushQuestionGuideUsage = ({
  charsLength,
  teamId,
  tmbId
}: {
  charsLength: number;
  teamId: string;
  tmbId: string;
}) => {
  const qgModel = global.llmModels[0];
  // Fix: guard an empty model list — consistent with the whisperModel guard
  // in pushWhisperUsage; previously this crashed on `qgModel.model` when
  // `global.llmModels` was empty.
  if (!qgModel) return;

  const { totalPoints, modelName } = formatModelChars2Points({
    charsLength,
    model: qgModel.model,
    modelType: ModelTypeEnum.llm
  });

  createUsage({
    teamId,
    tmbId,
    appName: 'core.app.Next Step Guide',
    totalPoints,
    source: UsageSourceEnum.fastgpt,
    list: [
      {
        moduleName: 'core.app.Next Step Guide',
        amount: totalPoints,
        model: modelName,
        charsLength
      }
    ]
  });
};
/**
 * Records the usage of one audio-speech (TTS) call; the point cost is
 * driven by the number of characters synthesized.
 */
export function pushAudioSpeechUsage({
  appName = 'support.wallet.bill.Audio Speech',
  model,
  charsLength,
  teamId,
  tmbId,
  source = UsageSourceEnum.fastgpt
}: {
  appName?: string;
  model: string;
  charsLength: number;
  teamId: string;
  tmbId: string;
  source: `${UsageSourceEnum}`;
}) {
  const priced = formatModelChars2Points({
    model,
    charsLength,
    modelType: ModelTypeEnum.audioSpeech
  });

  const usageItem = {
    moduleName: appName,
    amount: priced.totalPoints,
    model: priced.modelName,
    charsLength
  };

  createUsage({
    teamId,
    tmbId,
    appName,
    totalPoints: priced.totalPoints,
    source,
    list: [usageItem]
  });
}
/**
 * Records the usage of one whisper transcription. The duration is priced in
 * units of 60 (multiple: 60 in the points formula). No-op when no whisper
 * model is configured.
 */
export function pushWhisperUsage({
  teamId,
  tmbId,
  duration
}: {
  teamId: string;
  tmbId: string;
  duration: number;
}) {
  const whisperModel = global.whisperModel;
  if (!whisperModel) return;

  const { totalPoints, modelName } = formatModelChars2Points({
    model: whisperModel.model,
    charsLength: duration,
    modelType: ModelTypeEnum.whisper,
    multiple: 60
  });

  const usageName = 'support.wallet.bill.Whisper';
  createUsage({
    teamId,
    tmbId,
    appName: usageName,
    totalPoints,
    source: UsageSourceEnum.fastgpt,
    list: [{ moduleName: usageName, amount: totalPoints, model: modelName, duration }]
  });
}

View File

@@ -0,0 +1,44 @@
import { ModelTypeEnum, getModelMap } from '@fastgpt/service/core/ai/model';
import { AuthUserTypeEnum } from '@fastgpt/global/support/permission/constant';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
/**
 * Maps auth context to a usage source. An explicitly supplied source always
 * wins; otherwise share links map to shareLink, api-key auth to api, and
 * everything else to fastgpt.
 */
export function authType2UsageSource({
  authType,
  shareId,
  source
}: {
  authType?: `${AuthUserTypeEnum}`;
  shareId?: string;
  source?: `${UsageSourceEnum}`;
}) {
  if (source) return source;
  if (shareId) return UsageSourceEnum.shareLink;
  return authType === AuthUserTypeEnum.apikey ? UsageSourceEnum.api : UsageSourceEnum.fastgpt;
}
/**
 * Converts a character count into usage points for the given model. The
 * price is per `multiple` characters; an unknown model yields 0 points and
 * an empty name.
 */
export const formatModelChars2Points = ({
  model,
  charsLength = 0,
  modelType,
  multiple = 1000
}: {
  model: string;
  charsLength: number;
  modelType: `${ModelTypeEnum}`;
  multiple?: number;
}) => {
  // Look up the pricing data for this model within its type family.
  const modelData = getModelMap?.[modelType]?.(model);
  if (!modelData) {
    return { totalPoints: 0, modelName: '' };
  }

  const pricePerUnit = modelData.charsPointsPrice || 0;
  return {
    modelName: modelData.name,
    totalPoints: pricePerUnit * (charsLength / multiple)
  };
};

View File

@@ -5,6 +5,7 @@ import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { addLog } from '@fastgpt/service/common/system/log';
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
type Props = {
chatId: string;
@@ -46,61 +47,54 @@ export async function saveChat({
...chat?.metadata,
...metadata
};
const title =
chatContentReplaceBlock(content[0].value).slice(0, 20) ||
content[1]?.value?.slice(0, 20) ||
'Chat';
const promise: any[] = [
MongoChatItem.insertMany(
await mongoSessionRun(async (session) => {
await MongoChatItem.insertMany(
content.map((item) => ({
chatId,
teamId,
tmbId,
appId,
...item
}))
)
];
const title =
chatContentReplaceBlock(content[0].value).slice(0, 20) ||
content[1]?.value?.slice(0, 20) ||
'Chat';
if (chat) {
promise.push(
MongoChat.updateOne(
{ appId, chatId },
{
title,
updateTime: new Date(),
metadata: metadataUpdate
}
)
})),
{ session }
);
} else {
promise.push(
MongoChat.create({
chatId,
teamId,
tmbId,
appId,
variables,
title,
source,
shareId,
outLinkUid,
metadata: metadataUpdate
})
);
}
if (chat) {
chat.title = title;
chat.updateTime = new Date();
chat.metadata = metadataUpdate;
await chat.save({ session });
} else {
await MongoChat.create(
[
{
chatId,
teamId,
tmbId,
appId,
variables,
title,
source,
shareId,
outLinkUid,
metadata: metadataUpdate
}
],
{ session }
);
}
});
if (updateUseTime && source === ChatSourceEnum.online) {
promise.push(
MongoApp.findByIdAndUpdate(appId, {
updateTime: new Date()
})
);
MongoApp.findByIdAndUpdate(appId, {
updateTime: new Date()
});
}
await Promise.all(promise);
} catch (error) {
addLog.error(`update chat history error`, error);
}