4.6.8-alpha (#804)

* perf: redirect request and err log replace

perf: dataset openapi

feat: session

fix: retry input error

feat: 468 doc

sub page

feat: standard sub

perf: rerank tip

perf: rerank tip

perf: api sdk

perf: openapi

sub plan

perf: sub ui

fix: ts

* perf: init log

* fix: variable select

* sub page

* icon

* perf: llm model config

* perf: menu ux

* perf: system store

* perf: publish app name

* fix: init data

* perf: flow edit ux

* fix: value type format and ux

* fix prompt editor default value (#13)

* fix prompt editor default value

* fix prompt editor update when not focus

* add key with variable

---------

Co-authored-by: Archer <545436317@qq.com>

* fix: value type

* doc

* i18n

* import path

* home page

* perf: mongo session running

* fix: ts

* perf: use toast

* perf: flow edit

* perf: sse response

* slider ui

* fetch error

* fix prompt editor rerender when not focus by key defaultvalue (#14)

* perf: prompt editor

* feat: dataset search concat

* perf: doc

* fix: ts

* perf: doc

* fix json editor onblur value (#15)

* faq

* vector model default config

* ipv6

---------

Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
This commit is contained in:
Archer
2024-02-01 21:57:41 +08:00
committed by GitHub
parent fc19c4cf09
commit 34602b25df
285 changed files with 10345 additions and 11223 deletions

View File

@@ -22,10 +22,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
jsonRes<InitDateResponse>(res, {
data: {
feConfigs: global.feConfigs,
chatModels: global.chatModels,
qaModels: global.qaModels,
cqModels: global.cqModels,
extractModels: global.extractModels,
subPlans: global.subPlans,
llmModels: global.llmModels,
vectorModels: global.vectorModels,
reRankModels:
global.reRankModels?.map((item) => ({
@@ -33,7 +31,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
requestUrl: undefined,
requestAuth: undefined
})) || [],
qgModes: global.qgModels,
whisperModel: global.whisperModel,
audioSpeechModels: global.audioSpeechModels,
systemVersion: global.systemVersion || '0.0.0',
@@ -119,11 +116,8 @@ export async function initSystemConfig() {
...fileRes.systemEnv,
...(dbConfig.systemEnv || {})
},
chatModels: dbConfig.chatModels || fileRes.chatModels || [],
qaModels: dbConfig.qaModels || fileRes.qaModels || [],
cqModels: dbConfig.cqModels || fileRes.cqModels || [],
extractModels: dbConfig.extractModels || fileRes.extractModels || [],
qgModels: dbConfig.qgModels || fileRes.qgModels || [],
subPlans: dbConfig.subPlans || fileRes.subPlans,
llmModels: dbConfig.llmModels || fileRes.llmModels || [],
vectorModels: dbConfig.vectorModels || fileRes.vectorModels || [],
reRankModels: dbConfig.reRankModels || fileRes.reRankModels || [],
audioSpeechModels: dbConfig.audioSpeechModels || fileRes.audioSpeechModels || [],
@@ -133,12 +127,9 @@ export async function initSystemConfig() {
// set config
global.feConfigs = config.feConfigs;
global.systemEnv = config.systemEnv;
global.subPlans = config.subPlans;
global.chatModels = config.chatModels;
global.qaModels = config.qaModels;
global.cqModels = config.cqModels;
global.extractModels = config.extractModels;
global.qgModels = config.qgModels;
global.llmModels = config.llmModels;
global.vectorModels = config.vectorModels;
global.reRankModels = config.reRankModels;
global.audioSpeechModels = config.audioSpeechModels;
@@ -147,11 +138,8 @@ export async function initSystemConfig() {
console.log({
feConfigs: global.feConfigs,
systemEnv: global.systemEnv,
chatModels: global.chatModels,
qaModels: global.qaModels,
cqModels: global.cqModels,
extractModels: global.extractModels,
qgModels: global.qgModels,
subPlans: global.subPlans,
llmModels: global.llmModels,
vectorModels: global.vectorModels,
reRankModels: global.reRankModels,
audioSpeechModels: global.audioSpeechModels,

View File

@@ -17,7 +17,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
shareId
});
const qgModel = global.qgModels[0];
const qgModel = global.llmModels[0];
const { result, inputTokens, outputTokens } = await createQuestionGuide({
messages,

View File

@@ -6,6 +6,7 @@ import { MongoApp } from '@fastgpt/service/core/app/schema';
import { MongoOutLink } from '@fastgpt/service/support/outLink/schema';
import { authApp } from '@fastgpt/service/support/permission/auth/app';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
/* 获取我的模型 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -21,21 +22,33 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
await authApp({ req, authToken: true, appId, per: 'owner' });
// 删除对应的聊天
await MongoChatItem.deleteMany({
appId
});
await MongoChat.deleteMany({
appId
});
// 删除分享链接
await MongoOutLink.deleteMany({
appId
});
// 删除模型
await MongoApp.deleteOne({
_id: appId
await mongoSessionRun(async (session) => {
await MongoChatItem.deleteMany(
{
appId
},
{ session }
);
await MongoChat.deleteMany(
{
appId
},
{ session }
);
// 删除分享链接
await MongoOutLink.deleteMany(
{
appId
},
{ session }
);
// delete app
await MongoApp.deleteOne(
{
_id: appId
},
{ session }
);
});
jsonRes(res);

View File

@@ -8,11 +8,11 @@ import type { AppSimpleEditFormType } from '@fastgpt/global/core/app/type.d';
import type { ModuleItemType } from '@fastgpt/global/core/module/type';
import { FormatForm2ModulesProps } from '@fastgpt/global/core/app/api';
import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constants';
import { getExtractModel } from '@/service/core/ai/model';
import { getLLMModel } from '@/service/core/ai/model';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
const { formData, chatModelMaxToken, chatModelList } = req.body as FormatForm2ModulesProps;
const { formData, chatModelMaxToken } = req.body as FormatForm2ModulesProps;
const modules = [
...(formData.dataset.datasets.length > 0
@@ -381,7 +381,7 @@ function datasetTemplate({ formData, maxToken }: Props): ModuleItemType[] {
key: 'usingReRank',
type: 'hidden',
label: '',
valueType: 'string',
valueType: 'boolean',
showTargetInApp: false,
showTargetInPlugin: false,
value: true,
@@ -676,7 +676,7 @@ function datasetTemplate({ formData, maxToken }: Props): ModuleItemType[] {
label: 'core.module.input.label.aiModel',
required: true,
valueType: 'string',
value: getExtractModel().model,
value: getLLMModel().model,
showTargetInApp: false,
showTargetInPlugin: false,
connected: false

View File

@@ -7,7 +7,7 @@ import { jsonRes } from '@fastgpt/service/common/response';
import type { AppSimpleEditFormType } from '@fastgpt/global/core/app/type.d';
import type { ModuleItemType } from '@fastgpt/global/core/module/type';
import { FormatForm2ModulesProps } from '@fastgpt/global/core/app/api';
import { getExtractModel } from '@/service/core/ai/model';
import { getLLMModel } from '@/service/core/ai/model';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -377,7 +377,7 @@ function datasetTemplate(formData: AppSimpleEditFormType): ModuleItemType[] {
key: 'usingReRank',
type: 'hidden',
label: '',
valueType: 'string',
valueType: 'boolean',
showTargetInApp: false,
showTargetInPlugin: false,
value: formData.dataset.usingReRank,
@@ -686,7 +686,7 @@ function datasetTemplate(formData: AppSimpleEditFormType): ModuleItemType[] {
label: 'core.module.input.label.aiModel',
required: true,
valueType: 'string',
value: getExtractModel().model,
value: getLLMModel().model,
showTargetInApp: false,
showTargetInPlugin: false,
connected: false

View File

@@ -6,7 +6,7 @@ import type { AppUpdateParams } from '@fastgpt/global/core/app/api';
import { authApp } from '@fastgpt/service/support/permission/auth/app';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import { getChatModel } from '@/service/core/ai/model';
import { getLLMModel } from '@/service/core/ai/model';
/* 获取我的模型 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -32,7 +32,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
if (item.flowType === FlowNodeTypeEnum.chatNode) {
const model =
item.inputs.find((item) => item.key === ModuleInputKeyEnum.aiModel)?.value || '';
const chatModel = getChatModel(model);
const chatModel = getLLMModel(model);
const quoteMaxToken = chatModel.quoteMaxToken || 3000;
maxTokens = Math.max(maxTokens, quoteMaxToken);
@@ -42,7 +42,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
modules.forEach((item) => {
if (item.flowType === FlowNodeTypeEnum.datasetSearchNode) {
item.inputs.forEach((input) => {
if (input.key === ModuleInputKeyEnum.datasetLimit) {
if (input.key === ModuleInputKeyEnum.datasetMaxTokens) {
const val = input.value as number;
if (val > maxTokens) {
input.value = maxTokens;

View File

@@ -5,6 +5,7 @@ import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
import { DelHistoryProps } from '@/global/core/chat/api';
import { autChatCrud } from '@/service/support/permission/auth/chat';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
/* clear chat history */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -22,13 +23,21 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
per: 'w'
});
await MongoChatItem.deleteMany({
appId,
chatId
});
await MongoChat.findOneAndRemove({
appId,
chatId
await mongoSessionRun(async (session) => {
await MongoChatItem.deleteMany(
{
appId,
chatId
},
{ session }
);
await MongoChat.findOneAndRemove(
{
appId,
chatId
},
{ session }
);
});
jsonRes(res);

View File

@@ -21,12 +21,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
per: 'w'
});
const { _id } = await createOneCollection({
...body,
teamId,
tmbId
});
jsonRes(res, {
data: await createOneCollection({
...body,
teamId,
tmbId
})
data: _id
});
} catch (err) {
jsonRes(res, {

View File

@@ -1,13 +1,14 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
import { delFileByFileIdList, uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { FileCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
/**
* Creates the multer uploader
@@ -18,7 +19,7 @@ const upload = getUploadModel({
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
let filePaths: string[] = [];
let fileId: string = '';
const { datasetId } = req.query as { datasetId: string };
try {
@@ -45,7 +46,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const { fileMetadata, collectionMetadata, ...collectionData } = data;
// upload file and create collection
const fileId = await uploadFile({
fileId = await uploadFile({
teamId,
tmbId,
bucketName,
@@ -56,7 +57,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
});
// create collection
const collectionId = await createOneCollection({
const { _id: collectionId } = await createOneCollection({
...collectionData,
metadata: collectionMetadata,
teamId,
@@ -69,6 +70,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
data: collectionId
});
} catch (error) {
if (fileId) {
try {
await delFileByFileIdList({
fileIdList: [fileId],
bucketName: BucketNameEnum.dataset
});
} catch (error) {}
}
jsonRes(res, {
code: 500,
error

View File

@@ -15,8 +15,10 @@ import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dat
import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
import { createTrainingBill } from '@fastgpt/service/support/wallet/bill/controller';
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';
import { getLLMModel, getVectorModel } from '@/service/core/ai/model';
import { reloadCollectionChunks } from '@fastgpt/service/core/dataset/collection/utils';
import { getStandardSubPlan } from '@/service/support/wallet/sub/utils';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -41,39 +43,51 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
// 1. check dataset limit
await checkDatasetLimit({
teamId,
freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
insertLen: predictDataLimitLength(trainingType, new Array(10))
insertLen: predictDataLimitLength(trainingType, new Array(10)),
standardPlans: getStandardSubPlan()
});
// 2. create collection
const collectionId = await createOneCollection({
...body,
name: link,
teamId,
tmbId,
type: DatasetCollectionTypeEnum.link,
const { _id: collectionId } = await mongoSessionRun(async (session) => {
// 2. create collection
const collection = await createOneCollection({
...body,
name: link,
teamId,
tmbId,
type: DatasetCollectionTypeEnum.link,
trainingType,
chunkSize,
chunkSplitter,
qaPrompt,
trainingType,
chunkSize,
chunkSplitter,
qaPrompt,
rawLink: link
});
rawLink: link,
session
});
// 3. create bill and start sync
const { billId } = await createTrainingBill({
teamId,
tmbId,
appName: 'core.dataset.collection.Sync Collection',
billSource: BillSourceEnum.training,
vectorModel: getVectorModel(dataset.vectorModel).name,
agentModel: getQAModel(dataset.agentModel).name
});
await reloadCollectionChunks({
collectionId,
tmbId,
billId
// 3. create bill and start sync
const { billId } = await createTrainingBill({
teamId,
tmbId,
appName: 'core.dataset.collection.Sync Collection',
billSource: BillSourceEnum.training,
vectorModel: getVectorModel(dataset.vectorModel).name,
agentModel: getLLMModel(dataset.agentModel).name,
session
});
// load
await reloadCollectionChunks({
collection: {
...collection.toObject(),
datasetId: dataset
},
tmbId,
billId,
session
});
return collection;
});
jsonRes(res, {

View File

@@ -18,7 +18,8 @@ import { pushDataToTrainingQueue } from '@/service/core/dataset/data/controller'
import { hashStr } from '@fastgpt/global/common/string/tools';
import { createTrainingBill } from '@fastgpt/service/support/wallet/bill/controller';
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';
import { getLLMModel, getVectorModel } from '@/service/core/ai/model';
import { getStandardSubPlan } from '@/service/support/wallet/sub/utils';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -52,12 +53,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
// 2. check dataset limit
await checkDatasetLimit({
teamId,
freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
insertLen: predictDataLimitLength(trainingType, chunks)
insertLen: predictDataLimitLength(trainingType, chunks),
standardPlans: getStandardSubPlan()
});
// 3. create collection and training bill
const [collectionId, { billId }] = await Promise.all([
const [{ _id: collectionId }, { billId }] = await Promise.all([
createOneCollection({
...body,
teamId,
@@ -79,7 +80,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
appName: name,
billSource: BillSourceEnum.training,
vectorModel: getVectorModel(dataset.vectorModel)?.name,
agentModel: getQAModel(dataset.agentModel)?.name
agentModel: getLLMModel(dataset.agentModel)?.name
})
]);

View File

@@ -4,6 +4,7 @@ import { connectToDatabase } from '@/service/mongo';
import { findCollectionAndChild } from '@fastgpt/service/core/dataset/collection/utils';
import { delCollectionAndRelatedSources } from '@fastgpt/service/core/dataset/collection/controller';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -32,9 +33,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
});
// delete
await delCollectionAndRelatedSources({
collections
});
await mongoSessionRun((session) =>
delCollectionAndRelatedSources({
collections,
session
})
);
jsonRes(res);
} catch (err) {

View File

@@ -14,8 +14,9 @@ import {
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import { createTrainingBill } from '@fastgpt/service/support/wallet/bill/controller';
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';
import { getLLMModel, getVectorModel } from '@/service/core/ai/model';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -27,7 +28,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
throw new Error('CollectionIdId is required');
}
const { collection, teamId, tmbId } = await authDatasetCollection({
const { collection, tmbId } = await authDatasetCollection({
req,
authToken: true,
collectionId,
@@ -51,44 +52,54 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
/* Not the same original text, create and reload */
const vectorModelData = getVectorModel(collection.datasetId.vectorModel);
const agentModelData = getQAModel(collection.datasetId.agentModel);
// create training bill
const { billId } = await createTrainingBill({
teamId: collection.teamId,
tmbId,
appName: 'core.dataset.collection.Sync Collection',
billSource: BillSourceEnum.training,
vectorModel: vectorModelData.name,
agentModel: agentModelData.name
});
const agentModelData = getLLMModel(collection.datasetId.agentModel);
// create a collection and delete old
const _id = await createOneCollection({
teamId: collection.teamId,
tmbId: collection.tmbId,
parentId: collection.parentId,
datasetId: collection.datasetId._id,
name: title || collection.name,
type: collection.type,
trainingType: collection.trainingType,
chunkSize: collection.chunkSize,
fileId: collection.fileId,
rawLink: collection.rawLink,
metadata: collection.metadata,
createTime: collection.createTime
});
await mongoSessionRun(async (session) => {
// create training bill
const { billId } = await createTrainingBill({
teamId: collection.teamId,
tmbId,
appName: 'core.dataset.collection.Sync Collection',
billSource: BillSourceEnum.training,
vectorModel: vectorModelData.name,
agentModel: agentModelData.name,
session
});
// start load
await reloadCollectionChunks({
collectionId: _id,
tmbId,
billId,
rawText
});
// create a collection and delete old
const newCol = await createOneCollection({
teamId: collection.teamId,
tmbId: collection.tmbId,
parentId: collection.parentId,
datasetId: collection.datasetId._id,
name: title || collection.name,
type: collection.type,
trainingType: collection.trainingType,
chunkSize: collection.chunkSize,
fileId: collection.fileId,
rawLink: collection.rawLink,
metadata: collection.metadata,
createTime: collection.createTime,
session
});
// delete old collection
await delCollectionAndRelatedSources({
collections: [collection]
// start load
await reloadCollectionChunks({
collection: {
...newCol.toObject(),
datasetId: collection.datasetId
},
tmbId,
billId,
rawText,
session
});
// delete old collection
await delCollectionAndRelatedSources({
collections: [collection],
session
});
});
jsonRes(res, {

View File

@@ -6,7 +6,7 @@ import type { CreateDatasetParams } from '@/global/core/dataset/api.d';
import { createDefaultCollection } from '@fastgpt/service/core/dataset/collection/controller';
import { authUserNotVisitor } from '@fastgpt/service/support/permission/auth/user';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';
import { getLLMModel, getVectorModel, getDatasetModel } from '@/service/core/ai/model';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -17,7 +17,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
type = DatasetTypeEnum.dataset,
avatar,
vectorModel = global.vectorModels[0].model,
agentModel = global.qaModels[0].model
agentModel = getDatasetModel().model
} = req.body as CreateDatasetParams;
// auth
@@ -25,7 +25,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
// check model valid
const vectorModelStore = getVectorModel(vectorModel);
const agentModelStore = getQAModel(agentModel);
const agentModelStore = getLLMModel(agentModel);
if (!vectorModelStore || !agentModelStore) {
throw new Error('vectorModel or qaModel is invalid');
}

View File

@@ -17,6 +17,7 @@ import { pushGenerateVectorBill } from '@/service/support/wallet/bill/push';
import { InsertOneDatasetDataProps } from '@/global/core/dataset/api';
import { simpleText } from '@fastgpt/global/common/string/tools';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
import { getStandardSubPlan } from '@/service/support/wallet/sub/utils';
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -42,8 +43,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
await checkDatasetLimit({
teamId,
freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
insertLen: 1
insertLen: 1,
standardPlans: getStandardSubPlan()
});
// auth collection and get dataset

View File

@@ -11,6 +11,7 @@ import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
import { pushDataToTrainingQueue } from '@/service/core/dataset/data/controller';
import { getStandardSubPlan } from '@/service/support/wallet/sub/utils';
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -37,8 +38,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
// auth dataset limit
await checkDatasetLimit({
teamId,
freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
insertLen: predictDataLimitLength(collection.trainingType, data)
insertLen: predictDataLimitLength(collection.trainingType, data),
standardPlans: getStandardSubPlan()
});
jsonRes<PushDatasetDataResponse>(res, {

View File

@@ -5,6 +5,7 @@ import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { delDatasetRelevantData } from '@fastgpt/service/core/dataset/controller';
import { findDatasetAndAllChildren } from '@fastgpt/service/core/dataset/controller';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -32,11 +33,15 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
});
// delete all dataset.data and pg data
await delDatasetRelevantData({ datasets });
// delete dataset data
await MongoDataset.deleteMany({
_id: { $in: datasets.map((d) => d._id) }
await mongoSessionRun(async (session) => {
// delete dataset data
await delDatasetRelevantData({ datasets, session });
await MongoDataset.deleteMany(
{
_id: { $in: datasets.map((d) => d._id) }
},
{ session }
);
});
jsonRes(res);

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';
import { getLLMModel, getVectorModel } from '@/service/core/ai/model';
import type { DatasetItemType } from '@fastgpt/global/core/dataset/type.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
@@ -29,7 +29,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
data: {
...dataset,
vectorModel: getVectorModel(dataset.vectorModel),
agentModel: getQAModel(dataset.agentModel),
agentModel: getLLMModel(dataset.agentModel),
canWrite,
isOwner
}

View File

@@ -44,7 +44,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
// query extension
// const { queries } = await searchQueryExtension({
// query: text,
// model: global.chatModels[0].model
// model: global.llmModel[0].model
// });
const { searchRes, charsLength, ...result } = await searchDatasetData({

View File

@@ -17,6 +17,21 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
await authRequestFromLocal({ req });
// string all value
Object.keys(obj).forEach((key) => {
let val = obj[key];
if (typeof val === 'object') {
val = JSON.stringify(val);
} else if (typeof val === 'number') {
val = String(val);
} else if (typeof val === 'boolean') {
val = val ? 'true' : 'false';
}
obj[key] = val;
});
const textResult = replaceVariable(text, obj);
res.json({

View File

@@ -20,7 +20,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const nanoid = customAlphabet(
'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890',
Math.floor(Math.random() * 14) + 24
Math.floor(Math.random() * 14) + 52
);
const apiKey = `${global.systemEnv?.openapiPrefix || 'fastgpt'}-${nanoid()}`;

View File

@@ -20,7 +20,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const baseUrl = openaiAccount?.baseUrl || openaiBaseUrl;
openaiAccount.baseUrl = baseUrl;
const ai = getAIApi(openaiAccount);
const ai = getAIApi({
userKey: openaiAccount
});
const response = await ai.chat.completions.create({
model: 'gpt-3.5-turbo',

View File

@@ -3,6 +3,7 @@ import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
import { getStandardSubPlan } from '@/service/support/wallet/sub/utils';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -22,8 +23,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
await checkDatasetLimit({
teamId,
freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
insertLen: numberSize
insertLen: numberSize,
standardPlans: getStandardSubPlan()
});
jsonRes(res);

View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { CreateTrainingBillProps } from '@fastgpt/global/support/wallet/bill/api.d';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';
import { getLLMModel, getVectorModel } from '@/service/core/ai/model';
import { createTrainingBill } from '@fastgpt/service/support/wallet/bill/controller';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
@@ -26,7 +26,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
appName: name,
billSource: BillSourceEnum.training,
vectorModel: getVectorModel(dataset.vectorModel).name,
agentModel: getQAModel(dataset.agentModel).name
agentModel: getLLMModel(dataset.agentModel).name
});
jsonRes<string>(res, {

View File

@@ -2,8 +2,9 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { getTeamDatasetValidSub } from '@fastgpt/service/support/wallet/sub/utils';
import { getVectorCountByTeamId } from '@fastgpt/service/common/vectorStore/controller';
import { getTeamSubPlanStatus } from '@fastgpt/service/support/wallet/sub/utils';
import { getStandardSubPlan } from '@/service/support/wallet/sub/utils';
import { FeTeamSubType } from '@fastgpt/global/support/wallet/sub/type';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -15,20 +16,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
authToken: true
});
const [{ sub, maxSize }, usedSize] = await Promise.all([
getTeamDatasetValidSub({
jsonRes<FeTeamSubType>(res, {
data: await getTeamSubPlanStatus({
teamId,
freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize
}),
getVectorCountByTeamId(teamId)
]);
jsonRes(res, {
data: {
sub,
maxSize,
usedSize
}
standardPlans: getStandardSubPlan()
})
});
} catch (err) {
jsonRes(res, {

View File

@@ -5,12 +5,15 @@ import { withNextCors } from '@fastgpt/service/common/middle/cors';
import { pushGenerateVectorBill } from '@/service/support/wallet/bill/push';
import { connectToDatabase } from '@/service/mongo';
import { authTeamBalance } from '@/service/support/permission/auth/bill';
import { getVectorsByText, GetVectorProps } from '@fastgpt/service/core/ai/embedding';
import { getVectorsByText } from '@fastgpt/service/core/ai/embedding';
import { updateApiKeyUsage } from '@fastgpt/service/support/openapi/tools';
import { getBillSourceByAuthType } from '@fastgpt/global/support/wallet/bill/tools';
import { getVectorModel } from '@/service/core/ai/model';
type Props = GetVectorProps & {
type Props = {
input: string | string[];
model: string;
dimensions?: number;
billId?: string;
};
@@ -33,7 +36,10 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
await authTeamBalance(teamId);
const { charsLength, vectors } = await getVectorsByText({ input: query, model });
const { charsLength, vectors } = await getVectorsByText({
input: query,
model: getVectorModel(model)
});
res.json({
object: 'list',