Dataset Permission (#1786)
* feat: dataset controllers; feat: dataset schema; fix: add missing type to dataset schema
* feat: dataset list api
* chore: all dataset api
* feat: new auth dataset method
* chore: use new auth method in detail, paths; feat: add new param defaultPermission to create api
* chore: app auth params
* chore: use new auth method
* feat: new auth collection and file method
* chore: dataset collection api new auth
* chore: create/*.ts auth
* chore: dataset auth
* fix: import paths
* feat: dataset collaborator
* chore: dataset frontend; feat: dataset list frontend; feat: dataset detail
* feat: finish the dataset permission; fix: ts errors
* fix: empty response of collection api
* chore: adjust the code
* chore: adjust the code
* chore: i18n
* fix: ts error
* fix: fe CollectionCard permission

Signed-off-by: FinleyGe <m13203533462@163.com>
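The core of this PR is the move from string roles ('r' / 'w') and the per-team mongoRPermission query to numeric permission values (ReadPermissionVal, WritePermissionVal, NullPermission) wrapped in Permission classes such as DatasetPermission. A minimal sketch of how a bit-flag permission value can drive hasReadPer / hasWritePer checks, used throughout the diffs below. The bit layout here is an illustrative assumption, not FastGPT's actual constants:

    // Illustrative bit flags; the concrete values are assumptions.
    const ReadPermissionVal = 0b100;
    const WritePermissionVal = 0b110; // the write mask includes the read bit

    class Permission {
      constructor(
        private per: number,
        public isOwner = false
      ) {}
      // An owner passes every check; otherwise the value must cover the mask.
      private has(mask: number) {
        return this.isOwner || (this.per & mask) === mask;
      }
      get hasReadPer() {
        return this.has(ReadPermissionVal);
      }
      get hasWritePer() {
        return this.has(WritePermissionVal);
      }
    }

    // A collaborator granted write access can also read:
    new Permission(WritePermissionVal).hasReadPer; // true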
@@ -1,31 +1,66 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
+import type { NextApiRequest } from 'next';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
 import { getVectorModel } from '@fastgpt/service/core/ai/model';
 import type { DatasetSimpleItemType } from '@fastgpt/global/core/dataset/type.d';
-import { mongoRPermission } from '@fastgpt/global/support/permission/utils';
-import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
 import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { NextAPI } from '@/service/middleware/entry';
-import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import {
+  PerResourceTypeEnum,
+  ReadPermissionVal
+} from '@fastgpt/global/support/permission/constant';
+import { MongoResourcePermission } from '@fastgpt/service/support/permission/schema';
+import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
+import { authUserPer } from '@fastgpt/service/support/permission/user/auth';

 /* get all dataset by teamId or tmbId */
-async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse<any>
-): Promise<DatasetSimpleItemType[]> {
-  // credential check
-  const { teamId, tmbId, permission } = await authUserPer({
+async function handler(req: NextApiRequest): Promise<DatasetSimpleItemType[]> {
+  const {
+    teamId,
+    tmbId,
+    permission: tmbPer
+  } = await authUserPer({
     req,
     authToken: true,
     authApiKey: true,
     per: ReadPermissionVal
   });

-  const datasets = await MongoDataset.find({
-    ...mongoRPermission({ teamId, tmbId, permission }),
-    type: { $ne: DatasetTypeEnum.folder }
-  }).lean();
+  const [myDatasets, rpList] = await Promise.all([
+    MongoDataset.find({
+      teamId,
+      type: {
+        $ne: DatasetTypeEnum.folder
+      }
+    })
+      .sort({
+        updateTime: -1
+      })
+      .lean(),
+    MongoResourcePermission.find({
+      resourceType: PerResourceTypeEnum.dataset,
+      teamId,
+      tmbId
+    }).lean()
+  ]);

-  return datasets.map((item) => ({
+  const filterDatasets = myDatasets
+    .map((dataset) => {
+      const perVal = rpList.find(
+        (item) => String(item.resourceId) === String(dataset._id)
+      )?.permission;
+      const Per = new DatasetPermission({
+        per: perVal ?? dataset.defaultPermission,
+        isOwner: String(dataset.tmbId) === tmbId || tmbPer.isOwner
+      });
+
+      return {
+        ...dataset,
+        permission: Per
+      };
+    })
+    .filter((app) => app.permission.hasReadPer);
+
+  return filterDatasets.map((item) => ({
     _id: item._id,
     avatar: item.avatar,
     name: item.name,
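The list handler above resolves each member's effective permission in two steps: an explicit per-member record from MongoResourcePermission wins; otherwise the dataset's defaultPermission applies; and ownership (dataset creator or team owner) trumps both. The same rule extracted as a standalone sketch, with simplified hypothetical types:

    // Simplified, hypothetical types for illustration only.
    type ResourcePermission = { resourceId: string; permission: number };
    type DatasetDoc = { _id: string; tmbId: string; defaultPermission: number };

    function resolveDatasetPer(
      dataset: DatasetDoc,
      rpList: ResourcePermission[],
      tmbId: string,
      tmbIsOwner: boolean
    ) {
      // An explicit collaborator grant beats the dataset-wide default.
      const explicit = rpList.find(
        (rp) => String(rp.resourceId) === String(dataset._id)
      )?.permission;
      return {
        per: explicit ?? dataset.defaultPermission,
        isOwner: String(dataset.tmbId) === tmbId || tmbIsOwner
      };
    }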
@@ -1,39 +1,27 @@
-/*
-    Create one dataset collection
-*/
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    const body = req.body as CreateDatasetCollectionParams;
+async function handler(req: NextApiRequest) {
+  const body = req.body as CreateDatasetCollectionParams;

-    const { teamId, tmbId } = await authDataset({
-      req,
-      authToken: true,
-      authApiKey: true,
-      datasetId: body.datasetId,
-      per: 'w'
-    });
+  const { teamId, tmbId } = await authDataset({
+    req,
+    authToken: true,
+    authApiKey: true,
+    datasetId: body.datasetId,
+    per: WritePermissionVal
+  });

-    const { _id } = await createOneCollection({
-      ...body,
-      teamId,
-      tmbId
-    });
-
-    jsonRes(res, {
-      data: _id
-    });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  const { _id } = await createOneCollection({
+    ...body,
+    teamId,
+    tmbId
+  });
+  return _id;
 }
+
+export default NextAPI(handler);
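The same refactor repeats in every route below: connectToDatabase, try/catch, and jsonRes disappear, the handler simply returns its payload, and the file exports NextAPI(handler). A sketch of what such an entry wrapper plausibly looks like; this is an assumption about its shape, not the actual @/service/middleware/entry source:

    import type { NextApiRequest, NextApiResponse } from 'next';

    type ApiHandler = (req: NextApiRequest, res: NextApiResponse) => unknown;

    // Assumed wrapper: await the handler, serialize its return value on
    // success, and turn a thrown error or rejected promise into an error body.
    export function NextAPI(handler: ApiHandler) {
      return async (req: NextApiRequest, res: NextApiResponse) => {
        try {
          const data = await handler(req, res);
          res.status(200).json({ code: 200, data });
        } catch (error) {
          res.status(500).json({ code: 500, error: String(error) });
        }
      };
    }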
@@ -1,8 +1,6 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { readFileContentFromMongo } from '@fastgpt/service/common/file/gridfs/controller';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { FileIdCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import {

@@ -18,97 +16,88 @@ import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/controller';
 import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { rawText2Chunks } from '@fastgpt/service/core/dataset/read';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { NextAPI } from '@/service/middleware/entry';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { datasetId, parentId, fileId } = req.body as FileIdCreateDatasetCollectionParams;
   const trainingType = TrainingModeEnum.chunk;
-
-  try {
-    await connectToDatabase();
-
-    const { teamId, tmbId, dataset } = await authDataset({
-      req,
-      authToken: true,
-      authApiKey: true,
-      per: 'w',
-      datasetId: datasetId
-    });
+  const { teamId, tmbId, dataset } = await authDataset({
+    req,
+    authToken: true,
+    authApiKey: true,
+    per: WritePermissionVal,
+    datasetId: datasetId
+  });

   // 1. read file
   const { rawText, filename } = await readFileContentFromMongo({
     teamId,
     bucketName: BucketNameEnum.dataset,
     fileId,
     isQAImport: true
   });
   console.log(rawText);
   // 2. split chunks
   const chunks = rawText2Chunks({
     rawText,
     isQAImport: true
   });

   // 3. auth limit
   await checkDatasetLimit({
     teamId,
     insertLen: predictDataLimitLength(trainingType, chunks)
   });

   await mongoSessionRun(async (session) => {
     // 4. create collection
     const { _id: collectionId } = await createOneCollection({
       teamId,
       tmbId,
       name: filename,
       parentId,
       datasetId,
       type: DatasetCollectionTypeEnum.file,
       fileId,

       // special metadata
       trainingType,
       chunkSize: 0,

       session
     });

     // 5. create training bill
     const { billId } = await createTrainingUsage({
       teamId,
       tmbId,
       appName: filename,
       billSource: UsageSourceEnum.training,
       vectorModel: getVectorModel(dataset.vectorModel)?.name,
       agentModel: getLLMModel(dataset.agentModel)?.name,
       session
     });

     // 6. insert to training queue
     await pushDataListToTrainingQueue({
       teamId,
       tmbId,
       datasetId: dataset._id,
       collectionId,
       agentModel: dataset.agentModel,
       vectorModel: dataset.vectorModel,
       trainingMode: trainingType,
       billId,
       data: chunks.map((chunk, index) => ({
         q: chunk.q,
         a: chunk.a,
         chunkIndex: index
       })),
       session
     });

     return collectionId;
   });
-
-    jsonRes(res);
-  } catch (error) {
-    jsonRes(res, {
-      code: 500,
-      error
-    });
-  }
 }
+
+export default NextAPI(handler);
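Steps 4 through 6 run inside mongoSessionRun, so the collection document, the usage/bill record, and the training-queue entries commit or roll back as a unit. A rough sketch of such a helper over Mongoose sessions; an assumed shape, not the actual @fastgpt/service implementation:

    import mongoose, { type ClientSession } from 'mongoose';

    // Assumed helper: run `fn` inside a transaction, committing on success
    // and aborting on any thrown error, then always end the session.
    export async function mongoSessionRun<T>(
      fn: (session: ClientSession) => Promise<T>
    ): Promise<T> {
      const session = await mongoose.startSession();
      try {
        session.startTransaction();
        const result = await fn(session);
        await session.commitTransaction();
        return result;
      } catch (error) {
        await session.abortTransaction();
        throw error;
      } finally {
        await session.endSession();
      }
    }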
@@ -1,8 +1,5 @@
-import type { NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
 import { readFileContentFromMongo } from '@fastgpt/service/common/file/gridfs/controller';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { FileIdCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import {

@@ -23,11 +20,9 @@ import { MongoRawTextBuffer } from '@fastgpt/service/common/buffer/rawText/schema';
 import { rawText2Chunks } from '@fastgpt/service/core/dataset/read';
 import { NextAPI } from '@/service/middleware/entry';
 import { ApiRequestProps } from '@fastgpt/service/type/next';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

-async function handler(
-  req: ApiRequestProps<FileIdCreateDatasetCollectionParams>,
-  res: NextApiResponse<any>
-) {
+async function handler(req: ApiRequestProps<FileIdCreateDatasetCollectionParams>) {
   const {
     fileId,
     trainingType = TrainingModeEnum.chunk,

@@ -37,13 +32,11 @@ async function handler(
     ...body
   } = req.body;

-  await connectToDatabase();
-
   const { teamId, tmbId, dataset } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
-    per: 'w',
+    per: WritePermissionVal,
     datasetId: body.datasetId
   });

@@ -137,13 +130,10 @@ async function handler(
       }
     );

+    // remove buffer
+    await MongoRawTextBuffer.deleteOne({ sourceId: fileId });
     return collectionId;
   });
-
-  // remove buffer
-  await MongoRawTextBuffer.deleteOne({ sourceId: fileId });
-
-  jsonRes(res);
 }

 export default NextAPI(handler);
@@ -1,11 +1,6 @@
-/*
-    Create one dataset collection
-*/
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import type { LinkCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import {
   TrainingModeEnum,

@@ -18,83 +13,75 @@ import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { reloadCollectionChunks } from '@fastgpt/service/core/dataset/collection/utils';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   const {
     link,
     trainingType = TrainingModeEnum.chunk,
     chunkSize = 512,
     chunkSplitter,
     qaPrompt,
     ...body
   } = req.body as LinkCreateDatasetCollectionParams;

   const { teamId, tmbId, dataset } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
     datasetId: body.datasetId,
-    per: 'w'
+    per: WritePermissionVal
   });

   // 1. check dataset limit
   await checkDatasetLimit({
     teamId,
     insertLen: predictDataLimitLength(trainingType, new Array(10))
   });

-  await mongoSessionRun(async (session) => {
+  const { _id: collectionId } = await mongoSessionRun(async (session) => {
     // 2. create collection
     const collection = await createOneCollection({
       ...body,
       name: link,
       teamId,
       tmbId,
       type: DatasetCollectionTypeEnum.link,

       trainingType,
       chunkSize,
       chunkSplitter,
       qaPrompt,

       rawLink: link,
       session
     });

     // 3. create bill and start sync
     const { billId } = await createTrainingUsage({
       teamId,
       tmbId,
       appName: 'core.dataset.collection.Sync Collection',
       billSource: UsageSourceEnum.training,
       vectorModel: getVectorModel(dataset.vectorModel).name,
       agentModel: getLLMModel(dataset.agentModel).name,
       session
     });

     // load
     await reloadCollectionChunks({
       collection: {
         ...collection.toObject(),
         datasetId: dataset
       },
       tmbId,
       billId,
       session
     });

     return collection;
   });

-  jsonRes(res, {
-    data: { collectionId }
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return { collectionId };
 }
+
+export default NextAPI(handler);
@@ -1,8 +1,7 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
 import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
 import { getUploadModel } from '@fastgpt/service/common/file/multer';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { FileCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
 import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';

@@ -23,6 +22,7 @@ import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
 import { MongoImage } from '@fastgpt/service/common/file/image/schema';
 import { readRawTextByLocalFile } from '@fastgpt/service/common/file/read/utils';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

 async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   /**

@@ -49,7 +49,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     req,
     authToken: true,
     authApiKey: true,
-    per: 'w',
+    per: WritePermissionVal,
     datasetId: data.datasetId
   });

@@ -168,9 +168,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
       };
     });

-    jsonRes(res, {
-      data: { collectionId, results: insertResults }
-    });
+    return { collectionId, results: insertResults };
   } catch (error) {
     removeFilesByPaths(filePaths);
@@ -1,11 +1,6 @@
-/*
-    Create one dataset collection
-*/
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import type { TextCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import {
   TrainingModeEnum,

@@ -20,102 +15,94 @@ import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/controller';
 import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   const {
     name,
     text,
     trainingType = TrainingModeEnum.chunk,
     chunkSize = 512,
     chunkSplitter,
     qaPrompt,
     ...body
   } = req.body as TextCreateDatasetCollectionParams;

   const { teamId, tmbId, dataset } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
     datasetId: body.datasetId,
-    per: 'w'
+    per: WritePermissionVal
   });

   // 1. split text to chunks
   const { chunks } = splitText2Chunks({
     text,
     chunkLen: chunkSize,
     overlapRatio: trainingType === TrainingModeEnum.chunk ? 0.2 : 0,
     customReg: chunkSplitter ? [chunkSplitter] : []
   });

   // 2. check dataset limit
   await checkDatasetLimit({
     teamId,
     insertLen: predictDataLimitLength(trainingType, chunks)
   });

   const createResult = await mongoSessionRun(async (session) => {
     // 3. create collection
     const { _id: collectionId } = await createOneCollection({
       ...body,
       teamId,
       tmbId,
       type: DatasetCollectionTypeEnum.virtual,

       name,
       trainingType,
       chunkSize,
       chunkSplitter,
       qaPrompt,

       hashRawText: hashStr(text),
       rawTextLength: text.length,
       session
     });

     // 4. create training bill
     const { billId } = await createTrainingUsage({
       teamId,
       tmbId,
       appName: name,
       billSource: UsageSourceEnum.training,
       vectorModel: getVectorModel(dataset.vectorModel)?.name,
       agentModel: getLLMModel(dataset.agentModel)?.name,
       session
     });

     // 5. push chunks to training queue
     const insertResults = await pushDataListToTrainingQueue({
       teamId,
       tmbId,
       datasetId: dataset._id,
       collectionId,
       agentModel: dataset.agentModel,
       vectorModel: dataset.vectorModel,
       trainingMode: trainingType,
       prompt: qaPrompt,
       billId,
       data: chunks.map((text, index) => ({
         q: text,
         chunkIndex: index
       })),
       session
     });

     return { collectionId, results: insertResults };
   });

-  jsonRes(res, {
-    data: createResult
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return createResult;
 }

@@ -125,3 +112,5 @@ export const config = {
     }
   }
 };
+
+export default NextAPI(handler);
@@ -1,50 +1,42 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { findCollectionAndChild } from '@fastgpt/service/core/dataset/collection/utils';
 import { delCollectionAndRelatedSources } from '@fastgpt/service/core/dataset/collection/controller';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    const { id: collectionId } = req.query as { id: string };
+async function handler(req: NextApiRequest) {
+  const { id: collectionId } = req.query as { id: string };

-    if (!collectionId) {
-      throw new Error('CollectionIdId is required');
-    }
+  if (!collectionId) {
+    return Promise.reject(CommonErrEnum.missingParams);
+  }

   const { teamId, collection } = await authDatasetCollection({
     req,
     authToken: true,
     authApiKey: true,
     collectionId,
-    per: 'w'
+    per: WritePermissionVal
   });

   // find all delete id
   const collections = await findCollectionAndChild({
     teamId,
     datasetId: collection.datasetId._id,
     collectionId,
     fields: '_id teamId datasetId fileId metadata'
   });

   // delete
   await mongoSessionRun((session) =>
     delCollectionAndRelatedSources({
       collections,
       session
     })
   );
-
-    jsonRes(res);
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
 }
+
+export default NextAPI(handler);
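Input validation now follows one pattern everywhere: the handler rejects with an error code (return Promise.reject(CommonErrEnum.missingParams)) and leaves response writing to the wrapper. A hedged sketch of how such codes might map onto responses; the enum value and status mapping below are assumptions:

    // Assumed error-code plumbing, for illustration only.
    enum CommonErrEnum {
      missingParams = 'common.missingParams'
    }

    const errStatusMap: Record<string, number> = {
      [CommonErrEnum.missingParams]: 400 // assumed HTTP mapping
    };

    function toHttpError(error: unknown) {
      const code = typeof error === 'string' ? error : 'common.unknownError';
      return { status: errStatusMap[code] ?? 500, code };
    }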
@@ -1,50 +1,43 @@
 /*
     Get one dataset collection detail
 */
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
-import { DatasetCollectionItemType } from '@fastgpt/global/core/dataset/type';
+import type { NextApiRequest } from 'next';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
 import { getFileById } from '@fastgpt/service/common/file/gridfs/controller';
 import { getCollectionSourceData } from '@fastgpt/global/core/dataset/collection/utils';
+import { NextAPI } from '@/service/middleware/entry';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import { DatasetCollectionItemType } from '@fastgpt/global/core/dataset/type';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    const { id } = req.query as { id: string };
+async function handler(req: NextApiRequest): Promise<DatasetCollectionItemType> {
+  const { id } = req.query as { id: string };

-    if (!id) {
-      throw new Error('Id is required');
-    }
+  if (!id) {
+    return Promise.reject(CommonErrEnum.missingParams);
+  }

   // credential check
-  const { collection, canWrite } = await authDatasetCollection({
+  const { collection, permission } = await authDatasetCollection({
     req,
     authToken: true,
     authApiKey: true,
     collectionId: id,
-    per: 'r'
+    per: ReadPermissionVal
   });

   // get file
   const file = collection?.fileId
     ? await getFileById({ bucketName: BucketNameEnum.dataset, fileId: collection.fileId })
     : undefined;

-    jsonRes<DatasetCollectionItemType>(res, {
-      data: {
-        ...collection,
-        canWrite,
-        ...getCollectionSourceData(collection),
-        file
-      }
-    });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return {
+    ...collection,
+    ...getCollectionSourceData(collection),
+    permission,
+    file
+  };
 }
+
+export default NextAPI(handler);
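For API consumers, the visible contract change in this endpoint is that the boolean canWrite field is gone and a structured permission object is returned instead. A sketch of the corresponding call-site migration, with a hypothetical adapter for components that still expect a boolean:

    // Before: if (collection.canWrite) { ... }
    // After:  if (collection.permission.hasWritePer) { ... }

    // Hypothetical adapter during the migration:
    const canWrite = (c: { permission: { hasWritePer: boolean } }) =>
      c.permission.hasWritePer;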
@@ -1,179 +1,167 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { DatasetTrainingCollectionName } from '@fastgpt/service/core/dataset/training/schema';
 import { Types } from '@fastgpt/service/common/mongo';
 import type { DatasetCollectionsListItemType } from '@/global/core/dataset/type.d';
 import type { GetDatasetCollectionsProps } from '@/global/core/api/datasetReq';
 import { PagingData } from '@/types';
 import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
 import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { DatasetDataCollectionName } from '@fastgpt/service/core/dataset/data/schema';
 import { startTrainingQueue } from '@/service/core/dataset/training/utils';
+import { NextAPI } from '@/service/middleware/entry';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   let {
     pageNum = 1,
     pageSize = 10,
     datasetId,
     parentId = null,
     searchText = '',
     selectFolder = false,
     simple = false
   } = req.body as GetDatasetCollectionsProps;
   searchText = searchText?.replace(/'/g, '');
   pageSize = Math.min(pageSize, 30);

   // auth dataset and get my role
-  const { teamId, tmbId, canWrite } = await authDataset({
+  const { teamId, permission } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
     datasetId,
-    per: 'r'
+    per: ReadPermissionVal
   });

   const match = {
     teamId: new Types.ObjectId(teamId),
     datasetId: new Types.ObjectId(datasetId),
     parentId: parentId ? new Types.ObjectId(parentId) : null,
     ...(selectFolder ? { type: DatasetCollectionTypeEnum.folder } : {}),
     ...(searchText
       ? {
           name: new RegExp(searchText, 'i')
         }
       : {})
   };

   // not count data amount
   if (simple) {
     const collections = await MongoDatasetCollection.find(match, '_id parentId type name')
       .sort({
         updateTime: -1
       })
       .lean();
-    return jsonRes<PagingData<DatasetCollectionsListItemType>>(res, {
-      data: {
-        pageNum,
-        pageSize,
-        data: await Promise.all(
-          collections.map(async (item) => ({
-            ...item,
-            dataAmount: 0,
-            trainingAmount: 0,
-            canWrite // admin or team owner can write
-          }))
-        ),
-        total: await MongoDatasetCollection.countDocuments(match)
-      }
-    });
+    return {
+      pageNum,
+      pageSize,
+      data: await Promise.all(
+        collections.map(async (item) => ({
+          ...item,
+          dataAmount: 0,
+          trainingAmount: 0,
+          permission
+        }))
+      ),
+      total: await MongoDatasetCollection.countDocuments(match)
+    };
   }

   const [collections, total]: [DatasetCollectionsListItemType[], number] = await Promise.all([
     MongoDatasetCollection.aggregate([
       {
         $match: match
       },
       {
         $sort: { updateTime: -1 }
       },
       {
         $skip: (pageNum - 1) * pageSize
       },
       {
         $limit: pageSize
       },
       // count training data
       {
         $lookup: {
           from: DatasetTrainingCollectionName,
           let: { id: '$_id', team_id: match.teamId, dataset_id: match.datasetId },
           pipeline: [
             {
               $match: {
                 $expr: {
                   $and: [{ $eq: ['$teamId', '$$team_id'] }, { $eq: ['$collectionId', '$$id'] }]
                 }
               }
             },
             { $count: 'count' }
           ],
           as: 'trainingCount'
         }
       },
       // count collection total data
       {
         $lookup: {
           from: DatasetDataCollectionName,
           let: { id: '$_id', team_id: match.teamId, dataset_id: match.datasetId },
           pipeline: [
             {
               $match: {
                 $expr: {
                   $and: [
                     { $eq: ['$teamId', '$$team_id'] },
                     { $eq: ['$datasetId', '$$dataset_id'] },
                     { $eq: ['$collectionId', '$$id'] }
                   ]
                 }
               }
             },
             { $count: 'count' }
           ],
           as: 'dataCount'
         }
       },
       {
         $project: {
           _id: 1,
           parentId: 1,
           tmbId: 1,
           name: 1,
           type: 1,
           status: 1,
           updateTime: 1,
           fileId: 1,
           rawLink: 1,
           dataAmount: {
             $ifNull: [{ $arrayElemAt: ['$dataCount.count', 0] }, 0]
           },
           trainingAmount: {
             $ifNull: [{ $arrayElemAt: ['$trainingCount.count', 0] }, 0]
           }
         }
       }
     ]),
     MongoDatasetCollection.countDocuments(match)
   ]);

   const data = await Promise.all(
-    collections.map(async (item, i) => ({
+    collections.map(async (item) => ({
       ...item,
-      canWrite: String(item.tmbId) === tmbId || canWrite
+      permission
     }))
   );

   if (data.find((item) => item.trainingAmount > 0)) {
     startTrainingQueue();
   }

   // count collections
-  jsonRes<PagingData<DatasetCollectionsListItemType>>(res, {
-    data: {
-      pageNum,
-      pageSize,
-      data,
-      total
-    }
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return {
+    pageNum,
+    pageSize,
+    data,
+    total
+  };
 }

 export default NextAPI(handler);
@@ -1,34 +1,24 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import type { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type.d';
+import type { NextApiRequest } from 'next';
 import { getDatasetCollectionPaths } from '@fastgpt/service/core/dataset/collection/utils';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    const { parentId } = req.query as { parentId: string };
+export default async function handler(req: NextApiRequest) {
+  const { parentId } = req.query as { parentId: string };

-    if (!parentId) {
-      return jsonRes(res, {
-        data: []
-      });
-    }
+  if (!parentId) {
+    return [];
+  }

-    await authDatasetCollection({ req, authToken: true, collectionId: parentId, per: 'r' });
-    const paths = await getDatasetCollectionPaths({
-      parentId
-    });
+  await authDatasetCollection({
+    req,
+    authToken: true,
+    collectionId: parentId,
+    per: ReadPermissionVal
+  });
+  const paths = await getDatasetCollectionPaths({
+    parentId
+  });

-    jsonRes<ParentTreePathItemType[]>(res, {
-      data: paths
-    });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return paths;
 }
@@ -1,9 +1,10 @@
-import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
+import type { ApiRequestProps } from '@fastgpt/service/type/next';
 import { NextAPI } from '@/service/middleware/entry';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { createFileToken } from '@fastgpt/service/support/permission/controller';
 import { BucketNameEnum, ReadFileBaseUrl } from '@fastgpt/global/common/file/constants';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

 export type readCollectionSourceQuery = {
   collectionId: string;

@@ -17,15 +18,14 @@ export type readCollectionSourceResponse = {
 };

 async function handler(
-  req: ApiRequestProps<readCollectionSourceBody, readCollectionSourceQuery>,
-  res: ApiResponseType<any>
+  req: ApiRequestProps<readCollectionSourceBody, readCollectionSourceQuery>
 ): Promise<readCollectionSourceResponse> {
   const { collection, teamId, tmbId } = await authDatasetCollection({
     req,
     authToken: true,
     authApiKey: true,
     collectionId: req.query.collectionId,
-    per: 'r'
+    per: ReadPermissionVal
   });

   const sourceUrl = await (async () => {
@@ -1,7 +1,5 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import type { NextApiRequest } from 'next';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import {
   getCollectionAndRawText,
   reloadCollectionChunks

@@ -17,98 +15,90 @@ import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   const { collectionId } = req.body as { collectionId: string };

   if (!collectionId) {
-    throw new Error('CollectionIdId is required');
+    return Promise.reject(CommonErrEnum.missingParams);
   }

   const { collection, tmbId } = await authDatasetCollection({
     req,
     authToken: true,
     collectionId,
-    per: 'w'
+    per: WritePermissionVal
   });

   if (collection.type !== DatasetCollectionTypeEnum.link || !collection.rawLink) {
     return Promise.reject(DatasetErrEnum.unLinkCollection);
   }

   const { title, rawText, isSameRawText } = await getCollectionAndRawText({
     collection
   });

   if (isSameRawText) {
-    return jsonRes(res, {
-      data: DatasetCollectionSyncResultEnum.sameRaw
-    });
+    return DatasetCollectionSyncResultEnum.sameRaw;
   }

   /* Not the same original text, create and reload */

   const vectorModelData = getVectorModel(collection.datasetId.vectorModel);
   const agentModelData = getLLMModel(collection.datasetId.agentModel);

   await mongoSessionRun(async (session) => {
     // create training bill
     const { billId } = await createTrainingUsage({
       teamId: collection.teamId,
       tmbId,
       appName: 'core.dataset.collection.Sync Collection',
       billSource: UsageSourceEnum.training,
       vectorModel: vectorModelData.name,
       agentModel: agentModelData.name,
       session
     });

     // create a collection and delete old
     const newCol = await createOneCollection({
       teamId: collection.teamId,
       tmbId: collection.tmbId,
       parentId: collection.parentId,
       datasetId: collection.datasetId._id,
       name: title || collection.name,
       type: collection.type,
       trainingType: collection.trainingType,
       chunkSize: collection.chunkSize,
       fileId: collection.fileId,
       rawLink: collection.rawLink,
       metadata: collection.metadata,
       createTime: collection.createTime,
       session
     });

     // start load
     await reloadCollectionChunks({
       collection: {
         ...newCol.toObject(),
         datasetId: collection.datasetId
       },
       tmbId,
       billId,
       rawText,
       session
     });

     // delete old collection
     await delCollectionAndRelatedSources({
       collections: [collection],
       session
     });
   });

-  jsonRes(res, {
-    data: DatasetCollectionSyncResultEnum.success
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return DatasetCollectionSyncResultEnum.success;
 }
+
+export default NextAPI(handler);
@@ -1,43 +1,36 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import type { UpdateDatasetCollectionParams } from '@/global/core/api/datasetReq.d';
 import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
 import { getCollectionUpdateTime } from '@fastgpt/service/core/dataset/collection/utils';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    const { id, parentId, name } = req.body as UpdateDatasetCollectionParams;
+async function handler(req: NextApiRequest) {
+  const { id, parentId, name } = req.body as UpdateDatasetCollectionParams;

-    if (!id) {
-      throw new Error('缺少参数');
-    }
+  if (!id) {
+    return Promise.reject(CommonErrEnum.missingParams);
+  }

   // credential check
   await authDatasetCollection({
     req,
     authToken: true,
     authApiKey: true,
     collectionId: id,
-    per: 'w'
+    per: WritePermissionVal
   });

   const updateFields: Record<string, any> = {
     ...(parentId !== undefined && { parentId: parentId || null }),
     ...(name && { name, updateTime: getCollectionUpdateTime({ name }) })
   };

   await MongoDatasetCollection.findByIdAndUpdate(id, {
     $set: updateFields
   });
-
-    jsonRes(res);
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
 }
+
+export default NextAPI(handler);
@@ -1,6 +1,4 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import type { CreateDatasetParams } from '@/global/core/dataset/api.d';
import { createDefaultCollection } from '@fastgpt/service/core/dataset/collection/controller';
@@ -8,62 +6,59 @@ import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { getLLMModel, getVectorModel, getDatasetModel } from '@fastgpt/service/core/ai/model';
import { checkTeamDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { NullPermission, WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const {
parentId,
name,
type = DatasetTypeEnum.dataset,
avatar,
vectorModel = global.vectorModels[0].model,
agentModel = getDatasetModel().model
} = req.body as CreateDatasetParams;
async function handler(req: NextApiRequest) {
const {
parentId,
name,
type = DatasetTypeEnum.dataset,
avatar,
vectorModel = global.vectorModels[0].model,
agentModel = getDatasetModel().model,
defaultPermission = NullPermission
} = req.body as CreateDatasetParams;

// auth
const { teamId, tmbId } = await authUserPer({
req,
authToken: true,
authApiKey: true,
per: WritePermissionVal
});
// auth
const { teamId, tmbId } = await authUserPer({
req,
authToken: true,
authApiKey: true,
per: WritePermissionVal
});

// check model valid
const vectorModelStore = getVectorModel(vectorModel);
const agentModelStore = getLLMModel(agentModel);
if (!vectorModelStore || !agentModelStore) {
throw new Error('vectorModel or qaModel is invalid');
}
// check model valid
const vectorModelStore = getVectorModel(vectorModel);
const agentModelStore = getLLMModel(agentModel);
if (!vectorModelStore || !agentModelStore) {
throw new Error('vectorModel or qaModel is invalid'); // TODO: use enum code
}

// check limit
await checkTeamDatasetLimit(teamId);
// check limit
await checkTeamDatasetLimit(teamId);

const { _id } = await MongoDataset.create({
name,
const { _id } = await MongoDataset.create({
name,
teamId,
tmbId,
vectorModel,
agentModel,
avatar,
parentId: parentId || null,
type,
defaultPermission
});

if (type === DatasetTypeEnum.dataset) {
await createDefaultCollection({
datasetId: _id,
teamId,
tmbId,
vectorModel,
agentModel,
avatar,
parentId: parentId || null,
type
});

if (type === DatasetTypeEnum.dataset) {
await createDefaultCollection({
datasetId: _id,
teamId,
tmbId
});
}

jsonRes(res, { data: _id });
} catch (err) {
jsonRes(res, {
code: 500,
error: err
tmbId
});
}

return _id;
}

export default NextAPI(handler);
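For reference, a hedged usage sketch of the new `defaultPermission` parameter on dataset creation. The route path, the response envelope, and the numeric value used for `NullPermission` are assumptions, not taken from this diff:

// Hypothetical client call: defaultPermission is the new optional field.
const response = await fetch('/api/core/dataset/create', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    name: 'My dataset',
    type: 'dataset',
    defaultPermission: 0 // assumed NullPermission: no team-wide access by default
  })
});
const { data: datasetId } = await response.json();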
@@ -1,32 +1,31 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { authDatasetData } from '@/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';
import { deleteDatasetData } from '@/service/core/dataset/data/controller';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { id: dataId } = req.query as {
id: string;
};

if (!dataId) {
throw new Error('dataId is required');
return Promise.reject(CommonErrEnum.missingParams);
}

// Credential verification
const { teamId, datasetData } = await authDatasetData({
const { datasetData } = await authDatasetData({
req,
authToken: true,
authApiKey: true,
dataId,
per: 'w'
per: WritePermissionVal
});

await deleteDatasetData(datasetData);

jsonRes(res, {
data: 'success'
});
return 'success';
}

export default NextAPI(handler);
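One detail worth noting in these guard clauses: under a promise-based middleware, the rejection must be returned. A bare `Promise.reject(...)` on its own line creates an orphaned rejected promise while the handler keeps executing past the guard. A minimal, self-contained illustration in plain TypeScript:

// Demonstration of why the `return` matters in an async guard clause.
async function guarded(id?: string): Promise<string> {
  if (!id) {
    // Without `return`, this rejection would be unobserved and
    // execution would continue to the next statement.
    return Promise.reject(new Error('missingParams'));
  }
  return 'ok';
}

guarded().catch((err) => console.error(err.message)); // logs "missingParams"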
@@ -1,8 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authDatasetData } from '@/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';

export type Response = {
id: string;
@@ -11,7 +10,7 @@ export type Response = {
source: string;
};

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { id: dataId } = req.query as {
id: string;
};
@@ -22,12 +21,10 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
dataId,
per: 'r'
per: ReadPermissionVal
});

jsonRes(res, {
data: datasetData
});
return datasetData;
}

export default NextAPI(handler);
@@ -2,30 +2,30 @@
insert one data to dataset (immediately insert)
manual input or mark data
*/
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { countPromptTokens } from '@fastgpt/service/common/string/tiktoken/index';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { hasSameValue } from '@/service/core/dataset/data/utils';
import { insertData2Dataset } from '@/service/core/dataset/data/controller';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { getCollectionWithDataset } from '@fastgpt/service/core/dataset/controller';
import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
import { InsertOneDatasetDataProps } from '@/global/core/dataset/api';
import { simpleText } from '@fastgpt/global/common/string/tools';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { collectionId, q, a, indexes } = req.body as InsertOneDatasetDataProps;

if (!q) {
throw new Error('q is required');
return Promise.reject(CommonErrEnum.missingParams);
}

if (!collectionId) {
throw new Error('collectionId is required');
return Promise.reject(CommonErrEnum.missingParams);
}

// Credential verification
@@ -34,7 +34,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
collectionId,
per: 'w'
per: WritePermissionVal
});

await checkDatasetLimit({
@@ -93,9 +93,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
model: vectorModelData.model
});

jsonRes<string>(res, {
data: insertId
});
return insertId;
}

export default NextAPI(handler);
@@ -1,15 +1,12 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { DatasetDataListItemType } from '@/global/core/dataset/type.d';
import type { NextApiRequest } from 'next';
import type { GetDatasetDataListProps } from '@/global/core/api/datasetReq';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { PagingData } from '@/types';
import { replaceRegChars } from '@fastgpt/global/common/string/tools';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
let {
pageNum = 1,
pageSize = 10,
@@ -25,7 +22,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
collectionId,
per: 'r'
per: ReadPermissionVal
});

searchText = replaceRegChars(searchText).replace(/'/g, '');
@@ -50,14 +47,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
MongoDatasetData.countDocuments(match)
]);

jsonRes<PagingData<DatasetDataListItemType>>(res, {
data: {
pageNum,
pageSize,
data,
total
}
});
return {
pageNum,
pageSize,
data,
total
};
}

export default NextAPI(handler);
@@ -5,11 +5,12 @@ import type {
PushDatasetDataProps,
PushDatasetDataResponse
} from '@fastgpt/global/core/dataset/api.d';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
import { pushDataListToTrainingQueue } from '@fastgpt/service/core/dataset/training/controller';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
const body = req.body as PushDatasetDataProps;
@@ -29,7 +30,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
collectionId,
per: 'w'
per: WritePermissionVal
});

// auth dataset limit
@@ -1,14 +1,13 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { updateData2Dataset } from '@/service/core/dataset/data/controller';
import { authDatasetData } from '@/service/support/permission/auth/dataset';
import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
import { UpdateDatasetDataProps } from '@/global/core/dataset/api';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { id, q = '', a, indexes = [] } = req.body as UpdateDatasetDataProps;

// auth data permission
@@ -23,7 +22,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
dataId: id,
per: 'w'
per: WritePermissionVal
});

// auth team balance
@@ -46,8 +45,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
tokens,
model: vectorModel
});

jsonRes(res);
}

export default NextAPI(handler);
@@ -1,54 +1,47 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { delDatasetRelevantData } from '@fastgpt/service/core/dataset/controller';
import { findDatasetAndAllChildren } from '@fastgpt/service/core/dataset/controller';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { NextAPI } from '@/service/middleware/entry';
import { OwnerPermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { id: datasetId } = req.query as {
id: string;
};
async function handler(req: NextApiRequest) {
const { id: datasetId } = req.query as {
id: string;
};

if (!datasetId) {
throw new Error('Missing parameters');
}

// auth owner
const { teamId } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: 'owner'
});

const datasets = await findDatasetAndAllChildren({
teamId,
datasetId
});

// delete all dataset.data and pg data
await mongoSessionRun(async (session) => {
// delete dataset data
await delDatasetRelevantData({ datasets, session });
await MongoDataset.deleteMany(
{
_id: { $in: datasets.map((d) => d._id) }
},
{ session }
);
});

jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
if (!datasetId) {
return Promise.reject(CommonErrEnum.missingParams);
}

// auth owner
const { teamId } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: OwnerPermissionVal
});

const datasets = await findDatasetAndAllChildren({
teamId,
datasetId
});

// delete all dataset.data and pg data
await mongoSessionRun(async (session) => {
// delete dataset data
await delDatasetRelevantData({ datasets, session });
await MongoDataset.deleteMany(
{
_id: { $in: datasets.map((d) => d._id) }
},
{ session }
);
});
}

export default NextAPI(handler);
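The delete handler wraps both destructive steps in `mongoSessionRun`, so the relevant-data cleanup and the dataset deletion either both commit or both roll back. A hedged sketch of what such a helper is assumed to do, illustrative only and not FastGPT's actual implementation:

import mongoose from 'mongoose';

// Assumed semantics: start a transaction, run the callback with the
// session, commit on success, abort on error, always end the session.
async function sessionRunSketch<T>(
  fn: (session: mongoose.ClientSession) => Promise<T>
): Promise<T> {
  const session = await mongoose.startSession();
  try {
    session.startTransaction();
    const result = await fn(session);
    await session.commitTransaction();
    return result;
  } catch (error) {
    await session.abortTransaction();
    throw error;
  } finally {
    session.endSession();
  }
}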
@@ -1,43 +1,39 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import type { DatasetItemType } from '@fastgpt/global/core/dataset/type.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { id: datasetId } = req.query as {
id: string;
};
type Query = {
id: string;
};

if (!datasetId) {
throw new Error('Missing parameters');
}
async function handler(req: ApiRequestProps<Query>): Promise<DatasetItemType> {
const { id: datasetId } = req.query as {
id: string;
};

// Credential verification
const { dataset, canWrite, isOwner } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
});

jsonRes<DatasetItemType>(res, {
data: {
...dataset,
vectorModel: getVectorModel(dataset.vectorModel),
agentModel: getLLMModel(dataset.agentModel),
canWrite,
isOwner
}
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
if (!datasetId) {
return Promise.reject(CommonErrEnum.missingParams);
}

// Credential verification
const { dataset, permission } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: ReadPermissionVal
});

return {
...dataset,
permission,
vectorModel: getVectorModel(dataset.vectorModel),
agentModel: getLLMModel(dataset.agentModel)
};
}

export default NextAPI(handler);
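Note the response shape change in this hunk: the old `canWrite`/`isOwner` booleans are replaced by a single `permission` object. A hedged consumer-side sketch; the fetch helper is hypothetical, and the `hasWritePer` accessor is an assumption modeled on the `hasReadPer` usage that appears in the list route below:

// Hypothetical frontend usage of the new permission object.
const dataset = await getDatasetDetail(datasetId); // assumed API client helper
if (dataset.permission.hasWritePer) {
  // render edit / import controls
}
if (dataset.permission.isOwner) {
  // render delete and collaborator management
}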
@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { responseWriteController } from '@fastgpt/service/common/response';
import { addLog } from '@fastgpt/service/common/system/log';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { findDatasetAndAllChildren } from '@fastgpt/service/core/dataset/controller';
import {
@@ -9,6 +9,8 @@ import {
updateExportDatasetLimit
} from '@fastgpt/service/support/user/utils';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
let { datasetId } = req.query as {
@@ -16,11 +18,16 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
};

if (!datasetId || !global.pgClient) {
throw new Error('Missing parameters');
return Promise.reject(CommonErrEnum.missingParams);
}

// Credential verification
const { teamId } = await authDataset({ req, authToken: true, datasetId, per: 'w' });
const { teamId } = await authDataset({
req,
authToken: true,
datasetId,
per: WritePermissionVal
});

await checkExportDatasetLimit({
teamId,
@@ -1,24 +1,14 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authDatasetFile } from '@fastgpt/service/support/permission/auth/dataset';
import { DatasetFileSchema } from '@fastgpt/global/core/dataset/type.d';
import type { NextApiRequest } from 'next';
import { authDatasetFile } from '@fastgpt/service/support/permission/dataset/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const { fileId } = req.query as { fileId: string };
// Credential verification
const { file } = await authDatasetFile({ req, authToken: true, fileId, per: ReadPermissionVal });

const { fileId } = req.query as { fileId: string };
// Credential verification
const { file } = await authDatasetFile({ req, authToken: true, fileId, per: 'r' });

jsonRes<DatasetFileSchema>(res, {
data: file
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
return file;
}

export default NextAPI(handler);
@@ -1,10 +1,10 @@
import type { NextApiResponse } from 'next';
import { authFile } from '@fastgpt/service/support/permission/auth/file';
import { authDatasetFile } from '@fastgpt/service/support/permission/dataset/auth';
import { DatasetSourceReadTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { rawText2Chunks, readDatasetSourceRawText } from '@fastgpt/service/core/dataset/read';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { NextAPI } from '@/service/middleware/entry';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

export type PostPreviewFilesChunksProps = {
type: DatasetSourceReadTypeEnum;
@@ -21,8 +21,7 @@ export type PreviewChunksResponse = {
}[];

async function handler(
req: ApiRequestProps<PostPreviewFilesChunksProps>,
res: NextApiResponse<any>
req: ApiRequestProps<PostPreviewFilesChunksProps>
): Promise<PreviewChunksResponse> {
const { type, sourceId, chunkSize, customSplitChar, overlapRatio, selector, isQAImport } =
req.body;
@@ -36,7 +35,13 @@ async function handler(

const { teamId } = await (async () => {
if (type === DatasetSourceReadTypeEnum.fileLocal) {
return authFile({ req, authToken: true, authApiKey: true, fileId: sourceId });
return authDatasetFile({
req,
authToken: true,
authApiKey: true,
fileId: sourceId,
per: ReadPermissionVal
});
}
return authCert({ req, authApiKey: true, authToken: true });
})();
@@ -1,34 +1,68 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import type { NextApiRequest } from 'next';
import type { DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { mongoRPermission } from '@fastgpt/global/support/permission/utils';
import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
import {
PerResourceTypeEnum,
ReadPermissionVal
} from '@fastgpt/global/support/permission/constant';
import { MongoResourcePermission } from '@fastgpt/service/support/permission/schema';
import { parseParentIdInMongo } from '@fastgpt/global/common/parentFolder/utils';

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { parentId, type } = req.query as { parentId?: string; type?: DatasetTypeEnum };
// Credential verification
const { teamId, tmbId, permission } = await authUserPer({
const {
teamId,
tmbId,
permission: tmbPer
} = await authUserPer({
req,
authToken: true,
authApiKey: true,
per: ReadPermissionVal
});

const datasets = await MongoDataset.find({
...mongoRPermission({ teamId, tmbId, permission }),
...(parentId !== undefined && { parentId: parentId || null }),
...(type && { type })
})
.sort({ updateTime: -1 })
.lean();
const [myDatasets, rpList] = await Promise.all([
MongoDataset.find({
teamId,
...parseParentIdInMongo(parentId),
...(type && { type })
})
.sort({
updateTime: -1
})
.lean(),
MongoResourcePermission.find({
resourceType: PerResourceTypeEnum.dataset,
teamId,
tmbId
}).lean()
]);

const filterDatasets = myDatasets
.map((dataset) => {
const perVal = rpList.find(
(item) => String(item.resourceId) === String(dataset._id)
)?.permission;
const Per = new DatasetPermission({
per: perVal ?? dataset.defaultPermission,
isOwner: String(dataset.tmbId) === tmbId || tmbPer.isOwner
});

return {
...dataset,
permission: Per
};
})
.filter((app) => app.permission.hasReadPer);

const data = await Promise.all(
datasets.map<DatasetListItemType>((item) => ({
filterDatasets.map<DatasetListItemType>((item) => ({
_id: item._id,
parentId: item.parentId,
avatar: item.avatar,
@@ -36,15 +70,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
intro: item.intro,
type: item.type,
permission: item.permission,
canWrite: permission.hasWritePer,
isOwner: permission.isOwner || String(item.tmbId) === tmbId,
vectorModel: getVectorModel(item.vectorModel)
vectorModel: getVectorModel(item.vectorModel),
defaultPermission: item.defaultPermission
}))
);

jsonRes<DatasetListItemType[]>(res, {
data
});
return data;
}

export default NextAPI(handler);
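The list route above carries the core of the new model: an explicit per-member grant in MongoResourcePermission overrides the dataset's defaultPermission, and ownership short-circuits everything. A condensed sketch of that resolution logic; the bit values and the helper class are illustrative, and the real constants live in @fastgpt/global/support/permission/constant rather than in this diff:

// Illustrative permission bits; not the actual FastGPT values.
const ReadPerBit = 0b100;
const WritePerBit = 0b110;

class PermissionSketch {
  constructor(
    private per: number,
    public isOwner: boolean
  ) {}
  get hasReadPer(): boolean {
    return this.isOwner || (this.per & ReadPerBit) === ReadPerBit;
  }
  get hasWritePer(): boolean {
    return this.isOwner || (this.per & WritePerBit) === WritePerBit;
  }
}

function resolveDatasetPermission(opts: {
  explicitPer?: number; // from a MongoResourcePermission record, if any
  defaultPermission: number; // fallback stored on the dataset itself
  isOwner: boolean; // dataset creator, or team owner via tmbPer.isOwner
}): PermissionSketch {
  // An explicit collaborator grant wins over the dataset default.
  return new PermissionSketch(opts.explicitPer ?? opts.defaultPermission, opts.isOwner);
}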
@@ -1,33 +1,20 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import type { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const { parentId } = req.query as { parentId: string };

const { parentId } = req.query as { parentId: string };

if (!parentId) {
return jsonRes(res, {
data: []
});
}

await authDataset({ req, authToken: true, datasetId: parentId, per: 'r' });

jsonRes<ParentTreePathItemType[]>(res, {
data: await getParents(parentId)
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
if (!parentId) {
return [];
}

await authDataset({ req, authToken: true, datasetId: parentId, per: ReadPermissionVal });

return await getParents(parentId);
}

async function getParents(parentId?: string): Promise<ParentTreePathItemType[]> {
@@ -44,3 +31,5 @@ async function getParents(parentId?: string): Promise<ParentTreePathItemType[]>

return paths;
}

export default NextAPI(handler);
@@ -1,7 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import type { SearchTestProps, SearchTestResponse } from '@/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import type { SearchTestProps } from '@/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
import { searchDatasetData } from '@fastgpt/service/core/dataset/search/controller';
import { updateApiKeyUsage } from '@fastgpt/service/support/openapi/tools';
@@ -13,8 +12,10 @@ import {
checkTeamReRankPermission
} from '@fastgpt/service/support/permission/teamLimit';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const {
datasetId,
text,
@@ -29,8 +30,9 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
} = req.body as SearchTestProps;

if (!datasetId || !text) {
throw new Error('Missing parameters');
return Promise.reject(CommonErrEnum.missingParams);
}

const start = Date.now();

// auth dataset role
@@ -39,7 +41,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
per: ReadPermissionVal
});
// auth balance
await checkTeamAIPoints(teamId);
@@ -88,14 +90,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
});
}

jsonRes<SearchTestResponse>(res, {
data: {
list: searchRes,
duration: `${((Date.now() - start) / 1000).toFixed(3)}s`,
queryExtensionModel: aiExtensionResult?.model,
...result
}
});
return {
list: searchRes,
duration: `${((Date.now() - start) / 1000).toFixed(3)}s`,
queryExtensionModel: aiExtensionResult?.model,
...result
};
}

export default NextAPI(handler);
@@ -1,10 +1,9 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import type { ApiRequestProps } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';

type Props = {};
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

export type getDatasetTrainingQueueResponse = {
rebuildingCount: number;
@@ -12,8 +11,7 @@ export type getDatasetTrainingQueueResponse = {
};

async function handler(
req: ApiRequestProps<any, { datasetId: string }>,
res: ApiResponseType<any>
req: ApiRequestProps<any, { datasetId: string }>
): Promise<getDatasetTrainingQueueResponse> {
const { datasetId } = req.query;

@@ -22,7 +20,7 @@ async function handler(
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
per: ReadPermissionVal
});

const [rebuildingCount, trainingCount] = await Promise.all([
@@ -1,46 +1,37 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { GetTrainingQueueProps } from '@/global/core/dataset/api';
import { NextAPI } from '@/service/middleware/entry';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authToken: true });
const { vectorModel, agentModel } = req.query as GetTrainingQueueProps;
async function handler(req: NextApiRequest) {
await authCert({ req, authToken: true });
const { vectorModel, agentModel } = req.query as GetTrainingQueueProps;

// get queue data
// Count the queued records for vectorModel and agentModel separately
const data = await MongoDatasetTraining.aggregate([
{
$match: {
lockTime: { $lt: new Date('2040/1/1') },
$or: [{ model: { $eq: vectorModel } }, { model: { $eq: agentModel } }]
}
},
{
$group: {
_id: '$model',
count: { $sum: 1 }
}
// get queue data
// Count the queued records for vectorModel and agentModel separately
const data = await MongoDatasetTraining.aggregate([
{
$match: {
lockTime: { $lt: new Date('2040/1/1') },
$or: [{ model: { $eq: vectorModel } }, { model: { $eq: agentModel } }]
}
]);

const vectorTrainingCount = data.find((item) => item._id === vectorModel)?.count || 0;
const agentTrainingCount = data.find((item) => item._id === agentModel)?.count || 0;

jsonRes(res, {
data: {
vectorTrainingCount,
agentTrainingCount
},
{
$group: {
_id: '$model',
count: { $sum: 1 }
}
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
]);

const vectorTrainingCount = data.find((item) => item._id === vectorModel)?.count || 0;
const agentTrainingCount = data.find((item) => item._id === agentModel)?.count || 0;

return {
vectorTrainingCount,
agentTrainingCount
};
}

export default NextAPI(handler);
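For readers unfamiliar with the `$group` stage: the aggregation returns one document per distinct model value, which the two `find` calls then pick apart. A small illustration with placeholder model names (the values shown are invented):

// Example shape of the aggregation output consumed above.
const queueCounts: { _id: string; count: number }[] = [
  { _id: 'embedding-model', count: 12 },
  { _id: 'agent-model', count: 3 }
];
const vectorTrainingCount = queueCounts.find((item) => item._id === 'embedding-model')?.count || 0;
console.log(vectorTrainingCount); // 12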
@@ -1,5 +1,5 @@
import { NextAPI } from '@/service/middleware/entry';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
@@ -8,7 +8,8 @@ import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/contr
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { OwnerPermissionVal } from '@fastgpt/global/support/permission/constant';

export type rebuildEmbeddingBody = {
datasetId: string;
@@ -17,10 +18,7 @@ export type rebuildEmbeddingBody = {

export type Response = {};

async function handler(
req: ApiRequestProps<rebuildEmbeddingBody>,
res: ApiResponseType<any>
): Promise<Response> {
async function handler(req: ApiRequestProps<rebuildEmbeddingBody>): Promise<Response> {
const { datasetId, vectorModel } = req.body;

const { teamId, tmbId, dataset } = await authDataset({
@@ -28,7 +26,7 @@ async function handler(
authToken: true,
authApiKey: true,
datasetId,
per: 'owner'
per: OwnerPermissionVal
});

// check vector model
@@ -1,58 +1,56 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import type { DatasetUpdateBody } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { NextAPI } from '@/service/middleware/entry';
import {
OwnerPermissionVal,
WritePermissionVal
} from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const {
id,
parentId,
name,
avatar,
intro,
permission,
agentModel,
websiteConfig,
externalReadUrl,
status
} = req.body as DatasetUpdateBody;
async function handler(req: NextApiRequest) {
const {
id,
parentId,
name,
avatar,
intro,
agentModel,
websiteConfig,
externalReadUrl,
defaultPermission,
status
} = req.body as DatasetUpdateBody;

if (!id) {
throw new Error('Missing parameters');
}

if (permission) {
await authDataset({ req, authToken: true, datasetId: id, per: 'owner' });
} else {
await authDataset({ req, authToken: true, datasetId: id, per: 'w' });
}

await MongoDataset.findOneAndUpdate(
{
_id: id
},
{
...(parentId !== undefined && { parentId: parentId || null }),
...(name && { name }),
...(avatar && { avatar }),
...(permission && { permission }),
...(agentModel && { agentModel: agentModel.model }),
...(websiteConfig && { websiteConfig }),
...(status && { status }),
...(intro && { intro }),
...(externalReadUrl && { externalReadUrl })
}
);

jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
if (!id) {
return Promise.reject(CommonErrEnum.missingParams);
}

if (defaultPermission) {
await authDataset({ req, authToken: true, datasetId: id, per: OwnerPermissionVal });
} else {
await authDataset({ req, authToken: true, datasetId: id, per: WritePermissionVal });
}

console.log('update dataset', req.body);

await MongoDataset.findOneAndUpdate(
{
_id: id
},
{
...(parentId !== undefined && { parentId: parentId || null }),
...(name && { name }),
...(avatar && { avatar }),
...(agentModel && { agentModel: agentModel.model }),
...(websiteConfig && { websiteConfig }),
...(status && { status }),
...(intro && { intro }),
...(externalReadUrl && { externalReadUrl }),
defaultPermission
}
);
}

export default NextAPI(handler);
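The update route now gates on what is being changed: touching defaultPermission requires owner permission, while any other field only needs write permission. A hedged usage sketch; the route path, envelope, and the numeric permission value are assumptions rather than values taken from this diff:

// Hypothetical client call: only an owner may broaden team-wide visibility.
await fetch('/api/core/dataset/update', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    id: datasetId,
    defaultPermission: 4 // assumed team-read value; this field makes the call owner-only
  })
});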
@@ -1,9 +1,10 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { checkExportDatasetLimit } from '@fastgpt/service/support/user/utils';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { datasetId } = req.query as {
datasetId: string;
};
@@ -13,7 +14,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
}

// Credential verification
const { teamId } = await authDataset({ req, authToken: true, datasetId, per: 'w' });
const { teamId } = await authDataset({
req,
authToken: true,
datasetId,
per: WritePermissionVal
});

await checkExportDatasetLimit({
teamId,
@@ -1,41 +1,33 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { CreateTrainingUsageProps } from '@fastgpt/global/support/wallet/usage/api.d';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { name, datasetId } = req.body as CreateTrainingUsageProps;
async function handler(req: NextApiRequest) {
const { name, datasetId } = req.body as CreateTrainingUsageProps;

const { teamId, tmbId, dataset } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: 'w'
});
const { teamId, tmbId, dataset } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: WritePermissionVal
});

const { billId } = await createTrainingUsage({
teamId,
tmbId,
appName: name,
billSource: UsageSourceEnum.training,
vectorModel: getVectorModel(dataset.vectorModel).name,
agentModel: getLLMModel(dataset.agentModel).name
});
const { billId } = await createTrainingUsage({
teamId,
tmbId,
appName: name,
billSource: UsageSourceEnum.training,
vectorModel: getVectorModel(dataset.vectorModel).name,
agentModel: getLLMModel(dataset.agentModel).name
});

jsonRes<string>(res, {
data: billId
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
return billId;
}

export default NextAPI(handler);