* model config

* feat: model config ui

* perf: rename variable

* feat: custom request url

* perf: model buffer

* perf: init model

* feat: json model config

* auto login

* fix: ts

* update packages

* package

* fix: dockerfile
This commit is contained in:
Archer
2025-01-22 22:59:28 +08:00
committed by GitHub
parent 16629e32a7
commit e009be51e7
93 changed files with 2361 additions and 564 deletions

View File

@@ -0,0 +1,10 @@
{
"provider": "OpenAI",
"model": "text-embedding-3-small",
"name": "text-embedding-3-small",
"defaultToken": 512,
"maxToken": 3000,
"charsPointsPrice": 0
}

View File

@@ -3,9 +3,8 @@
"model": "text-embedding-ada-002",
"name": "text-embedding-ada-002",
"defaultToken": 512, // 默认分块 token
"maxToken": 3000, // 最大分块 token
"weight": 0, // 权重
"defaultToken": 512,
"maxToken": 3000,
"charsPointsPrice": 0 // 积分/1k token
"charsPointsPrice": 0
}

View File

@@ -1,33 +1,28 @@
{
"provider": "OpenAI",
"model": "gpt-4o-mini",
"name": "GPT-4o-mini", // alias
"name": "GPT-4o-mini",
"maxContext": 125000, // 最大上下文
"maxResponse": 16000, // 最大回复
"quoteMaxToken": 60000, // 最大引用
"maxTemperature": 1.2, // 最大温度
"presencePenaltyRange": [-2, 2], // 惩罚系数范围
"frequencyPenaltyRange": [-2, 2], // 频率惩罚系数范围
"responseFormatList": ["text", "json_object", "json_schema"], // 响应格式
"showStopSign": true, // 是否显示停止符号
"censor": false,
"charsPointsPrice": 0,
"vision": true, // 是否支持图片识别
"toolChoice": true, // 是否支持工具调用
"functionCall": false, // 是否支持函数调用(一般都可以 false 了,基本不用了)
"defaultSystemChatPrompt": "", // 默认系统提示
"maxContext": 125000,
"maxResponse": 16000,
"quoteMaxToken": 60000,
"maxTemperature": 1.2,
"datasetProcess": true, // 用于知识库文本处理
"usedInClassify": true, // 用于问题分类
"customCQPrompt": "", // 自定义问题分类提示
"usedInExtractFields": true, // 用于提取字段
"customExtractPrompt": "", // 自定义提取提示
"usedInToolCall": true, // 用于工具调用
"usedInQueryExtension": true, // 用于问题优化
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {}, // 额外的自定义 body
"fieldMap": {}, // body 字段映射
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"censor": false, // 是否开启敏感词过滤
"charsPointsPrice": 0 // n 积分/1k token
"defaultConfig": {},
"fieldMap": {}
}

View File

@@ -0,0 +1,21 @@
import { connectionMongo, getMongoModel } from '../../../common/mongo';
const { Schema } = connectionMongo;
import type { SystemModelSchemaType } from '../type';
/** Persisted per-model overrides and fully custom system-model definitions. */
const SystemModelSchema = new Schema({
  // Unique model identifier (e.g. "gpt-4o-mini"); one document per model.
  model: { type: String, required: true, unique: true },
  // Free-form config object; overlaid on the bundled file defaults at load time.
  metadata: { type: Object, required: true, default: {} }
});

export const MongoSystemModel = getMongoModel<SystemModelSchemaType>(
  'system_models',
  SystemModelSchema
);

View File

@@ -0,0 +1,118 @@
import path from 'path';
import * as fs from 'fs';
import { SystemModelItemType } from '../type';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { MongoSystemModel } from './schema';
import {
LLMModelItemType,
EmbeddingModelItemType,
TTSModelType,
STTModelType,
ReRankModelItemType
} from '@fastgpt/global/core/ai/model.d';
import { debounce } from 'lodash';
type FolderBaseType = `${ModelTypeEnum}`;
/**
 * Load every system model into the module-level global registries.
 *
 * Reads the bundled model definitions from the per-type folders
 * (llm/, embedding/, tts/, stt/, rerank/), overlays any overrides stored
 * in MongoDB (matched by `model`), then appends DB-only entries as custom
 * models. Populates `global.systemModelList`, `global.systemActiveModelList`
 * and the per-type lookup maps (each keyed by both `model` and `name`).
 *
 * @param init - when true, force a reload even if models are already loaded.
 */
export const loadSystemModels = async (init = false) => {
  // List the definition file names under one model-type folder,
  // resolved relative to this module.
  const getModelNameList = (base: FolderBaseType) => {
    const currentFileUrl = new URL(import.meta.url);
    // NOTE(review): URL.pathname is not a usable filesystem path on Windows
    // (drive letters, percent-encoding) — url.fileURLToPath would be safer;
    // confirm the deploy target before relying on this.
    const modelsPath = path.join(path.dirname(currentFileUrl.pathname), base);
    // readdirSync already returns string[]; no assertion needed.
    return fs.readdirSync(modelsPath);
  };

  // Register a model in the global list and, when active, in the
  // per-type lookup maps (keyed by both `model` id and display `name`).
  const pushModel = (model: SystemModelItemType) => {
    global.systemModelList.push(model);

    if (model.isActive) {
      global.systemActiveModelList.push(model);

      if (model.type === ModelTypeEnum.llm) {
        global.llmModelMap.set(model.model, model);
        global.llmModelMap.set(model.name, model);
      } else if (model.type === ModelTypeEnum.embedding) {
        global.embeddingModelMap.set(model.model, model);
        global.embeddingModelMap.set(model.name, model);
      } else if (model.type === ModelTypeEnum.tts) {
        global.ttsModelMap.set(model.model, model);
        global.ttsModelMap.set(model.name, model);
      } else if (model.type === ModelTypeEnum.stt) {
        global.sttModelMap.set(model.model, model);
        global.sttModelMap.set(model.name, model);
      } else if (model.type === ModelTypeEnum.rerank) {
        global.reRankModelMap.set(model.model, model);
        global.reRankModelMap.set(model.name, model);
      }
    }
  };

  // Already loaded and not forcing a refresh — nothing to do.
  if (!init && global.systemModelList && global.systemModelList.length > 0) return;

  const dbModels = await MongoSystemModel.find({}).lean();

  // Reset all global registries before (re)loading.
  global.systemModelList = [];
  global.systemActiveModelList = [];
  global.llmModelMap = new Map<string, LLMModelItemType>();
  global.embeddingModelMap = new Map<string, EmbeddingModelItemType>();
  global.ttsModelMap = new Map<string, TTSModelType>();
  global.sttModelMap = new Map<string, STTModelType>();
  global.reRankModelMap = new Map<string, ReRankModelItemType>();

  const baseList: FolderBaseType[] = [
    ModelTypeEnum.llm,
    ModelTypeEnum.embedding,
    ModelTypeEnum.tts,
    ModelTypeEnum.stt,
    ModelTypeEnum.rerank
  ];

  // System models: bundled defaults, overlaid with any DB metadata.
  await Promise.all(
    baseList.map(async (base) => {
      const modelList = getModelNameList(base);
      const nameList = modelList.map((name) => `${base}/${name}`);

      await Promise.all(
        nameList.map(async (name) => {
          const fileContent = (await import(`./${name}`))?.default as SystemModelItemType;
          // DB metadata (admin edits) wins over the bundled file defaults.
          const dbModel = dbModels.find((item) => item.model === fileContent.model);

          const model: any = {
            ...fileContent,
            ...dbModel?.metadata,
            type: dbModel?.metadata?.type || base,
            isCustom: false
          };

          pushModel(model);
        })
      );
    })
  );

  // Custom models: DB entries with no matching bundled definition.
  dbModels.forEach((dbModel) => {
    // `.some` expresses the existence check more directly than `.find`.
    if (global.systemModelList.some((item) => item.model === dbModel.model)) return;
    pushModel({
      ...dbModel.metadata,
      isCustom: true
    });
  });

  console.log('Load models success', JSON.stringify(global.systemActiveModelList, null, 2));
};
/**
 * Watch the system-model collection for changes and reload the in-memory
 * model registries when a change occurs.
 *
 * Reloads are debounced (500 ms) so a burst of writes triggers a single
 * refresh. Reload failures are logged instead of thrown, since the handler
 * runs outside any request context — previously the error was silently
 * swallowed, hiding broken configs.
 */
export const watchSystemModelUpdate = () => {
  const changeStream = MongoSystemModel.watch();

  changeStream.on(
    'change',
    debounce(async () => {
      try {
        // Force a full reload from disk + DB.
        await loadSystemModels(true);
      } catch (error) {
        // Best-effort refresh: keep the process alive but surface the failure.
        console.error('Reload system models failed', error);
      }
    }, 500)
  );
};