Compare commits

...

67 Commits

Author SHA1 Message Date
archer
5be57da407 fix: v1 api 2023-06-24 21:39:34 +08:00
archer
057c3411b9 perf: fetch error 2023-06-24 21:21:53 +08:00
archer
83d755ad0e feat: limit prompt 2023-06-24 18:55:46 +08:00
JustSong
ec9852fc63 docs: update README (#103) 2023-06-24 00:38:08 +08:00
archer
4e6f8aefe8 docs 2023-06-23 23:40:07 +08:00
archer
11352b754a fix: model 2023-06-23 23:21:59 +08:00
archer
965ad34283 docs 2023-06-23 23:16:49 +08:00
archer
986206b691 perf: sse response 2023-06-23 23:11:22 +08:00
archer
6787f19d78 feat: price 2023-06-23 18:05:53 +08:00
archer
64c35eaa3a docs 2023-06-23 17:43:14 +08:00
archer
41ada6ecda perf: keys 2023-06-23 17:12:52 +08:00
archer
ae1f7a888e perf: token count;feat: chunk size 2023-06-23 15:08:30 +08:00
archer
9aace871ff fix: ssr 2023-06-21 18:04:36 +08:00
moonrailgun
39739f9305 chore: fix admin build problem (#101) 2023-06-21 15:40:51 +08:00
archer
ce757d918b fix: ssr 2023-06-21 15:22:07 +08:00
archer
d592d4e99a markdown 2023-06-21 15:22:06 +08:00
moonrailgun
11ce10cd80 feat: add zh translation and change title (#100) 2023-06-21 15:21:24 +08:00
archer
6fb312ccfd link text 2023-06-20 10:41:17 +08:00
archer
3166376173 fix: template 2023-06-20 10:40:49 +08:00
archer
a02a528737 perf: my models 2023-06-19 21:08:32 +08:00
archer
dd4ca27dc7 perf: deploy 2023-06-19 20:00:54 +08:00
archer
f2d37c30a5 feat: baidu statistic 2023-06-19 17:28:25 +08:00
archer
1d236f87ae perf: markdown redraw 2023-06-19 16:50:14 +08:00
archer
3b515c3c2d fix: choices empty 2023-06-19 11:30:26 +08:00
archer
e95f83ec8e docs 2023-06-18 23:52:40 +08:00
archer
03793c66da README 2023-06-18 23:25:16 +08:00
archer
84daf85393 fix: base url 2023-06-18 22:38:55 +08:00
archer
6c62d80a4c fix: refresh page 2023-06-18 22:19:49 +08:00
archer
ff2043c0fb feat: maxToken setting 2023-06-18 21:23:36 +08:00
archer
ee9afa310a feat: openapi v2 chat 2023-06-18 21:06:07 +08:00
archer
2b93ae2d00 fix: time conf 2023-06-17 21:53:04 +08:00
archer
00c93a63cd perf: queue link 2023-06-17 21:27:44 +08:00
archer
61447c60ac feat: new app page 2023-06-17 17:31:38 +08:00
archer
df2fda6176 feat: auth key 2023-06-16 00:26:11 +08:00
archer
bc2504832f fix: nextjs version 2023-06-16 00:03:52 +08:00
archer
33ffd9d7dd loading 2023-06-15 22:36:09 +08:00
archer
80578a08c8 perf: app store 2023-06-15 22:17:54 +08:00
archer
2463e11cb9 feat: date picker 2023-06-15 21:44:31 +08:00
archer
4cbe4ebdc3 perf: image 2023-06-15 20:06:56 +08:00
archer
bb36e637e0 perf: code 2023-06-15 17:32:35 +08:00
archer
6f9e929298 perf: code 2023-06-15 17:32:12 +08:00
archer
bf1592d2c6 feat: admin set share 2023-06-15 00:21:27 +08:00
archer
c6259fca78 perf: export source 2023-06-14 23:14:26 +08:00
archer
cf3eb3b7b5 perf: upload img 2023-06-14 22:45:47 +08:00
archer
7c52cec0ea perf: binary avatar 2023-06-14 22:26:11 +08:00
archer
7c159d8aba fix: markdown 2023-06-14 20:58:11 +08:00
archer
07f8e18c10 fix: gpt35 4k 2023-06-14 20:54:34 +08:00
archer
e4aeee7be3 perf: token count 2023-06-14 20:02:43 +08:00
archer
8036ed6143 perf: qa 2023-06-14 14:33:26 +08:00
archer
85e6a0f38d fix: token limit 2023-06-14 10:01:00 +08:00
archer
dab70378bb feat: gpt35-16k 2023-06-14 09:45:49 +08:00
archer
0a0febd2e6 perf: admin 2023-06-14 00:24:50 +08:00
archer
391332c8dd perf: ssr 2023-06-13 20:07:32 +08:00
archer
89e7c1abca perf: admin 2023-06-13 11:49:26 +08:00
archer
fc3c360985 fix: context menu 2023-06-13 10:52:44 +08:00
archer
006ba3b877 fix: mermaid 2023-06-12 23:17:48 +08:00
archer
5a534aa630 perf: del loading 2023-06-12 22:12:29 +08:00
archer
98e3c0a41f perf: mermaid overflow 2023-06-12 22:02:06 +08:00
archer
99e47849f5 perf: kb test 2023-06-12 21:59:30 +08:00
archer
ca4cd8af9d fix: sse 2023-06-12 20:55:37 +08:00
archer
36a0ea7e43 fix: sensitive check 2023-06-12 18:29:22 +08:00
archer
71dd7f3e6c feat: search test 2023-06-12 18:18:08 +08:00
archer
6ac7119edf feat: kb UI 2023-06-12 15:11:29 +08:00
archer
daf1148bb1 fix: package 2023-06-12 10:27:59 +08:00
archer
82b05b3d94 perf: share message 2023-06-12 10:24:12 +08:00
archer
9ab5cef516 fix: tag theme 2023-06-11 21:40:43 +08:00
archer
1ac3edccab perf: ui 2023-06-11 19:41:19 +08:00
171 changed files with 7857 additions and 5303 deletions

View File

@@ -1,6 +1,6 @@
# Fast GPT
Fast GPT 允许你使用自己的 openai API KEY 来快速的调用 openai 接口,目前集成了 Gpt35, Gpt4 和 embedding. 可构建自己的知识库。
Fast GPT 允许你使用自己的 openai API KEY 来快速的调用 openai 接口,目前集成了 Gpt35, Gpt4 和 embedding. 可构建自己的知识库。并且 OpenAPI Chat 接口兼容 OpenAI 接口,意味着你只需修改 BaseUrl 和 Authorization 即可在已有项目基础上接入 FastGpt
## 🛸 在线体验
@@ -37,11 +37,18 @@ Fast GPT 允许你使用自己的 openai API KEY 来快速的调用 openai 接
## 👀 其他
- [FastGpt 常见问题](https://kjqvjse66l.feishu.cn/docx/HtrgdT0pkonP4kxGx8qcu6XDnGh)
- [docker 部署教程](https://www.bilibili.com/video/BV1jo4y147fT/)
- [公众号接入](https://www.bilibili.com/video/BV1xh4y1t7fy/)
- [FastGpt + Laf 最佳实践,将知识库装入公众号,点击去 Laf 公众号体验效果](https://b4jky7-fastgpt.oss.laf.run/lafercode.png)
- [FastGpt V3.4 更新集合](https://www.bilibili.com/video/BV1Lo4y147Qh/?vd_source=92041a1a395f852f9d89158eaa3f61b4)
- [FastGpt 知识库演示](https://www.bilibili.com/video/BV1Wo4y1p7i1/)
## Powered by
- [TuShan: 5 分钟搭建后台管理系统](https://github.com/msgbyte/tushan)
- [Laf: 3 分钟快速接入三方应用](https://github.com/labring/laf)
- [Sealos: 快速部署集群应用](https://github.com/labring/sealos)
- [One API: 令牌管理 & 二次分发,支持 Azure](https://github.com/songquanpeng/one-api)
## 🌟 Star History
[![Star History Chart](https://api.star-history.com/svg?repos=c121914yu/FastGPT&type=Date)](https://star-history.com/#c121914yu/FastGPT&Date)

2
admin/.gitignore vendored
View File

@@ -1 +1 @@
node_modules/
node_modules/

View File

@@ -2,13 +2,14 @@
## 项目原理
使用 tushan 项目做前端,然后构造了一个与 mongodb 做沟通的 API 做后端,可以做到创建、修改和删除用户
使用 [Tushan](https://tushan.msgbyte.com/) 项目做前端,然后构造了一个与 mongodb 做沟通的 API 做后端,可以做到创建、修改和删除用户
## 开发
1. 复制 .env.template 文件,添加环境变量
2. pnpm i
3. pnpm dev
1. `cp .env.template .env.local`: 复制 .env.template 文件,添加环境变量
2. `pnpm i`
3. `pnpm dev`
4. 打开 `http://localhost:5173/` 访问前端页面
## 部署
@@ -25,7 +26,8 @@ MONGODB_NAME=fastgpt
ADMIN_USER=username
ADMIN_PASS=password
ADMIN_SECRET=any
VITE_PUBLIC_SERVER_URL=http://localhost:3001 # 和server.js一致
PARENT_URL=http://localhost:3000
PARENT_ROOT_KEY=rootkey
```
## sealos 部署
@@ -33,7 +35,7 @@ VITE_PUBLIC_SERVER_URL=http://localhost:3001 # 和server.js一致
1. 进入 sealos 官网: https://cloud.sealos.io/
2. 打开 App Launchpad(应用管理) 工具
3. 新建应用
1. 镜像名: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-admin:latest
1. 镜像名: `registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-admin:latest`
2. 容器端口: 3001
3. 环境变量: 参考上面
4. 打开外网访问开关

View File

@@ -4,7 +4,7 @@
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/logo.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Tushan</title>
<title>Fast GPT</title>
</head>
<body>
<div id="root"></div>

View File

@@ -20,11 +20,12 @@
"express": "^4.18.2",
"jsonwebtoken": "^9.0.0",
"mongoose": "^7.2.2",
"nodemon": "^2.0.22",
"react": "^18.2.0",
"react-admin": "^4.11.0",
"react-dom": "^18.2.0",
"react-i18next": "^12.3.1",
"tushan": "^0.2.22"
"tushan": "^0.2.30"
},
"devDependencies": {
"@types/jsonexport": "^3.0.2",

3636
admin/pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -15,6 +15,15 @@ useAppRoute(app);
useKbRoute(app);
useSystemRoute(app);
app.get('/*', (req, res) => {
res.sendFile(new URL('dist/index.html', import.meta.url).pathname);
});
app.use((err, req, res, next) => {
res.sendFile(new URL('dist/index.html', import.meta.url).pathname);
});
const PORT = process.env.PORT || 3001;
app.listen(PORT, () => {
console.log(`Server is running on port ${PORT}`);

View File

@@ -1,4 +1,4 @@
import { User, Model, Kb } from '../schema.js';
import { Model, Kb } from '../schema.js';
import { auth } from './system.js';
export const useAppRoute = (app) => {
@@ -8,18 +8,19 @@ export const useAppRoute = (app) => {
const start = parseInt(req.query._start) || 0;
const end = parseInt(req.query._end) || 20;
const order = req.query._order === 'DESC' ? -1 : 1;
const sort = req.query._sort || '_id';
const userId = req.query.userId || '';
const sort = req.query._sort;
const name = req.query.name || '';
const id = req.query.id || '';
const where = {
...(userId ? { userId: userId } : {}),
name
...(name && { name: { $regex: name, $options: 'i' } }),
...(id && { _id: id })
};
const modelsRaw = await Model.find()
const modelsRaw = await Model.find(where)
.skip(start)
.limit(end - start)
.sort({ [sort]: order });
.sort({ [sort]: order, 'share.isShare': -1, 'share.collection': -1 });
const models = [];
@@ -37,15 +38,19 @@ export const useAppRoute = (app) => {
id: model._id.toString(),
userId: model.userId,
name: model.name,
intro: model.intro,
model: model.chat?.chatModel,
relatedKbs: kbNames, // 将relatedKbs的id转换为相应的Kb名称
searchMode: model.chat?.searchMode,
systemPrompt: model.chat?.systemPrompt || '',
temperature: model.chat?.temperature
temperature: model.chat?.temperature || 0,
'share.topNum': model.share?.topNum || 0,
'share.isShare': model.share?.isShare || false,
'share.collection': model.share?.collection || 0
};
models.push(orderedModel);
}
const totalCount = await Model.countDocuments();
const totalCount = await Model.countDocuments(where);
res.header('Access-Control-Expose-Headers', 'X-Total-Count');
res.header('X-Total-Count', totalCount);
res.json(models);
@@ -54,4 +59,29 @@ export const useAppRoute = (app) => {
res.status(500).json({ error: 'Error fetching models', details: err.message });
}
});
// 修改 app 信息
app.put('/models/:id', auth(), async (req, res) => {
try {
const _id = req.params.id;
let {
share: { isShare, topNum },
intro
} = req.body;
await Model.findByIdAndUpdate(_id, {
$set: {
intro: intro,
'share.topNum': Number(topNum),
'share.isShare': isShare === 'true' || isShare === true
}
});
res.json({});
} catch (err) {
console.log(`Error updating user: ${err}`);
res.status(500).json({ error: 'Error updating user' });
}
});
};

View File

@@ -10,7 +10,21 @@ export const useKbRoute = (app) => {
const order = req.query._order === 'DESC' ? -1 : 1;
const sort = req.query._sort || '_id';
const tag = req.query.tag || '';
const where = { tags: { $elemMatch: { $regex: tag, $options: 'i' } } };
const name = req.query.name || '';
const where = {
...(name
? {
name: { $regex: name, $options: 'i' }
}
: {}),
...(tag
? {
tags: { $elemMatch: { $regex: tag, $options: 'i' } }
}
: {})
};
console.log(where);
const kbsRaw = await Kb.find(where)
.skip(start)

View File

@@ -110,8 +110,7 @@ export const auth = () => {
try {
const authorization = req.headers.authorization;
if (!authorization) {
res.status(401).end('not found authorization in headers');
return;
return next(new Error("unAuthorization"))
}
const token = authorization.slice('Bearer '.length);

View File

@@ -9,6 +9,52 @@ const hashPassword = (psw) => {
};
export const useUserRoute = (app) => {
// 统计近 30 天注册用户数量
app.get('/users/data', auth(), async (req, res) => {
try {
const day = 60;
let startCount = await User.countDocuments({
createTime: { $lt: new Date(Date.now() - day * 24 * 60 * 60 * 1000) }
});
const usersRaw = await User.aggregate([
{ $match: { createTime: { $gte: new Date(Date.now() - day * 24 * 60 * 60 * 1000) } } },
{
$group: {
_id: {
year: { $year: '$createTime' },
month: { $month: '$createTime' },
day: { $dayOfMonth: '$createTime' }
},
count: { $sum: 1 }
}
},
{
$project: {
_id: 0,
date: { $dateFromParts: { year: '$_id.year', month: '$_id.month', day: '$_id.day' } },
count: 1
}
},
{ $sort: { date: 1 } }
]);
const countResult = usersRaw.map((item) => {
const increaseRate = `${((item.count / startCount) * 100).toFixed(2)}%`;
startCount += item.count;
return {
date: item.date,
count: startCount,
increase: item.count,
increaseRate
};
});
res.json(countResult);
} catch (err) {
console.log(`Error fetching users: ${err}`);
res.status(500).json({ error: 'Error fetching users' });
}
});
// 获取用户列表
app.get('/users', auth(), async (req, res) => {
try {

View File

@@ -61,14 +61,15 @@ const modelSchema = new mongoose.Schema({
name: String,
avatar: String,
status: String,
intro: String,
chat: {
relatedKbs: [mongoose.Schema.Types.ObjectId],
searchMode: String,
systemPrompt: String,
temperature: Number,
chatModel: String
},
share: {
topNum: Number,
isShare: Boolean,
isShareDetail: Boolean,
intro: String,
@@ -85,18 +86,6 @@ const modelSchema = new mongoose.Schema({
});
const SystemSchema = new mongoose.Schema({
openAIKeys: {
type: String,
default: ''
},
openAITrainingKeys: {
type: String,
default: ''
},
gpt4Key: {
type: String,
default: ''
},
vectorMaxProcess: {
type: Number,
default: 10

View File

@@ -4,15 +4,19 @@ import {
ListTable,
Resource,
Tushan,
fetchJSON
fetchJSON,
TushanContextProps,
HTTPClient
} from 'tushan';
import { authProvider } from './auth';
import { userFields, payFields, kbFields, ModelFields, SystemFields } from './fields';
import { Dashboard } from './Dashboard';
import { IconUser, IconApps, IconBook, IconStamp } from 'tushan/icon';
import { i18nZhTranslation } from 'tushan/client/i18n/resources/zh';
const authStorageKey = 'tushan:auth';
const httpClient: typeof fetchJSON = (url, options = {}) => {
const httpClient: HTTPClient = (url, options = {}) => {
try {
if (!options.headers) {
options.headers = new Headers({ Accept: 'application/json' });
@@ -28,11 +32,22 @@ const httpClient: typeof fetchJSON = (url, options = {}) => {
const dataProvider = jsonServerProvider(import.meta.env.VITE_PUBLIC_SERVER_URL, httpClient);
const i18n: TushanContextProps['i18n'] = {
languages: [
{
key: 'zh',
label: '简体中文',
translation: i18nZhTranslation
}
]
};
function App() {
return (
<Tushan
basename="/"
header={'FastGpt-Admin'}
i18n={i18n}
dataProvider={dataProvider}
authProvider={authProvider}
dashboard={<Dashboard />}
@@ -40,6 +55,7 @@ function App() {
<Resource
name="users"
label="用户信息"
icon={<IconUser />}
list={
<ListTable
filter={[
@@ -52,10 +68,29 @@ function App() {
/>
}
/>
<Resource
name="models"
icon={<IconApps />}
label="应用"
list={
<ListTable
filter={[
createTextField('id', {
label: 'id'
}),
createTextField('name', {
label: 'name'
})
]}
fields={ModelFields}
action={{ detail: true, edit: true }}
/>
}
/>
<Resource
name="pays"
label="支付记录"
icon={<IconStamp />}
list={
<ListTable
filter={[
@@ -71,9 +106,13 @@ function App() {
<Resource
name="kbs"
label="知识库"
icon={<IconBook />}
list={
<ListTable
filter={[
createTextField('name', {
label: 'name'
}),
createTextField('tag', {
label: 'tag'
})
@@ -83,11 +122,7 @@ function App() {
/>
}
/>
<Resource
name="models"
label="应用"
list={<ListTable fields={ModelFields} action={{ detail: true }} />}
/>
<Resource
name="system"
label="系统"

View File

@@ -2,22 +2,36 @@ import { Card, Link, Space, Grid, Divider, Typography } from '@arco-design/web-r
import { IconApps, IconUser, IconUserGroup } from 'tushan/icon';
import React, { useState, useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import {
XAxis,
YAxis,
CartesianGrid,
Tooltip,
ResponsiveContainer,
AreaChart,
Area
} from 'tushan/chart';
import dayjs from 'dayjs';
const authStorageKey = 'tushan:auth';
type UsersChartDataType = { count: number; date: string; increase: number; increaseRate: string };
export const Dashboard: React.FC = React.memo(() => {
const [userCount, setUserCount] = useState(0); //用户数量
const [kbCount, setkbCount] = useState(0);
const [modelCount, setmodelCount] = useState(0);
useEffect(() => {
const fetchCounts = async () => {
const baseUrl = import.meta.env.VITE_PUBLIC_SERVER_URL;
const { token } = JSON.parse(window.localStorage.getItem(authStorageKey) ?? '{}');
const [usersData, setUsersData] = useState<UsersChartDataType[]>([]);
const headers = {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
};
useEffect(() => {
const baseUrl = import.meta.env.VITE_PUBLIC_SERVER_URL;
const { token } = JSON.parse(window.localStorage.getItem(authStorageKey) ?? '{}');
const headers = {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
};
const fetchCounts = async () => {
const userResponse = await fetch(`${baseUrl}/users?_end=1`, {
headers
});
@@ -31,7 +45,6 @@ export const Dashboard: React.FC = React.memo(() => {
const userTotalCount = userResponse.headers.get('X-Total-Count');
const kbTotalCount = kbResponse.headers.get('X-Total-Count');
const modelTotalCount = modelResponse.headers.get('X-Total-Count');
console.log(userTotalCount);
if (userTotalCount) {
setUserCount(Number(userTotalCount));
@@ -43,8 +56,20 @@ export const Dashboard: React.FC = React.memo(() => {
setmodelCount(Number(modelTotalCount));
}
};
const fetchUserData = async () => {
const userResponse: UsersChartDataType[] = await fetch(`${baseUrl}/users/data`, {
headers
}).then((res) => res.json());
setUsersData(
userResponse.map((item) => ({
...item,
date: dayjs(item.date).format('MM/DD')
}))
);
};
fetchCounts();
fetchUserData();
}, []);
return (
@@ -71,11 +96,12 @@ export const Dashboard: React.FC = React.memo(() => {
<Divider type="vertical" style={{ height: 40 }} />
<Grid.Col flex={1} style={{ paddingLeft: '1rem' }}>
<DataItem icon={<IconApps />} title={'AI模型'} count={modelCount} />
<DataItem icon={<IconApps />} title={'应用'} count={modelCount} />
</Grid.Col>
</Grid.Row>
<Divider />
<UserChart data={usersData} />
</Card>
</Space>
</div>
@@ -84,38 +110,31 @@ export const Dashboard: React.FC = React.memo(() => {
});
Dashboard.displayName = 'Dashboard';
const DashboardItem: React.FC<
React.PropsWithChildren<{
title: string;
href?: string;
}>
> = React.memo((props) => {
const { t } = useTranslation();
const DashboardItem = React.memo(
(props: { title: string; href?: string; children: React.ReactNode }) => {
const { t } = useTranslation();
return (
<Card
title={props.title}
extra={
props.href && (
<Link target="_blank" href={props.href}>
{t('tushan.dashboard.more')}
</Link>
)
}
bordered={false}
style={{ overflow: 'hidden' }}
>
{props.children}
</Card>
);
});
return (
<Card
title={props.title}
extra={
props.href && (
<Link target="_blank" href={props.href}>
{t('tushan.dashboard.more')}
</Link>
)
}
bordered={false}
style={{ overflow: 'hidden' }}
>
{props.children}
</Card>
);
}
);
DashboardItem.displayName = 'DashboardItem';
const DataItem: React.FC<{
icon: React.ReactElement;
title: string;
count: number;
}> = React.memo((props) => {
const DataItem = React.memo((props: { icon: React.ReactElement; title: string; count: number }) => {
return (
<Space>
<div
@@ -141,3 +160,65 @@ const DataItem: React.FC<{
);
});
DataItem.displayName = 'DataItem';
const CustomTooltip = ({ active, payload }: any) => {
const data = payload?.[0]?.payload as UsersChartDataType;
if (active && data) {
return (
<div
style={{
background: 'white',
padding: '5px 8px',
borderRadius: '8px',
boxShadow: '2px 2px 5px rgba(0,0,0,0.2)'
}}
>
<p className="label">
count: <strong>{data.count}</strong>
</p>
<p className="label">
increase: <strong>{data.increase}</strong>
</p>
<p className="label">
increaseRate: <strong>{data.increaseRate}</strong>
</p>
</div>
);
}
return null;
};
const UserChart = ({ data }: { data: UsersChartDataType[] }) => {
return (
<ResponsiveContainer width="100%" height={320}>
<AreaChart
width={730}
height={250}
data={data}
margin={{ top: 10, right: 30, left: 0, bottom: 0 }}
>
<defs>
<linearGradient id="colorUv" x1="0" y1="0" x2="0" y2="1">
<stop offset="5%" stopColor="#8884d8" stopOpacity={0.8} />
<stop offset="95%" stopColor="#8884d8" stopOpacity={0} />
</linearGradient>
<linearGradient id="colorPv" x1="0" y1="0" x2="0" y2="1">
<stop offset="5%" stopColor="#82ca9d" stopOpacity={0.8} />
<stop offset="95%" stopColor="#82ca9d" stopOpacity={0} />
</linearGradient>
</defs>
<XAxis dataKey="date" />
<YAxis />
<CartesianGrid strokeDasharray="3 3" />
<Tooltip content={<CustomTooltip />} />
<Area
type="monotone"
dataKey="count"
stroke="#82ca9d"
fillOpacity={1}
fill="url(#colorPv)"
/>
</AreaChart>
</ResponsiveContainer>
);
};

View File

@@ -2,7 +2,7 @@ import { createTextField, createNumberField } from 'tushan';
export const userFields = [
createTextField('id', { label: 'ID' }),
createTextField('username', { label: '用户名' }),
createTextField('username', { label: '用户名', edit: { hidden: true } }),
createNumberField('balance', { label: '余额', list: { sort: true } }),
createTextField('createTime', { label: 'Create Time', list: { sort: true } }),
createTextField('password', { label: '密码', list: { hidden: true } })
@@ -19,30 +19,32 @@ export const payFields = [
export const kbFields = [
createTextField('id', { label: 'ID' }),
createTextField('userId', { label: '所属用户' }),
createTextField('userId', { label: '所属用户', edit: { hidden: true } }),
createTextField('name', { label: '知识库' }),
createTextField('tags', { label: 'Tags' })
];
export const ModelFields = [
createTextField('id', { label: 'ID' }),
createTextField('userId', { label: '所属用户' }),
createTextField('userId', { label: '所属用户', list: { hidden: true }, edit: { hidden: true } }),
createTextField('name', { label: '名字' }),
createTextField('relatedKbs', { label: '引用的知识库' }),
createTextField('searchMode', { label: '搜索模式' }),
createTextField('model', { label: '模型', edit: { hidden: true } }),
createTextField('share.collection', { label: '收藏数', list: { sort: true } }),
createTextField('share.topNum', { label: '置顶等级', list: { sort: true } }),
createTextField('share.isShare', { label: '是否分享(true,false)' }),
createTextField('intro', { label: '介绍', list: { width: 400 } }),
createTextField('relatedKbs', { label: '引用的知识库', list: { hidden: true } }),
createTextField('temperature', { label: '温度' }),
createTextField('systemPrompt', {
label: '提示词',
list: {
width: 400
width: 400,
hidden: true
}
}),
createTextField('temperature', { label: '温度' })
})
];
export const SystemFields = [
createTextField('openAIKeys', { label: 'openAIKeys逗号隔开' }),
createTextField('openAITrainingKeys', { label: 'openAITrainingKeys' }),
createTextField('gpt4Key', { label: 'gpt4Key' }),
createTextField('vectorMaxProcess', { label: '向量最大进程' }),
createTextField('qaMaxProcess', { label: 'qa最大进程' }),
createTextField('pgIvfflatProbe', { label: 'pg 探针数量' }),

View File

@@ -1,5 +1,7 @@
# 运行端口,如果不是 3000 口运行,需要改成其他的。注意:不是改了这个变量就会变成其他端口,而是因为改成其他端口,才用这个变量。
PORT=3000
# database max link
DB_MAX_LINK=15
# 代理
# AXIOS_PROXY_HOST=127.0.0.1
# AXIOS_PROXY_PORT=7890
@@ -15,12 +17,12 @@ aliTemplateCode=xxxx
TOKEN_KEY=dfdasfdas
# root key, 最高权限
ROOT_KEY=fdafasd
# openai
# OPENAI_BASE_URL=http://ai.openai.com/v1
# OPENAI_BASE_URL_AUTH=可选安全凭证,会放到 header.auth 里
OPENAIKEY=sk-xxx
OPENAI_TRAINING_KEY=sk-xxx
GPT4KEY=sk-xxx
# 使用 oneapi
# ONEAPI_URL=https://xxxx.cloud.sealos.io/v1
# ONEAPI_KEY=sk-xxxx
# openai 的基本地址(国外的可以忽略,默认走 api.openai.com。不用 oneapi 的话需要下面 2 个参数,用户的 key 也会走下面的参数
OPENAI_BASE_URL=https://xxxx.cloud.sealos.io/openai/v1
OPENAIKEY=sk-xxxx
# db
MONGODB_URI=mongodb://username:password@0.0.0.0:27017/?authSource=admin
MONGODB_NAME=fastgpt

View File

@@ -13,9 +13,9 @@
"@alicloud/openapi-client": "^0.4.5",
"@alicloud/tea-util": "^1.4.5",
"@chakra-ui/icons": "^2.0.17",
"@chakra-ui/react": "^2.5.1",
"@chakra-ui/system": "^2.5.5",
"@dqbd/tiktoken": "^1.0.6",
"@chakra-ui/react": "^2.7.0",
"@chakra-ui/system": "^2.5.8",
"@dqbd/tiktoken": "^1.0.7",
"@emotion/react": "^11.10.6",
"@emotion/styled": "^11.10.6",
"@next/font": "13.1.6",
@@ -24,30 +24,30 @@
"axios": "^1.3.3",
"cookie": "^0.5.0",
"crypto": "^1.0.1",
"date-fns": "^2.30.0",
"dayjs": "^1.11.7",
"eventsource-parser": "^0.1.0",
"formidable": "^2.1.1",
"framer-motion": "^9.0.6",
"graphemer": "^1.4.0",
"hyperdown": "^2.4.29",
"immer": "^9.0.19",
"jsonwebtoken": "^9.0.0",
"lodash": "^4.17.21",
"mammoth": "^1.5.1",
"mermaid": "^8.13.5",
"mermaid": "^10.2.3",
"mongoose": "^6.10.0",
"nanoid": "^4.0.1",
"next": "13.1.6",
"nextjs-cors": "^2.1.2",
"nodemailer": "^6.9.1",
"nprogress": "^0.2.0",
"openai": "^3.2.1",
"openai": "^3.3.0",
"papaparse": "^5.4.1",
"pg": "^8.10.0",
"react": "18.2.0",
"react-day-picker": "^8.7.1",
"react-dom": "18.2.0",
"react-hook-form": "^7.43.1",
"react-markdown": "^8.0.5",
"react-markdown": "^8.0.7",
"react-syntax-highlighter": "^15.5.0",
"rehype-katex": "^6.0.2",
"remark-breaks": "^3.0.3",

1463
client/pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,26 +1,18 @@
### 常见问题
**Git 地址**
[项目地址,完全开源,随便用。](https://github.com/c121914yu/FastGPT)
**问题文档**
[先看文档,再提问](https://kjqvjse66l.feishu.cn/docx/HtrgdT0pkonP4kxGx8qcu6XDnGh)
**删除和复制**
电脑端:聊天内容右侧有复制和删除的图标。
移动端:点击对话头像,可以选择复制或删除该条内容。
**Git 地址**: [项目地址,完全开源,随便用。](https://github.com/c121914yu/FastGPT)
**问题文档**: [先看文档,再提问](https://kjqvjse66l.feishu.cn/docx/HtrgdT0pkonP4kxGx8qcu6XDnGh)
**价格表**
如果使用了自己的 Api Key网页上 openai 模型聊天不会计费。可以在账号页,看到详细账单。
| 计费项 | 价格: 元/ 1K tokens包含上下文|
| --- | --- |
| 知识库 - 索引 | 0.001 |
| chatgpt - 对话 | 0.025 |
| gpt4 - 对话 | 0.5 |
| 文件拆分 | 0.025 |
| chatgpt - 对话 | 0.015 |
| chatgpt16K - 对话 | 0.015 |
| gpt4 - 对话 | 0.1 |
| 文件拆分 | 0.015 |
**其他问题**
请 WX 联系: YNyiqi
| 交流群 | 小助手 |
| ----------------------- | -------------------- |
| ![](https://otnvvf-imgs.oss.laf.run/wxqun300.jpg) | ![](https://otnvvf-imgs.oss.laf.run/wx300.jpg) |

View File

@@ -19,9 +19,10 @@ FastGpt 项目完全开源,可随意私有化部署,去除平台风险忧虑
| 计费项 | 价格: 元/ 1K tokens包含上下文|
| --- | --- |
| 知识库 - 索引 | 0.001 |
| chatgpt - 对话 | 0.025 |
| gpt4 - 对话 | 0.5 |
| 文件拆分 | 0.025 |
| chatgpt - 对话 | 0.015 |
| chatgpt16K - 对话 | 0.015 |
| gpt4 - 对话 | 0.1 |
| 文件拆分 | 0.015 |
### 交流群/问题反馈

View File

@@ -1,5 +1,7 @@
### Fast GPT V3.8.4
### Fast GPT V3.9
1. 新增 - mermaid 导图兼容,可以在应用市场 'mermaid 导图' 进行体验
2. 优化 - 部分 UI 和账号页
2. 优化 - 知识库搜索速度
1. 限时优惠活动,更低价的 tokens
2. 新增 - 直接分段训练,可调节段落大小
3. 优化 - tokens 计算性能。
4. 优化 - key 池管理,结合 one-api 项目,实现更方便的 key 池管理,具体参考[docker 部署 FastGpt](https://github.com/c121914yu/FastGPT/blob/main/docs/deploy/docker.md)
5. 新增 - V2 版 OpenAPI可以在任意第三方套壳 ChatGpt 项目中直接使用 FastGpt 的应用,注意!是直接,不需要改任何代码。具体参考[API 文档中《在第三方应用中使用 FastGpt》](https://kjqvjse66l.feishu.cn/docx/DmLedTWtUoNGX8xui9ocdUEjnNh)

Binary file not shown.

Before

Width:  |  Height:  |  Size: 52 KiB

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 38 KiB

After

Width:  |  Height:  |  Size: 8.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 38 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 38 KiB

After

Width:  |  Height:  |  Size: 8.3 KiB

View File

@@ -0,0 +1,8 @@
var _hmt = _hmt || [];
(function () {
const hm = document.createElement('script');
hm.src = 'https://hm.baidu.com/hm.js?a5357e9dab086658bac0b6faf148882e';
const s = document.getElementsByTagName('script')[0];
s.parentNode.insertBefore(hm, s);
})();

View File

@@ -38,6 +38,7 @@ export const updateHistoryQuote = (params: {
chatId: string;
historyId: string;
quoteId: string;
sourceText: string;
}) => GET(`/chat/history/updateHistoryQuote`, params);
/**

View File

@@ -1,67 +1,103 @@
import { GUIDE_PROMPT_HEADER, NEW_CHATID_HEADER, QUOTE_LEN_HEADER } from '@/constants/chat';
import { Props, ChatResponseType } from '@/pages/api/openapi/v1/chat/completions';
import { sseResponseEventEnum } from '@/constants/chat';
import { getErrText } from '@/utils/tools';
import { parseStreamChunk } from '@/utils/adapt';
interface StreamFetchProps {
url: string;
data: any;
data: Props;
onMessage: (text: string) => void;
abortSignal: AbortController;
}
export const streamFetch = ({ url, data, onMessage, abortSignal }: StreamFetchProps) =>
new Promise<{
responseText: string;
newChatId: string;
systemPrompt: string;
quoteLen: number;
}>(async (resolve, reject) => {
try {
const res = await fetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify(data),
signal: abortSignal.signal
});
const reader = res.body?.getReader();
if (!reader) return;
export const streamFetch = ({ data, onMessage, abortSignal }: StreamFetchProps) =>
new Promise<ChatResponseType & { responseText: string; errMsg: string }>(
async (resolve, reject) => {
try {
const response = await window.fetch('/api/openapi/v1/chat/completions', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
signal: abortSignal.signal,
body: JSON.stringify({
...data,
stream: true
})
});
const decoder = new TextDecoder();
const newChatId = decodeURIComponent(res.headers.get(NEW_CHATID_HEADER) || '');
const systemPrompt = decodeURIComponent(res.headers.get(GUIDE_PROMPT_HEADER) || '').trim();
const quoteLen = res.headers.get(QUOTE_LEN_HEADER)
? Number(res.headers.get(QUOTE_LEN_HEADER))
: 0;
let responseText = '';
const read = async () => {
try {
const { done, value } = await reader?.read();
if (done) {
if (res.status === 200) {
resolve({ responseText, newChatId, quoteLen, systemPrompt });
} else {
const parseError = JSON.parse(responseText);
reject(parseError?.message || '请求异常');
}
return;
}
const text = decoder.decode(value);
responseText += text;
onMessage(text);
read();
} catch (err: any) {
if (err?.message === 'The user aborted a request.') {
return resolve({ responseText, newChatId, quoteLen, systemPrompt });
}
reject(typeof err === 'string' ? err : err?.message || '请求异常');
if (response.status !== 200) {
const err = await response.json();
return reject(err);
}
};
read();
} catch (err: any) {
console.log(err, '====');
reject(typeof err === 'string' ? err : err?.message || '请求异常');
if (!response?.body) {
throw new Error('Request Error');
}
const reader = response.body?.getReader();
// response data
let responseText = '';
let newChatId = '';
let quoteLen = 0;
let errMsg = '';
const read = async () => {
try {
const { done, value } = await reader.read();
if (done) {
if (response.status === 200) {
return resolve({
responseText,
newChatId,
quoteLen,
errMsg
});
} else {
return reject('响应过程出现异常~');
}
}
const chunkResponse = parseStreamChunk(value);
chunkResponse.forEach((item) => {
// parse json data
const data = (() => {
try {
return JSON.parse(item.data);
} catch (error) {
return item.data;
}
})();
if (item.event === sseResponseEventEnum.answer && data !== '[DONE]') {
const answer: string = data?.choices?.[0].delta.content || '';
onMessage(answer);
responseText += answer;
} else if (item.event === sseResponseEventEnum.chatResponse) {
const chatResponse = data as ChatResponseType;
newChatId = chatResponse.newChatId;
quoteLen = chatResponse.quoteLen || 0;
} else if (item.event === sseResponseEventEnum.error) {
errMsg = getErrText(data, '流响应错误');
}
});
read();
} catch (err: any) {
if (err?.message === 'The user aborted a request.') {
return resolve({
responseText,
newChatId,
quoteLen,
errMsg
});
}
reject(getErrText(err, '请求异常'));
}
};
read();
} catch (err: any) {
console.log(err);
reject(getErrText(err, '请求异常'));
}
}
});
);

View File

@@ -2,10 +2,15 @@ import { GET, POST, PUT, DELETE } from '../request';
import type { KbItemType } from '@/types/plugin';
import { RequestPaging } from '@/types/index';
import { TrainingModeEnum } from '@/constants/plugin';
import { type QuoteItemType } from '@/pages/api/openapi/kb/appKbSearch';
import {
Props as PushDataProps,
Response as PushDateResponse
} from '@/pages/api/openapi/kb/pushData';
import {
Props as SearchTestProps,
Response as SearchTestResponse
} from '@/pages/api/openapi/kb/searchTest';
export type KbUpdateParams = {
id: string;
@@ -37,7 +42,7 @@ export const getKbDataList = (data: GetKbDataListProps) =>
* 获取导出数据(不分页)
*/
export const getExportDataList = (kbId: string) =>
GET<[string, string][]>(
GET<[string, string, string][]>(
`/plugins/kb/data/exportModelData`,
{ kbId },
{
@@ -55,7 +60,7 @@ export const getTrainingData = (data: { kbId: string; init: boolean }) =>
}>(`/plugins/kb/data/getTrainingData`, data);
export const getKbDataItemById = (dataId: string) =>
GET(`/plugins/kb/data/getDataById`, { dataId });
GET<QuoteItemType>(`/plugins/kb/data/getDataById`, { dataId });
/**
* 直接push数据
@@ -83,3 +88,6 @@ export const postSplitData = (data: {
prompt: string;
mode: `${TrainingModeEnum}`;
}) => POST(`/openapi/text/pushData`, data);
export const searchText = (data: SearchTestProps) =>
POST<SearchTestResponse>(`/openapi/kb/searchTest`, data);

View File

@@ -4,6 +4,8 @@ import type { ChatItemType } from '@/types/chat';
export interface InitChatResponse {
chatId: string;
modelId: string;
systemPrompt?: string;
limitPrompt?: string;
model: {
name: string;
avatar: string;

View File

@@ -5,3 +5,5 @@ import type { InitDateResponse } from '@/pages/api/system/getInitData';
export const getInitData = () => GET<InitDateResponse>('/system/getInitData');
export const getSystemModelList = () => GET<ChatModelItemType[]>('/system/getModels');
export const uploadImg = (base64Img: string) => POST<string>('/system/uploadImage', { base64Img });

View File

@@ -66,7 +66,7 @@ export const loginOut = () => GET('/user/loginout');
export const putUserInfo = (data: UserUpdateParams) => PUT('/user/update', data);
export const getUserBills = (data: RequestPaging) =>
GET<PagingData<UserBillType>>(`/user/getBill?${Obj2Query(data)}`);
POST<PagingData<UserBillType>>(`/user/getBill`, data);
export const getPayOrders = () => GET<PaySchema[]>(`/user/getPayOrders`);

View File

@@ -0,0 +1,160 @@
import React, { useState } from 'react';
import {
Box,
Button,
Modal,
ModalOverlay,
ModalContent,
Flex,
ModalFooter,
ModalBody,
ModalCloseButton,
Table,
Thead,
Tbody,
Tr,
Th,
Td,
TableContainer,
IconButton
} from '@chakra-ui/react';
import { getOpenApiKeys, createAOpenApiKey, delOpenApiById } from '@/api/openapi';
import { useQuery, useMutation } from '@tanstack/react-query';
import { useLoading } from '@/hooks/useLoading';
import dayjs from 'dayjs';
import { AddIcon, DeleteIcon } from '@chakra-ui/icons';
import { getErrText, useCopyData } from '@/utils/tools';
import { useToast } from '@/hooks/useToast';
import MyIcon from '../Icon';
const APIKeyModal = ({ onClose }: { onClose: () => void }) => {
const { Loading } = useLoading();
const { toast } = useToast();
const {
data: apiKeys = [],
isLoading: isGetting,
refetch
} = useQuery(['getOpenApiKeys'], getOpenApiKeys);
const [apiKey, setApiKey] = useState('');
const { copyData } = useCopyData();
const { mutate: onclickCreateApiKey, isLoading: isCreating } = useMutation({
mutationFn: () => createAOpenApiKey(),
onSuccess(res) {
setApiKey(res);
refetch();
},
onError(err) {
toast({
status: 'warning',
title: getErrText(err)
});
}
});
const { mutate: onclickRemove, isLoading: isDeleting } = useMutation({
mutationFn: async (id: string) => delOpenApiById(id),
onSuccess() {
refetch();
}
});
return (
<Modal isOpen onClose={onClose}>
<ModalOverlay />
<ModalContent w={'600px'} maxW={'90vw'} position={'relative'}>
<Box py={3} px={5}>
<Box fontWeight={'bold'} fontSize={'2xl'}>
API
</Box>
<Box fontSize={'sm'} color={'myGray.600'}>
API 使~
</Box>
</Box>
<ModalCloseButton />
<ModalBody minH={'300px'} maxH={['70vh', '500px']} overflow={'overlay'}>
<TableContainer mt={2} position={'relative'}>
<Table>
<Thead>
<Tr>
<Th>Api Key</Th>
<Th></Th>
<Th>使</Th>
<Th />
</Tr>
</Thead>
<Tbody fontSize={'sm'}>
{apiKeys.map(({ id, apiKey, createTime, lastUsedTime }) => (
<Tr key={id}>
<Td>{apiKey}</Td>
<Td>{dayjs(createTime).format('YYYY/MM/DD HH:mm:ss')}</Td>
<Td>
{lastUsedTime
? dayjs(lastUsedTime).format('YYYY/MM/DD HH:mm:ss')
: '没有使用过'}
</Td>
<Td>
<IconButton
icon={<DeleteIcon />}
size={'xs'}
aria-label={'delete'}
variant={'base'}
colorScheme={'gray'}
onClick={() => onclickRemove(id)}
/>
</Td>
</Tr>
))}
</Tbody>
</Table>
</TableContainer>
</ModalBody>
<ModalFooter>
<Button
variant="base"
leftIcon={<AddIcon color={'myGray.600'} fontSize={'sm'} />}
onClick={() => onclickCreateApiKey()}
>
</Button>
</ModalFooter>
<Loading loading={isGetting || isCreating || isDeleting} fixed={false} />
</ModalContent>
<Modal isOpen={!!apiKey} onClose={() => setApiKey('')}>
<ModalOverlay />
<ModalContent w={'400px'} maxW={'90vw'}>
<Box py={3} px={5}>
<Box fontWeight={'bold'} fontSize={'2xl'}>
API
</Box>
<Box fontSize={'sm'} color={'myGray.600'}>
~
</Box>
</Box>
<ModalCloseButton />
<ModalBody>
<Flex
bg={'myGray.100'}
px={3}
py={2}
cursor={'pointer'}
onClick={() => copyData(apiKey)}
>
<Box flex={1}>{apiKey}</Box>
<MyIcon name={'copy'} w={'16px'}></MyIcon>
</Flex>
</ModalBody>
<ModalFooter>
<Button variant="base" onClick={() => setApiKey('')}>
</Button>
</ModalFooter>
</ModalContent>
</Modal>
</Modal>
);
};
export default APIKeyModal;

View File

@@ -0,0 +1,4 @@
.datePicker {
--rdp-background-color: #d6e8ff;
--rdp-accent-color: #0000ff;
}

View File

@@ -0,0 +1,121 @@
import React, { useState, useMemo, useRef } from 'react';
import { Box, Card, Flex, useTheme, useOutsideClick, Button } from '@chakra-ui/react';
import { addDays, format } from 'date-fns';
import { type DateRange, DayPicker } from 'react-day-picker';
import MyIcon from '../Icon';
import 'react-day-picker/dist/style.css';
import styles from './index.module.scss';
import zhCN from 'date-fns/locale/zh-CN';
const DateRangePicker = ({
onChange,
onSuccess,
position = 'bottom',
defaultDate = {
from: addDays(new Date(), -30),
to: new Date()
}
}: {
onChange?: (date: DateRange) => void;
onSuccess?: (date: DateRange) => void;
position?: 'bottom' | 'top';
defaultDate?: DateRange;
}) => {
const theme = useTheme();
const OutRangeRef = useRef(null);
const [range, setRange] = useState<DateRange | undefined>(defaultDate);
const [showSelected, setShowSelected] = useState(false);
const formatSelected = useMemo(() => {
if (range?.from && range.to) {
return `${format(range.from, 'y-MM-dd')} ~ ${format(range.to, 'y-MM-dd')}`;
}
return `${format(new Date(), 'y-MM-dd')} ~ ${format(new Date(), 'y-MM-dd')}`;
}, [range]);
useOutsideClick({
ref: OutRangeRef,
handler: () => {
setShowSelected(false);
}
});
return (
<Box position={'relative'} ref={OutRangeRef}>
<Flex
border={theme.borders.base}
px={3}
py={1}
borderRadius={'sm'}
cursor={'pointer'}
bg={'myWhite.600'}
fontSize={'sm'}
onClick={() => setShowSelected(true)}
>
<Box>{formatSelected}</Box>
<MyIcon ml={2} name={'date'} w={'16px'} color={'myGray.600'} />
</Flex>
{showSelected && (
<Card
position={'absolute'}
zIndex={1}
{...(position === 'top'
? {
bottom: '40px'
}
: {})}
>
<DayPicker
locale={zhCN}
id="test"
mode="range"
className={styles.datePicker}
defaultMonth={defaultDate.to}
selected={range}
disabled={[
{ from: new Date(2022, 3, 1), to: addDays(new Date(), -90) },
{ from: addDays(new Date(), 1), to: new Date(2099, 1, 1) }
]}
onSelect={(date) => {
if (date?.from === undefined) {
date = {
from: range?.from,
to: range?.from
};
}
if (date?.to === undefined) {
date.to = date.from;
}
setRange(date);
onChange && onChange(date);
}}
footer={
<Flex justifyContent={'flex-end'}>
<Button
variant={'outline'}
size={'sm'}
mr={2}
onClick={() => setShowSelected(false)}
>
</Button>
<Button
size={'sm'}
onClick={() => {
onSuccess && onSuccess(range || defaultDate);
setShowSelected(false);
}}
>
</Button>
</Flex>
}
/>
</Card>
)}
</Box>
);
};
export default DateRangePicker;
export type DateRangeType = DateRange;

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1686969412308" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="3481" xmlns:xlink="http://www.w3.org/1999/xlink" width="64" height="64"><path d="M517.864056 487.834624c-56.774051-54.213739-58.850339-144.187937-4.6366-200.960964 54.212716-56.773028 144.187937-58.849316 200.960964-4.6366 56.775074 54.213739 58.850339 144.186913 4.6366 200.960964C664.613328 539.972075 574.639131 542.048363 517.864056 487.834624zM687.194626 452.994118c37.533848-39.308261 36.09508-101.596909-3.210112-139.128711-39.304168-37.531801-101.593839-36.094056-139.127687 3.211135-37.532825 39.307238-36.093033 101.593839 3.212158 139.125641C587.374176 493.736031 649.660778 492.302379 687.194626 452.994118zM479.104287 670.917406l-101.495602 106.289792c26.206872 25.024953 27.167756 66.540486 2.14178 92.749404-25.028023 26.209942-66.543555 27.16571-92.750427 2.140757l-58.361199 53.027727c0 0-68.750827 11.100826-100.379175-19.101033-31.630395-30.205952-37.865399-112.721271-37.865399-112.721271l246.37427-258.302951c-63.173808-117.608581-47.24707-267.162736 49.939389-368.939747 36.517705-38.242999 80.346933-65.156976 127.165238-81.040734l1.084705 46.269813c-35.443233 14.07967-68.566632 35.596729-96.618525 64.973804-80.271208 84.064604-96.099708 205.865671-49.433876 305.083393l23.075555 39.163975L146.090774 798.015106c0 0 0.593518 49.77873 17.242709 65.677838 14.888082 14.216793 61.832254 9.828856 61.832254 9.828856l60.407812-63.260789 31.631418 30.203906c8.741082 8.346085 22.570042 8.030907 30.91715-0.711198 8.347109-8.742105 8.026814-22.571065-0.713244-30.91715l-31.632441-30.207999 156.456355-163.846672 39.009456 22.481014c101.259218 42.039465 222.201731 20.61041 302.474986-63.453171 104.251366-109.178585 
100.260471-282.211477-8.91709-386.464889-33.591049-32.075533-73.260537-53.829999-115.093295-65.49262l-1.030469-45.153386c53.197596 12.471033 103.945397 38.547944 146.323577 79.015611 126.645398 120.931257 131.277906 321.649698 10.344602 448.296119C748.158093 705.787588 599.500355 728.598106 479.104287 670.917406z" p-id="3482"></path></svg>

After

Width:  |  Height:  |  Size: 2.2 KiB

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1686832863390" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="4120" xmlns:xlink="http://www.w3.org/1999/xlink" width="64" height="64"><path d="M782.84 188.75h-43.15v-60.46c0-16.57-13.43-30-30-30s-30 13.43-30 30v60.46H371.88v-60.46c0-16.57-13.43-30-30-30s-30 13.43-30 30v60.46H250.5c-66.17 0-120 53.83-120 120v494.47c0 66.17 53.83 120 120 120h532.33c66.17 0 120-53.83 120-120V308.75c0.01-66.17-53.82-120-119.99-120z m-532.34 60h61.37v133.63c0 16.57 13.43 30 30 30s30-13.43 30-30V248.75h307.81v133.63c0 16.57 13.43 30 30 30s30-13.43 30-30V248.75h43.15c33.08 0 60 26.92 60 60V649.5H190.5V308.75c0-33.08 26.92-60 60-60z m532.34 614.47H250.5c-33.08 0-60-26.92-60-60V709.5h652.33v93.72c0.01 33.08-26.91 60-59.99 60z" p-id="4121"></path></svg>

After

Width:  |  Height:  |  Size: 924 B

View File

@@ -1 +0,0 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1683254594671" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1491" xmlns:xlink="http://www.w3.org/1999/xlink" width="64" height="64"><path d="M46.95735957 106.20989621h930.08528086v158.0067668H46.95735957zM46.95735957 353.99323467v608.68515424h930.08528086V353.99323467H46.95735957z m346.5375657 418.35882335L328.85579413 835.19565715l-165.18889183-172.37101684 165.18889183-172.37101686 64.63913114 62.84359914-105.93635373 109.52741772 105.93635373 109.52741771z m127.48273175 62.84359913l-86.18550917-23.34190854 87.98104116-330.37778366 86.1855077 23.34191003L520.97765702 835.19565715z m193.91739489 0l-64.63913114-62.84359913 105.93635372-109.52741771-105.93635372-109.52741772 64.63913114-62.84359914 165.18889182 172.37101686-165.18889182 172.37101684z" p-id="1492"></path></svg>

Before

Width:  |  Height:  |  Size: 976 B

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1686557412109" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="2150" xmlns:xlink="http://www.w3.org/1999/xlink" ><path d="M511.998 64C264.574 64 64 264.574 64 511.998S264.574 960 511.998 960 960 759.422 960 511.998 759.422 64 511.998 64z m353.851 597.438c-82.215 194.648-306.657 285.794-501.306 203.579S78.749 558.36 160.964 363.711 467.621 77.917 662.27 160.132c168.009 70.963 262.57 250.652 225.926 429.313a383.995 383.995 0 0 1-22.347 71.993z" p-id="2151"></path><path d="M543.311 498.639V256.121c0-17.657-14.314-31.97-31.97-31.97s-31.97 14.314-31.97 31.97v269.005l201.481 201.481c12.485 12.485 32.728 12.485 45.213 0s12.485-32.728 0-45.213L543.311 498.639z" p-id="2152"></path></svg>

After

Width:  |  Height:  |  Size: 875 B

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1686561811905" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="2855" xmlns:xlink="http://www.w3.org/1999/xlink" ><path d="M992 528c0 273.9-222.1 496-496 496S0 801.9 0 528 222.1 32 496 32c86.2 0 167.3 22 238 60.7 2.3 1.3 2.8 4.4 0.9 6.3l-37 37.3-4.2 4.3c-1.2 1.2-3.1 1.5-4.6 0.8-8.2-4.1-16.5-7.9-24.9-11.5C610.9 107.4 554.3 96 496 96s-114.9 11.4-168.1 33.9c-51.4 21.8-97.7 52.9-137.3 92.6-39.7 39.7-70.9 85.9-92.6 137.3C75.4 413.1 64 469.6 64 528c0 58.3 11.4 114.9 33.9 168.1 21.8 51.4 52.9 97.6 92.6 137.3 39.7 39.7 85.9 70.9 137.3 92.6 53.3 22.6 109.9 34 168.2 34s114.9-11.4 168.1-33.9c51.4-21.8 97.7-52.9 137.3-92.6 39.7-39.7 70.9-85.9 92.6-137.3 22.6-53.3 34-109.9 34-168.2 0-58.4-11.4-114.9-33.9-168.1-3.6-8.5-7.4-16.8-11.5-25-0.8-1.5-0.5-3.4 0.8-4.6l4.3-4.2 37.3-37c1.9-1.9 5-1.4 6.3 0.9C970 360.6 992 441.7 992 528z" p-id="2856"></path><path d="M781.4 397c-3.7-8-11.7-13.1-20.6-13.1H740c-6 0-11.8 2.4-16 6.6-7 7-8.6 17.6-4.1 26.4 2.6 5.1 5 10.3 7.3 15.7 13.2 31.2 19.9 64.3 19.9 98.5s-6.7 67.3-19.9 98.5c-12.7 30.1-31 57.2-54.2 80.4-23.3 23.3-50.3 41.5-80.4 54.2-31.3 13.1-64.4 19.8-98.6 19.8s-67.3-6.7-98.5-19.9c-30.1-12.7-57.2-31-80.4-54.2-23.3-23.3-41.5-50.3-54.2-80.4-13.2-31.2-19.9-64.3-19.9-98.5s6.7-67.3 19.9-98.5c12.7-30.1 31-57.2 54.2-80.4 23.3-23.3 50.3-41.5 80.4-54.2 31.2-13.2 64.3-19.9 98.5-19.9s67.3 6.7 98.5 19.9c4.9 2.1 9.8 4.3 14.6 6.7 8.8 4.4 19.4 2.6 26.3-4.4 4.3-4.3 6.7-10.1 6.7-16.2v-20.2c0-9-5.2-17.1-13.4-20.8-40.4-18.6-85.3-29-132.6-29-175.5 0-318 143.4-317 318.9C178 707.1 319.6 848 494 848c174.8 0 316.6-141.3 317-316.2 0.1-48.2-10.5-93.9-29.6-134.8z" p-id="2857"></path><path d="M634.5 488.5c-0.8-2.9-4.5-3.9-6.7-1.7l-34.7 34.7-1.8 1.8c-9 9-15.7 20.1-20.1 32.1-11.5 31.6-42.4 54-78.3 52.7-41.6-1.6-75.3-35.3-76.9-76.9-1.4-35.9 21-66.8 52.7-78.3 12-4.4 23-11.1 32.1-20.1l1.8-1.8 
34.7-34.7c2.2-2.2 1.2-5.8-1.7-6.7-12.9-3.7-26.5-5.6-40.6-5.5-79.4 0.5-143 64.5-143 143.9 0 79.5 64.5 144 144 144 79.4 0 143.4-63.6 144-142.9 0.1-14.1-1.8-27.8-5.5-40.6z" p-id="2858"></path><path d="M1014.3 146H882c-2.2 0-4-1.8-4-4V9.8c0-2.4-2-4-4-4-1 0-2 0.4-2.8 1.2L766.8 112.4l-46.1 46.5-44 44.4c-3 3-4.6 7-4.6 11.3v85.5c0 4.3-1.7 8.3-4.7 11.3l-94.7 94.7-47.4 47.4-51.8 51.9c-12.5 12.5-12.5 32.8 0 45.3 6.3 6.3 14.4 9.4 22.6 9.4s16.4-3.1 22.6-9.4l51.8-51.9 123.2-123.2 19-19c3-3 7.1-4.7 11.3-4.7h85.5c4.2 0 8.3-1.7 11.3-4.6l44.3-43.9 46.5-46.1L1017 152.9c2.6-2.6 0.8-6.9-2.7-6.9zM864 214.3l-44 43.5-25.6 25.4c-3 3-7 4.6-11.3 4.6H744c-4.4 0-8-3.6-8-8v-39c0-4.2 1.7-8.3 4.6-11.3l25.5-25.7 43.5-43.9 1.6-1.6c1.6-1.7 4.5-0.7 4.8 1.6 4.8 25.8 23.5 41.6 48.6 47.7 2.1 0.5 2.9 3.2 1.3 4.7l-1.9 2z" p-id="2859"></path></svg>

After

Width:  |  Height:  |  Size: 2.8 KiB

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1686557165145" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="2404" xmlns:xlink="http://www.w3.org/1999/xlink" ><path d="M815.104 69.632q27.648 25.6 44.032 42.496t25.088 28.672 10.752 19.968 2.048 14.336l0 16.384-151.552 0q-10.24 0-17.92-7.68t-12.8-17.92-7.68-20.992-2.56-16.896l0-126.976 3.072 0q8.192 0 16.896 2.56t19.968 9.728 28.16 20.48 42.496 35.84zM640 129.024q0 20.48 6.144 42.496t19.456 40.96 33.792 31.232 48.128 12.288l149.504 0 0 577.536q0 29.696-11.776 53.248t-31.232 39.936-43.008 25.6-46.08 9.216l-503.808 0q-19.456 0-42.496-11.264t-43.008-29.696-33.28-41.984-13.312-49.152l0-696.32q0-21.504 9.728-44.544t26.624-42.496 38.4-32.256 45.056-12.8l391.168 0 0 128zM704.512 768q26.624 0 45.056-18.944t18.432-45.568-18.432-45.056-45.056-18.432l-384 0q-26.624 0-45.056 18.432t-18.432 45.056 18.432 45.568 45.056 18.944l384 0zM768 448.512q0-26.624-18.432-45.568t-45.056-18.944l-384 0q-26.624 0-45.056 18.944t-18.432 45.568 18.432 45.056 45.056 18.432l384 0q26.624 0 45.056-18.432t18.432-45.056z" p-id="2405" ></path></svg>

After

Width:  |  Height:  |  Size: 1.2 KiB

View File

@@ -6,7 +6,6 @@ const map = {
model: require('./icons/model.svg').default,
copy: require('./icons/copy.svg').default,
chatSend: require('./icons/chatSend.svg').default,
develop: require('./icons/develop.svg').default,
user: require('./icons/user.svg').default,
delete: require('./icons/delete.svg').default,
withdraw: require('./icons/withdraw.svg').default,
@@ -30,7 +29,12 @@ const map = {
menu: require('./icons/menu.svg').default,
edit: require('./icons/edit.svg').default,
inform: require('./icons/inform.svg').default,
export: require('./icons/export.svg').default
export: require('./icons/export.svg').default,
text: require('./icons/text.svg').default,
history: require('./icons/history.svg').default,
kbTest: require('./icons/kbTest.svg').default,
date: require('./icons/date.svg').default,
apikey: require('./icons/apikey.svg').default
};
export type IconName = keyof typeof map;

View File

@@ -1,23 +0,0 @@
type TIconfont = {
name: string;
color?: string;
width?: number | string;
height?: number | string;
className?: string;
};
function Iconfont({ name, color = 'inherit', width = 16, height = 16, className = '' }: TIconfont) {
const style = {
fill: color,
width,
height
};
return (
<svg className={`icon ${className}`} aria-hidden="true" style={style}>
<use xlinkHref={`#${name}`}></use>
</svg>
);
}
export default Iconfont;

View File

@@ -44,12 +44,6 @@ const Navbar = ({ unread }: { unread: number }) => {
link: '/model/share',
activeLink: ['/model/share']
},
{
label: '开发',
icon: 'develop',
link: '/openapi',
activeLink: ['/openapi']
},
{
label: '账号',
icon: 'user',

View File

@@ -14,7 +14,7 @@ const Loading = ({ fixed = true }: { fixed?: boolean }) => {
alignItems={'center'}
justifyContent={'center'}
>
<Spinner thickness="4px" speed="0.65s" emptyColor="gray.200" color="myBlue.500" size="xl" />
<Spinner thickness="4px" speed="0.65s" emptyColor="myGray.100" color="myBlue.600" size="xl" />
</Flex>
);
};

View File

@@ -287,8 +287,7 @@ const CodeLight = ({
children,
className,
inline,
match,
...props
match
}: {
children: React.ReactNode & React.ReactNode[];
className?: string;
@@ -315,18 +314,14 @@ const CodeLight = ({
<Box ml={1}></Box>
</Flex>
</Flex>
<SyntaxHighlighter style={codeLight as any} language={match?.[1]} PreTag="pre" {...props}>
<SyntaxHighlighter style={codeLight as any} language={match?.[1]} PreTag="pre">
{String(children)}
</SyntaxHighlighter>
</Box>
);
}
return (
<code className={className} {...props}>
{children}
</code>
);
return <code className={className}>{children}</code>;
};
export default React.memo(CodeLight);

View File

@@ -0,0 +1,18 @@
import React from 'react';
import { Box } from '@chakra-ui/react';
const regex = /((http|https|ftp):\/\/[^\s\u4e00-\u9fa5\u3000-\u303f\uff00-\uffef]+)/gi;
const Link = (props: { href?: string; children?: React.ReactNode[] }) => {
const decText = decodeURIComponent(props.href || '');
const replaceText = decText.replace(regex, (match, p1) => {
const text = decText === props.children?.[0] ? p1 : props.children?.[0];
const isInternal = /^\/#/i.test(p1);
const target = isInternal ? '_self' : '_blank';
return `<a href="${p1}" target=${target}>${text}</a>`;
});
return <Box as={'span'} dangerouslySetInnerHTML={{ __html: replaceText }} />;
};
export default React.memo(Link);

View File

@@ -1,17 +0,0 @@
import React, { memo } from 'react';
import { Box } from '@chakra-ui/react';
const Loading = () => {
return (
<Box
w={'100%'}
h={'80px'}
backgroundImage={'url("/imgs/loading.gif")'}
backgroundSize={'contain'}
backgroundRepeat={'no-repeat'}
backgroundPosition={'center'}
/>
);
};
export default memo(Loading);

View File

@@ -0,0 +1,39 @@
import React, { useState } from 'react';
import { Image, Skeleton } from '@chakra-ui/react';
const MdImage = ({ src }: { src?: string }) => {
const [isLoading, setIsLoading] = useState(true);
const [succeed, setSucceed] = useState(false);
return (
<Skeleton
minH="100px"
isLoaded={!isLoading}
fadeDuration={2}
display={'flex'}
justifyContent={'center'}
my={1}
>
<Image
display={'inline-block'}
borderRadius={'md'}
src={src}
alt={''}
fallbackSrc={'/imgs/errImg.png'}
fallbackStrategy={'onError'}
cursor={succeed ? 'pointer' : 'default'}
loading="eager"
onLoad={() => {
setIsLoading(false);
setSucceed(true);
}}
onError={() => setIsLoading(false)}
onClick={() => {
if (!succeed) return;
window.open(src, '_blank');
}}
/>
</Skeleton>
);
};
export default React.memo(MdImage);

View File

@@ -0,0 +1,25 @@
import React, { memo } from 'react';
import { Box } from '@chakra-ui/react';
const Loading = ({ text }: { text?: string }) => {
return (
<Box>
<Box
minW={'100px'}
w={'100%'}
h={'80px'}
backgroundImage={'url("/imgs/loading.gif")'}
backgroundSize={'contain'}
backgroundRepeat={'no-repeat'}
backgroundPosition={'center'}
/>
{text && (
<Box mt={1} textAlign={'center'} fontSize={'sm'} color={'myGray.600'}>
{text}
</Box>
)}
</Box>
);
};
export default memo(Loading);

View File

@@ -1,15 +1,16 @@
import React, { useEffect, useRef, memo, useCallback, useState } from 'react';
import React, { useEffect, useRef, memo, useCallback, useState, useMemo } from 'react';
import { Box } from '@chakra-ui/react';
// @ts-ignore
import mermaid from 'mermaid';
import MyIcon from '../Icon';
import styles from './index.module.scss';
import MyIcon from '../../Icon';
const mermaidAPI = mermaid.mermaidAPI;
mermaidAPI.initialize({
startOnLoad: false,
startOnLoad: true,
theme: 'base',
flowchart: {
useMaxWidth: false
},
themeVariables: {
fontSize: '14px',
primaryColor: '#d6e8ff',
@@ -21,28 +22,53 @@ mermaidAPI.initialize({
}
});
const punctuationMap: Record<string, string> = {
'': ',',
'': ';',
'。': '.',
'': ':',
'': '!',
'': '?',
'“': '"',
'”': '"',
'': "'",
'': "'",
'【': '[',
'】': ']',
'': '(',
'': ')',
'《': '<',
'》': '>',
'、': ','
};
const MermaidBlock = ({ code }: { code: string }) => {
const dom = useRef<HTMLDivElement>(null);
const ref = useRef<HTMLDivElement>(null);
const [svg, setSvg] = useState('');
useEffect(() => {
try {
const formatCode = code.replace(//g, ':');
mermaidAPI.render(`mermaid-${Date.now()}`, formatCode, (svgCode: string) => {
setSvg(svgCode);
});
} catch (error) {
console.log(error);
}
(async () => {
if (!code) return;
try {
const formatCode = code.replace(
new RegExp(`[${Object.keys(punctuationMap).join('')}]`, 'g'),
(match) => punctuationMap[match]
);
const { svg } = await mermaid.render(`mermaid-${Date.now()}`, formatCode);
setSvg(svg);
} catch (e: any) {
console.log('[Mermaid] ', e?.message);
}
})();
}, [code]);
const onclickExport = useCallback(() => {
const svg = dom.current?.children[0];
const svg = ref.current?.children[0];
if (!svg) return;
const w = svg.clientWidth * 4;
const h = svg.clientHeight * 4;
const rate = svg.clientHeight / svg.clientWidth;
const w = 3000;
const h = rate * w;
const canvas = document.createElement('canvas');
canvas.width = w;
@@ -54,7 +80,7 @@ const MermaidBlock = ({ code }: { code: string }) => {
ctx.fillRect(0, 0, w, h);
const img = new Image();
img.src = `data:image/svg+xml;charset=utf-8,${encodeURIComponent(dom.current.innerHTML)}`;
img.src = `data:image/svg+xml;charset=utf-8,${encodeURIComponent(ref.current.innerHTML)}`;
img.onload = () => {
ctx.drawImage(img, 0, 0, w, h);
@@ -73,15 +99,25 @@ const MermaidBlock = ({ code }: { code: string }) => {
}, []);
return (
<Box position={'relative'}>
<Box
position={'relative'}
_hover={{
'& > .export': {
display: 'block'
}
}}
>
<Box
ref={dom}
className={styles.mermaid}
overflowX={'auto'}
ref={ref}
minW={'100px'}
minH={'50px'}
py={4}
dangerouslySetInnerHTML={{ __html: svg }}
/>
<MyIcon
className="export"
display={'none'}
name={'export'}
w={'20px'}
position={'absolute'}
@@ -98,4 +134,4 @@ const MermaidBlock = ({ code }: { code: string }) => {
);
};
export default memo(MermaidBlock);
export default MermaidBlock;

View File

@@ -319,7 +319,6 @@
border: medium none;
margin: 0;
padding: 0;
white-space: pre;
}
.markdown .highlight pre,
.markdown pre {
@@ -345,10 +344,6 @@
word-break: break-all;
}
p {
white-space: pre-line;
}
pre {
display: block;
width: 100%;
@@ -419,9 +414,4 @@
.mermaid {
overflow-x: auto;
svg {
height: auto !important;
width: auto;
}
}

View File

@@ -1,58 +1,54 @@
import React, { memo, useMemo, useEffect } from 'react';
import React from 'react';
import ReactMarkdown from 'react-markdown';
import { formatLinkText } from '@/utils/tools';
import remarkGfm from 'remark-gfm';
import remarkMath from 'remark-math';
import remarkBreaks from 'remark-breaks';
import rehypeKatex from 'rehype-katex';
import RemarkGfm from 'remark-gfm';
import RemarkMath from 'remark-math';
import RehypeKatex from 'rehype-katex';
import RemarkBreaks from 'remark-breaks';
import 'katex/dist/katex.min.css';
import styles from './index.module.scss';
import CodeLight from './codeLight';
import Loading from './Loading';
import MermaidCodeBlock from './MermaidCodeBlock';
const Markdown = ({
source,
isChatting = false,
formatLink
}: {
source: string;
formatLink?: boolean;
isChatting?: boolean;
}) => {
const formatSource = useMemo(() => {
return formatLink ? formatLinkText(source) : source;
}, [source, formatLink]);
import Link from './Link';
import CodeLight from './CodeLight';
import MermaidCodeBlock from './img/MermaidCodeBlock';
import MdImage from './img/Image';
function Code({ inline, className, children }: any) {
const match = /language-(\w+)/.exec(className || '');
if (match?.[1] === 'mermaid') {
return <MermaidCodeBlock code={String(children)} />;
}
return (
<CodeLight className={className} inline={inline} match={match}>
{children}
</CodeLight>
);
}
function Image({ src }: { src?: string }) {
return <MdImage src={src} />;
}
const Markdown = ({ source, isChatting = false }: { source: string; isChatting?: boolean }) => {
return (
<ReactMarkdown
className={`markdown ${styles.markdown}
${isChatting ? (source === '' ? styles.waitingAnimation : styles.animation) : ''}
`}
remarkPlugins={[remarkGfm, remarkMath, remarkBreaks]}
rehypePlugins={[rehypeKatex]}
${isChatting ? (source === '' ? styles.waitingAnimation : styles.animation) : ''}
`}
remarkPlugins={[RemarkGfm, RemarkMath, RemarkBreaks]}
rehypePlugins={[RehypeKatex]}
components={{
a: Link,
img: Image,
pre: 'div',
code({ node, inline, className, children, ...props }) {
const match = /language-(\w+)/.exec(className || '');
if (match?.[1] === 'mermaid') {
return isChatting ? <Loading /> : <MermaidCodeBlock code={String(children)} />;
}
return (
<CodeLight className={className} inline={inline} match={match} {...props}>
{children}
</CodeLight>
);
}
code: Code
}}
linkTarget="_blank"
>
{formatSource}
{source}
</ReactMarkdown>
);
};
export default memo(Markdown);
export default Markdown;

View File

@@ -25,6 +25,7 @@ const Radio = ({ list, value, onChange, ...props }: Props) => {
mr: 1,
borderRadius: '16px',
transition: '0.2s',
boxSizing: 'border-box',
...(value === item.value
? {
border: '5px solid',

View File

@@ -1,76 +0,0 @@
import React, { useRef, useEffect, useMemo } from 'react';
import type { BoxProps } from '@chakra-ui/react';
import { Box } from '@chakra-ui/react';
import { throttle } from 'lodash';
import { useLoading } from '@/hooks/useLoading';
interface Props extends BoxProps {
nextPage: () => void;
isLoadAll: boolean;
requesting: boolean;
children: React.ReactNode;
initRequesting?: boolean;
}
const ScrollData = ({
children,
nextPage,
isLoadAll,
requesting,
initRequesting,
...props
}: Props) => {
const { Loading } = useLoading({ defaultLoading: true });
const elementRef = useRef<HTMLDivElement>(null);
const loadText = useMemo(() => {
if (requesting) return '请求中……';
if (isLoadAll) return '已加载全部';
return '点击加载更多';
}, [isLoadAll, requesting]);
useEffect(() => {
if (!elementRef.current) return;
const scrolling = throttle((e: Event) => {
const element = e.target as HTMLDivElement;
if (!element) return;
// 当前滚动位置
const scrollTop = element.scrollTop;
// 可视高度
const clientHeight = element.clientHeight;
// 内容总高度
const scrollHeight = element.scrollHeight;
// 判断是否滚动到底部
if (scrollTop + clientHeight + 100 >= scrollHeight) {
nextPage();
}
}, 100);
elementRef.current.addEventListener('scroll', scrolling);
return () => {
// eslint-disable-next-line react-hooks/exhaustive-deps
elementRef.current?.removeEventListener('scroll', scrolling);
};
}, [elementRef, nextPage]);
return (
<Box {...props} ref={elementRef} overflowY={'auto'} position={'relative'}>
{children}
<Box
mt={2}
fontSize={'xs'}
color={'blackAlpha.500'}
textAlign={'center'}
cursor={loadText === '点击加载更多' ? 'pointer' : 'default'}
onClick={() => {
if (loadText !== '点击加载更多') return;
nextPage();
}}
>
{loadText}
</Box>
{initRequesting && <Loading fixed={false} />}
</Box>
);
};
export default ScrollData;

View File

@@ -0,0 +1,81 @@
import React from 'react';
import { Menu, MenuButton, MenuList, MenuItem, Button, useDisclosure } from '@chakra-ui/react';
import type { ButtonProps } from '@chakra-ui/react';
import { ChevronDownIcon } from '@chakra-ui/icons';
interface Props extends ButtonProps {
value?: string;
placeholder?: string;
list: {
label: string;
id: string;
}[];
onchange?: (val: string) => void;
}
const MySelect = ({ placeholder, value, width = 'auto', list, onchange, ...props }: Props) => {
const menuItemStyles = {
borderRadius: 'sm',
py: 2,
display: 'flex',
alignItems: 'center',
_hover: {
backgroundColor: 'myWhite.600'
}
};
const { isOpen, onOpen, onClose } = useDisclosure();
return (
<Menu autoSelect={false} onOpen={onOpen} onClose={onClose}>
<MenuButton style={{ width: '100%' }} as={'span'}>
<Button
width={width}
px={3}
variant={'base'}
display={'flex'}
alignItems={'center'}
justifyContent={'space-between'}
{...(isOpen
? {
boxShadow: '0px 0px 4px #A8DBFF',
borderColor: 'myBlue.600'
}
: {})}
{...props}
>
{list.find((item) => item.id === value)?.label || placeholder}
<ChevronDownIcon />
</Button>
</MenuButton>
<MenuList
minW={
Array.isArray(width) ? width.map((item) => `${item} !important`) : `${width} !important`
}
p={'6px'}
border={'1px solid #fff'}
boxShadow={'0px 2px 4px rgba(161, 167, 179, 0.25), 0px 0px 1px rgba(121, 141, 159, 0.25);'}
zIndex={99}
>
{list.map((item) => (
<MenuItem
key={item.id}
{...menuItemStyles}
{...(value === item.id
? {
color: 'myBlue.600'
}
: {})}
onClick={() => {
if (onchange && value !== item.id) {
onchange(item.id);
}
}}
>
{item.label}
</MenuItem>
))}
</MenuList>
</Menu>
);
};
export default MySelect;

View File

@@ -9,28 +9,30 @@ import {
} from '@chakra-ui/react';
const MySlider = ({
markList,
markList = [],
setVal,
activeVal,
max = 100,
min = 0,
step = 1
step = 1,
width = '100%'
}: {
markList: {
markList?: {
label: string | number;
value: number;
}[];
activeVal?: number;
activeVal: number;
setVal: (index: number) => void;
max?: number;
min?: number;
step?: number;
width?: string | string[] | number | number[];
}) => {
const startEndPointStyle = {
content: '""',
borderRadius: '10px',
width: '10px',
height: '10px',
borderRadius: '6px',
width: '6px',
height: '6px',
backgroundColor: '#ffffff',
border: '2px solid #D7DBE2',
position: 'absolute',
@@ -44,37 +46,62 @@ const MySlider = ({
}, [activeVal, markList]);
return (
<Slider max={max} min={min} step={step} size={'lg'} value={value} onChange={setVal}>
{markList.map((item, i) => (
<Slider
max={max}
min={min}
step={step}
size={'lg'}
value={activeVal}
width={width}
onChange={setVal}
>
{markList?.map((item, i) => (
<SliderMark
key={item.value}
value={i}
mt={3}
value={item.value}
fontSize={'sm'}
mt={3}
whiteSpace={'nowrap'}
transform={'translateX(-50%)'}
{...(activeVal === item.value ? { color: 'myBlue.500', fontWeight: 'bold' } : {})}
color={'myGray.600'}
>
<Box px={3} cursor={'pointer'}>
{item.label}
</Box>
</SliderMark>
))}
<SliderMark
value={activeVal}
textAlign="center"
bg="myBlue.600"
color="white"
px={1}
minW={'18px'}
w={'auto'}
h={'18px'}
borderRadius={'18px'}
fontSize={'xs'}
transform={'translate(-50%, -170%)'}
boxSizing={'border-box'}
>
{activeVal}
</SliderMark>
<SliderTrack
bg={'#EAEDF3'}
overflow={'visible'}
h={'4px'}
_before={{
...startEndPointStyle,
left: '-5px'
left: '-3px'
}}
_after={{
...startEndPointStyle,
right: '-5px'
right: '-3px'
}}
>
<SliderFilledTrack />
<SliderFilledTrack bg={'myBlue.600'} />
</SliderTrack>
<SliderThumb border={'2.5px solid'} borderColor={'myBlue.500'}></SliderThumb>
<SliderThumb border={'3px solid'} borderColor={'myBlue.600'}></SliderThumb>
</Slider>
);
};

View File

@@ -24,13 +24,13 @@ const Tabs = ({ list, size = 'md', activeId, onChange, ...props }: Props) => {
return {
fontSize: 'md',
outP: '4px',
inlineP: 2
inlineP: 1
};
case 'lg':
return {
fontSize: 'lg',
outP: '5px',
inlineP: 3
inlineP: 2
};
}
}, [size]);

View File

@@ -0,0 +1,47 @@
import React, { useMemo } from 'react';
import { Box, type BoxProps } from '@chakra-ui/react';
interface Props extends BoxProps {
children: string;
colorSchema?: 'blue' | 'green' | 'gray';
}
const Tag = ({ children, colorSchema = 'blue', ...props }: Props) => {
const theme = useMemo(() => {
const map = {
blue: {
borderColor: 'myBlue.700',
bg: '#F2FBFF',
color: 'myBlue.700'
},
green: {
borderColor: '#52C41A',
bg: '#EDFFED',
color: '#52C41A'
},
gray: {
borderColor: '#979797',
bg: '#F7F7F7',
color: '#979797'
}
};
return map[colorSchema];
}, [colorSchema]);
return (
<Box
display={'inline-block'}
border={'1px solid'}
px={2}
lineHeight={1}
py={'2px'}
borderRadius={'md'}
fontSize={'xs'}
{...theme}
{...props}
>
{children}
</Box>
);
};
export default Tag;

File diff suppressed because one or more lines are too long

View File

@@ -8,14 +8,12 @@ export type EmbeddingModelType = 'text-embedding-ada-002';
export enum OpenAiChatEnum {
'GPT35' = 'gpt-3.5-turbo',
'GPT3516k' = 'gpt-3.5-turbo-16k',
'GPT4' = 'gpt-4',
'GPT432k' = 'gpt-4-32k'
}
export enum ClaudeEnum {
'Claude' = 'Claude'
}
export type ChatModelType = `${OpenAiChatEnum}` | `${ClaudeEnum}`;
export type ChatModelType = `${OpenAiChatEnum}`;
export type ChatModelItemType = {
chatModel: ChatModelType;
@@ -29,11 +27,19 @@ export type ChatModelItemType = {
export const ChatModelMap = {
[OpenAiChatEnum.GPT35]: {
chatModel: OpenAiChatEnum.GPT35,
name: 'ChatGpt',
contextMaxToken: 4096,
systemMaxToken: 2700,
name: 'Gpt35-4k',
contextMaxToken: 4000,
systemMaxToken: 2400,
maxTemperature: 1.2,
price: 2.5
price: 1.5
},
[OpenAiChatEnum.GPT3516k]: {
chatModel: OpenAiChatEnum.GPT3516k,
name: 'Gpt35-16k',
contextMaxToken: 16000,
systemMaxToken: 8000,
maxTemperature: 1.2,
price: 1.5
},
[OpenAiChatEnum.GPT4]: {
chatModel: OpenAiChatEnum.GPT4,
@@ -41,7 +47,7 @@ export const ChatModelMap = {
contextMaxToken: 8000,
systemMaxToken: 4000,
maxTemperature: 1.2,
price: 50
price: 10
},
[OpenAiChatEnum.GPT432k]: {
chatModel: OpenAiChatEnum.GPT432k,
@@ -50,14 +56,6 @@ export const ChatModelMap = {
systemMaxToken: 8000,
maxTemperature: 1.2,
price: 90
},
[ClaudeEnum.Claude]: {
chatModel: ClaudeEnum.Claude,
name: 'Claude(免费体验)',
contextMaxToken: 9000,
systemMaxToken: 2700,
maxTemperature: 1,
price: 0
}
};
@@ -71,78 +69,27 @@ export const getChatModelList = async () => {
return list;
};
export enum ModelStatusEnum {
running = 'running',
training = 'training',
pending = 'pending',
closed = 'closed'
}
export const formatModelStatus = {
[ModelStatusEnum.running]: {
colorTheme: 'green',
text: '运行中'
},
[ModelStatusEnum.training]: {
colorTheme: 'blue',
text: '训练中'
},
[ModelStatusEnum.pending]: {
colorTheme: 'gray',
text: '加载中'
},
[ModelStatusEnum.closed]: {
colorTheme: 'red',
text: '已关闭'
}
};
/* 知识库搜索时的配置 */
// 搜索方式
export enum appVectorSearchModeEnum {
hightSimilarity = 'hightSimilarity', // 高相似度+禁止回复
lowSimilarity = 'lowSimilarity', // 低相似度
noContext = 'noContex' // 高相似度+无上下文回复
}
export const ModelVectorSearchModeMap: Record<
`${appVectorSearchModeEnum}`,
{
text: string;
similarity: number;
}
> = {
[appVectorSearchModeEnum.hightSimilarity]: {
text: '高相似度, 无匹配时拒绝回复',
similarity: 0.8
},
[appVectorSearchModeEnum.noContext]: {
text: '高相似度,无匹配时直接回复',
similarity: 0.8
},
[appVectorSearchModeEnum.lowSimilarity]: {
text: '低相似度匹配',
similarity: 0.3
}
};
export const defaultModel: ModelSchema = {
_id: 'modelId',
userId: 'userId',
name: '模型名称',
avatar: '/icon/logo.png',
status: ModelStatusEnum.pending,
intro: '',
updateTime: Date.now(),
chat: {
relatedKbs: [],
searchMode: appVectorSearchModeEnum.hightSimilarity,
searchSimilarity: 0.2,
searchLimit: 5,
searchEmptyText: '',
systemPrompt: '',
limitPrompt: '',
temperature: 0,
maxToken: 4000,
chatModel: OpenAiChatEnum.GPT35
},
share: {
isShare: false,
isShareDetail: false,
intro: '',
collection: 0
}
};

View File

@@ -11,15 +11,11 @@ const { definePartsStyle: switchPart, defineMultiStyleConfig: switchMultiStyle }
createMultiStyleConfigHelpers(switchAnatomy.keys);
const { definePartsStyle: selectPart, defineMultiStyleConfig: selectMultiStyle } =
createMultiStyleConfigHelpers(selectAnatomy.keys);
const { definePartsStyle: checkboxPart, defineMultiStyleConfig: checkboxMultiStyle } =
createMultiStyleConfigHelpers(checkboxAnatomy.keys);
// modal 弹窗
const ModalTheme = defineMultiStyleConfig({
baseStyle: definePartsStyle({
dialog: {
width: '90%'
}
dialog: {}
})
});
@@ -41,7 +37,7 @@ const Button = defineStyleConfig({
},
sm: {
fontSize: 'sm',
px: 3,
px: 4,
py: 0,
fontWeight: 'normal',
height: '26px',
@@ -69,8 +65,12 @@ const Button = defineStyleConfig({
backgroundImage:
'linear-gradient(to bottom right, #2152d9 0%,#3370ff 40%, #4e83fd 100%) !important',
color: 'white',
border: 'none',
_hover: {
filter: 'brightness(115%)'
},
_disabled: {
bg: '#3370ff !important'
}
},
base: {
@@ -220,7 +220,6 @@ export const theme = extendTheme({
900: '#1237b3',
1000: '#07228c'
},
myRead: {
600: '#ff4d4f'
}
@@ -250,8 +249,8 @@ export const theme = extendTheme({
'2xl': '2100px'
},
lgColor: {
activeBlueGradient: 'linear-gradient(120deg, #d6e8ff 0%, #f0f7ff 100%)',
hoverBlueGradient: 'linear-gradient(60deg, #f0f7ff 0%, #d6e8ff 100%)',
activeBlueGradient: 'linear-gradient(to bottom right, #d6e8ff 0%, #f0f7ff 100%)',
hoverBlueGradient: 'linear-gradient(to top left, #d6e8ff 0%, #f0f7ff 100%)',
primary: 'linear-gradient(to bottom right, #2152d9 0%,#3370ff 40%, #4e83fd 100%)',
primary2: 'linear-gradient(to bottom right, #2152d9 0%,#3370ff 30%,#4e83fd 80%, #85b1ff 100%)'
},

View File

@@ -1,21 +1,27 @@
import { useState, useCallback, useMemo, useEffect } from 'react';
import { useRef, useState, useCallback, useLayoutEffect, useMemo, useEffect } from 'react';
import type { PagingData } from '../types/index';
import { IconButton, Flex, Box, Input } from '@chakra-ui/react';
import { ArrowBackIcon, ArrowForwardIcon } from '@chakra-ui/icons';
import { useMutation } from '@tanstack/react-query';
import { useToast } from './useToast';
import { throttle } from 'lodash';
const thresholdVal = 100;
export const usePagination = <T = any,>({
api,
pageSize = 10,
params = {},
defaultRequest = true
defaultRequest = true,
type = 'button'
}: {
api: (data: any) => any;
pageSize?: number;
params?: Record<string, any>;
defaultRequest?: boolean;
type?: 'button' | 'scroll';
}) => {
const elementRef = useRef<HTMLDivElement>(null);
const { toast } = useToast();
const [pageNum, setPageNum] = useState(1);
const [total, setTotal] = useState(0);
@@ -54,6 +60,7 @@ export const usePagination = <T = any,>({
size={'sm'}
w={'28px'}
h={'28px'}
isLoading={isLoading}
onClick={() => mutate(pageNum - 1)}
/>
<Flex mx={2} alignItems={'center'}>
@@ -75,6 +82,20 @@ export const usePagination = <T = any,>({
mutate(+e.target.value);
}
}}
onKeyDown={(e) => {
// @ts-ignore
const val = +e.target.value;
if (val && e.keyCode === 13) {
if (val === pageNum) return;
if (val >= maxPage) {
mutate(maxPage);
} else if (val < 1) {
mutate(1);
} else {
mutate(val);
}
}
}}
/>
<Box mx={2}>/</Box>
{maxPage}
@@ -84,13 +105,68 @@ export const usePagination = <T = any,>({
icon={<ArrowForwardIcon />}
aria-label={'left'}
size={'sm'}
isLoading={isLoading}
w={'28px'}
h={'28px'}
onClick={() => mutate(pageNum + 1)}
/>
</Flex>
);
}, [maxPage, mutate, pageNum]);
}, [isLoading, maxPage, mutate, pageNum]);
const ScrollData = useCallback(
({ children, ...props }: { children: React.ReactNode }) => {
const loadText = useMemo(() => {
if (isLoading) return '请求中……';
if (total <= data.length) return '已加载全部';
return '点击加载更多';
}, []);
return (
<Box {...props} ref={elementRef} overflow={'overlay'}>
{children}
<Box
mt={2}
fontSize={'xs'}
color={'blackAlpha.500'}
textAlign={'center'}
cursor={loadText === '点击加载更多' ? 'pointer' : 'default'}
onClick={() => {
if (loadText !== '点击加载更多') return;
mutate(pageNum + 1);
}}
>
{loadText}
</Box>
</Box>
);
},
[data.length, isLoading, mutate, pageNum, total]
);
useLayoutEffect(() => {
if (!elementRef.current || type !== 'scroll') return;
const scrolling = throttle((e: Event) => {
const element = e.target as HTMLDivElement;
if (!element) return;
// 当前滚动位置
const scrollTop = element.scrollTop;
// 可视高度
const clientHeight = element.clientHeight;
// 内容总高度
const scrollHeight = element.scrollHeight;
// 判断是否滚动到底部
if (scrollTop + clientHeight + thresholdVal >= scrollHeight) {
mutate(pageNum + 1);
}
}, 100);
elementRef.current.addEventListener('scroll', scrolling);
return () => {
// eslint-disable-next-line react-hooks/exhaustive-deps
elementRef.current?.removeEventListener('scroll', scrolling);
};
}, [elementRef, mutate, pageNum, type]);
useEffect(() => {
defaultRequest && mutate(1);
@@ -103,6 +179,7 @@ export const usePagination = <T = any,>({
data,
isLoading,
Pagination,
ScrollData,
getData: mutate
};
};

View File

@@ -8,9 +8,9 @@ import { theme } from '@/constants/theme';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import NProgress from 'nprogress'; //nprogress module
import Router from 'next/router';
import 'nprogress/nprogress.css';
import '../styles/reset.scss';
import { useGlobalStore } from '@/store/global';
import 'nprogress/nprogress.css';
import '@/styles/reset.scss';
//Binding events.
Router.events.on('routeChangeStart', () => NProgress.start());
@@ -28,10 +28,10 @@ const queryClient = new QueryClient({
}
});
export default function App({ Component, pageProps }: AppProps) {
function App({ Component, pageProps }: AppProps) {
const {
loadInitData,
initData: { googleVerKey }
initData: { googleVerKey, baiduTongji }
} = useGlobalStore();
useEffect(() => {
@@ -49,16 +49,23 @@ export default function App({ Component, pageProps }: AppProps) {
/>
<link rel="icon" href="/favicon.ico" />
</Head>
<Script src="/js/qrcode.min.js" strategy="lazyOnload"></Script>
<Script src="/js/pdf.js" strategy="lazyOnload"></Script>
<Script src="/js/html2pdf.bundle.min.js" strategy="lazyOnload"></Script>
<Script src="/js/particles.js" strategy="lazyOnload"></Script>
<Script src="/js/qrcode.min.js" strategy="afterInteractive"></Script>
<Script src="/js/pdf.js" strategy="afterInteractive"></Script>
<Script src="/js/html2pdf.bundle.min.js" strategy="afterInteractive"></Script>
{baiduTongji && <Script src="/js/baidutongji.js" strategy="afterInteractive"></Script>}
{googleVerKey && (
<Script
src={`https://www.recaptcha.net/recaptcha/api.js?render=${googleVerKey}`}
strategy="lazyOnload"
></Script>
<>
<Script
src={`https://www.recaptcha.net/recaptcha/api.js?render=${googleVerKey}`}
strategy="afterInteractive"
></Script>
<Script
src={`https://www.google.com/recaptcha/api.js?render=${googleVerKey}`}
strategy="afterInteractive"
></Script>
</>
)}
<Script src="/js/particles.js"></Script>
<QueryClientProvider client={queryClient}>
<ChakraProvider theme={theme}>
<ColorModeScript initialColorMode={theme.config.initialColorMode} />
@@ -72,6 +79,5 @@ export default function App({ Component, pageProps }: AppProps) {
);
}
// export function reportWebVitals(metric: NextWebVitalsMetric) {
// console.log(metric);
// }
// @ts-ignore
export default App;

View File

@@ -1,12 +1,10 @@
function Error({ errStr }: { errStr: string }) {
return <p>{errStr}</p>;
function Error() {
return (
<p>
safari chrome
</p>
);
}
Error.getInitialProps = ({ res, err }: { res: any; err: any }) => {
console.log(err);
return {
errStr: `部分系统不兼容,导致页面崩溃。如果可以,请联系作者,反馈下具体操作和页面。大部分是 苹果 的 safari 浏览器导致,可以尝试更换 chrome 浏览器。`
};
};
export default Error;

View File

@@ -1,195 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authChat } from '@/service/utils/auth';
import { modelServiceToolMap } from '@/service/utils/chat';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { resStreamResponse } from '@/service/utils/chat';
import { appKbSearch } from '../openapi/kb/appKbSearch';
import { ChatRoleEnum, QUOTE_LEN_HEADER, GUIDE_PROMPT_HEADER } from '@/constants/chat';
import { BillTypeEnum } from '@/constants/user';
import { sensitiveCheck } from '@/service/api/text';
import { NEW_CHATID_HEADER } from '@/constants/chat';
import { saveChat } from './saveChat';
import { Types } from 'mongoose';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
res.on('close', () => {
res.end();
});
res.on('error', () => {
console.log('error: ', 'request error');
res.end();
});
try {
const { chatId, prompt, modelId } = req.body as {
prompt: [ChatItemType, ChatItemType];
modelId: string;
chatId?: string;
};
if (!modelId || !prompt) {
throw new Error('缺少参数');
}
await connectToDatabase();
let startTime = Date.now();
const { model, showModelDetail, content, userOpenAiKey, systemAuthKey, userId } =
await authChat({
modelId,
chatId,
req
});
const modelConstantsData = ChatModelMap[model.chat.chatModel];
// 读取对话内容
const prompts = [...content, prompt[0]];
const {
code = 200,
systemPrompts = [],
quote = [],
guidePrompt = ''
} = await (async () => {
// 使用了知识库搜索
if (model.chat.relatedKbs.length > 0) {
const { code, searchPrompts, rawSearch, guidePrompt } = await appKbSearch({
model,
userId,
fixedQuote: content[content.length - 1]?.quote || [],
prompt: prompt[0],
similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity
});
return {
code,
quote: rawSearch,
systemPrompts: searchPrompts,
guidePrompt
};
}
if (model.chat.systemPrompt) {
return {
guidePrompt: model.chat.systemPrompt,
systemPrompts: [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
};
}
return {};
})();
// get conversationId. create a newId if it is null
const conversationId = chatId || String(new Types.ObjectId());
!chatId && res.setHeader(NEW_CHATID_HEADER, conversationId);
if (showModelDetail) {
guidePrompt && res.setHeader(GUIDE_PROMPT_HEADER, encodeURIComponent(guidePrompt));
res.setHeader(QUOTE_LEN_HEADER, quote.length);
}
// search result is empty
if (code === 201) {
const response = systemPrompts[0]?.value;
await saveChat({
chatId,
newChatId: conversationId,
modelId,
prompts: [
prompt[0],
{
...prompt[1],
quote: [],
value: response
}
],
userId
});
return res.end(response);
}
prompts.unshift(...systemPrompts);
// content check
await sensitiveCheck({
input: [...systemPrompts, prompt[0]].map((item) => item.value).join('')
});
// 计算温度
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
);
// 发出 chat 请求
const { streamResponse, responseMessages } = await modelServiceToolMap[
model.chat.chatModel
].chatCompletion({
apiKey: userOpenAiKey || systemAuthKey,
temperature: +temperature,
messages: prompts,
stream: true,
res,
chatId: conversationId
});
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
if (res.closed) return res.end();
try {
const { totalTokens, finishMessages, responseContent } = await resStreamResponse({
model: model.chat.chatModel,
res,
chatResponse: streamResponse,
prompts: responseMessages
});
// save chat
await saveChat({
chatId,
newChatId: conversationId,
modelId,
prompts: [
prompt[0],
{
...prompt[1],
value: responseContent,
quote: showModelDetail ? quote : [],
systemPrompt: showModelDetail ? guidePrompt : ''
}
],
userId
});
res.end();
// 只有使用平台的 key 才计费
pushChatBill({
isPay: !userOpenAiKey,
chatModel: model.chat.chatModel,
userId,
chatId: conversationId,
textLen: finishMessages.map((item) => item.value).join('').length,
tokens: totalTokens,
type: BillTypeEnum.chat
});
} catch (error) {
res.end();
console.log('error结束', error);
}
} catch (err: any) {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}

View File

@@ -6,10 +6,16 @@ import { Types } from 'mongoose';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
let { chatId, historyId, quoteId } = req.query as {
let {
chatId,
historyId,
quoteId,
sourceText = ''
} = req.query as {
chatId: string;
historyId: string;
quoteId: string;
sourceText: string;
};
await connectToDatabase();
@@ -27,7 +33,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
},
{
$set: {
'content.$.quote.$[quoteElem].source': '手动修改'
'content.$.quote.$[quoteElem].source': sourceText
}
},
{

View File

@@ -6,7 +6,6 @@ import { authUser } from '@/service/utils/auth';
import { ChatItemType } from '@/types/chat';
import { authModel } from '@/service/utils/auth';
import mongoose from 'mongoose';
import { ModelStatusEnum } from '@/constants/model';
import type { ModelSchema } from '@/types/mongoSchema';
/* 初始化我的聊天框,需要身份验证 */
@@ -21,32 +20,32 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await connectToDatabase();
let model: ModelSchema;
// 没有 modelId 时直接获取用户的第一个id
if (!modelId) {
const myModel = await Model.findOne({ userId });
if (!myModel) {
const { _id } = await Model.create({
name: '应用1',
userId,
status: ModelStatusEnum.running
});
model = (await Model.findById(_id)) as ModelSchema;
const model = await (async () => {
if (!modelId) {
const myModel = await Model.findOne({ userId });
if (!myModel) {
const { _id } = await Model.create({
name: '应用1',
userId
});
return (await Model.findById(_id)) as ModelSchema;
} else {
return myModel;
}
} else {
model = myModel;
// 校验使用权限
const authRes = await authModel({
modelId,
userId,
authUser: false,
authOwner: false
});
return authRes.model;
}
modelId = model._id;
} else {
// 校验使用权限
const authRes = await authModel({
modelId,
userId,
authUser: false,
authOwner: false
});
model = authRes.model;
}
})();
modelId = modelId || model._id;
// 历史记录
let history: ChatItemType[] = [];
@@ -88,6 +87,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
]);
}
const isOwner = String(model.userId) === userId;
jsonRes<InitChatResponse>(res, {
data: {
chatId: chatId || '',
@@ -95,10 +96,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
model: {
name: model.name,
avatar: model.avatar,
intro: model.share.intro,
canUse: model.share.isShare || String(model.userId) === userId
intro: model.intro,
canUse: model.share.isShare || isOwner
},
chatModel: model.chat.chatModel,
systemPrompt: isOwner ? model.chat.systemPrompt : '',
limitPrompt: isOwner ? model.chat.limitPrompt : '',
history
}
});

View File

@@ -4,10 +4,9 @@ import { ChatItemType } from '@/types/chat';
import { connectToDatabase, Chat, Model } from '@/service/mongo';
import { authModel } from '@/service/utils/auth';
import { authUser } from '@/service/utils/auth';
import mongoose from 'mongoose';
import { Types } from 'mongoose';
type Props = {
newChatId?: string;
chatId?: string;
modelId: string;
prompts: [ChatItemType, ChatItemType];
@@ -16,7 +15,7 @@ type Props = {
/* 聊天内容存存储 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { chatId, modelId, prompts, newChatId } = req.body as Props;
const { chatId, modelId, prompts } = req.body as Props;
if (!prompts) {
throw new Error('缺少参数');
@@ -24,16 +23,15 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const { userId } = await authUser({ req, authToken: true });
const nId = await saveChat({
const response = await saveChat({
chatId,
modelId,
prompts,
newChatId,
userId
});
jsonRes(res, {
data: nId
data: response
});
} catch (err) {
jsonRes(res, {
@@ -44,25 +42,31 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
}
export async function saveChat({
chatId,
newChatId,
chatId,
modelId,
prompts,
userId
}: Props & { userId: string }) {
}: Props & { newChatId?: Types.ObjectId; userId: string }) {
await connectToDatabase();
const { model } = await authModel({ modelId, userId, authOwner: false });
const content = prompts.map((item) => ({
_id: item._id ? new mongoose.Types.ObjectId(item._id) : undefined,
_id: item._id,
obj: item.obj,
value: item.value,
systemPrompt: item.systemPrompt,
systemPrompt: item.systemPrompt || '',
quote: item.quote || []
}));
const [id] = await Promise.all([
...(chatId // update chat
if (String(model.userId) === userId) {
await Model.findByIdAndUpdate(modelId, {
updateTime: new Date()
});
}
const [response] = await Promise.all([
...(chatId
? [
Chat.findByIdAndUpdate(chatId, {
$push: {
@@ -73,17 +77,21 @@ export async function saveChat({
title: content[0].value.slice(0, 20),
latestChat: content[1].value,
updateTime: new Date()
}).then(() => '')
}).then(() => ({
newChatId: ''
}))
]
: [
Chat.create({
_id: newChatId ? new mongoose.Types.ObjectId(newChatId) : undefined,
_id: newChatId,
userId,
modelId,
content,
title: content[0].value.slice(0, 20),
latestChat: content[1].value
}).then((res) => res._id)
}).then((res) => ({
newChatId: String(res._id)
}))
]),
// update model
...(String(model.userId) === userId
@@ -96,6 +104,6 @@ export async function saveChat({
]);
return {
id
...response
};
}

View File

@@ -1,140 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authShareChat } from '@/service/utils/auth';
import { modelServiceToolMap } from '@/service/utils/chat';
import { ChatItemSimpleType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
import { pushChatBill, updateShareChatBill } from '@/service/events/pushBill';
import { resStreamResponse } from '@/service/utils/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { BillTypeEnum } from '@/constants/user';
import { sensitiveCheck } from '@/service/api/text';
import { appKbSearch } from '../../openapi/kb/appKbSearch';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
res.on('error', () => {
console.log('error: ', 'request error');
res.end();
});
try {
const { shareId, password, historyId, prompts } = req.body as {
prompts: ChatItemSimpleType[];
password: string;
shareId: string;
historyId: string;
};
if (!historyId || !prompts) {
throw new Error('分享链接无效');
}
await connectToDatabase();
let startTime = Date.now();
const { model, userOpenAiKey, systemAuthKey, userId } = await authShareChat({
shareId,
password
});
const modelConstantsData = ChatModelMap[model.chat.chatModel];
const { code = 200, systemPrompts = [] } = await (async () => {
// 使用了知识库搜索
if (model.chat.relatedKbs.length > 0) {
const { code, searchPrompts } = await appKbSearch({
model,
userId,
fixedQuote: [],
prompt: prompts[prompts.length - 1],
similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity
});
return {
code,
systemPrompts: searchPrompts
};
}
if (model.chat.systemPrompt) {
return {
systemPrompts: [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
};
}
return {};
})();
// search result is empty
if (code === 201) {
return res.send(systemPrompts[0]?.value);
}
prompts.unshift(...systemPrompts);
// content check
await sensitiveCheck({
input: [...systemPrompts, prompts[prompts.length - 1]].map((item) => item.value).join('')
});
// 计算温度
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
);
// 发出请求
const { streamResponse, responseMessages } = await modelServiceToolMap[
model.chat.chatModel
].chatCompletion({
apiKey: userOpenAiKey || systemAuthKey,
temperature: +temperature,
messages: prompts,
stream: true,
res,
chatId: historyId
});
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
if (res.closed) return res.end();
try {
const { totalTokens, finishMessages } = await resStreamResponse({
model: model.chat.chatModel,
res,
chatResponse: streamResponse,
prompts: responseMessages
});
res.end();
/* bill */
pushChatBill({
isPay: !userOpenAiKey,
chatModel: model.chat.chatModel,
userId,
textLen: finishMessages.map((item) => item.value).join('').length,
tokens: totalTokens,
type: BillTypeEnum.chat
});
updateShareChatBill({
shareId,
tokens: totalTokens
});
} catch (error) {
res.end();
console.log('error结束', error);
}
} catch (err: any) {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}

View File

@@ -50,7 +50,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
model: {
name: model.name,
avatar: model.avatar,
intro: model.share.intro
intro: model.intro
},
chatModel: model.chat.chatModel
}

View File

@@ -3,7 +3,6 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { ModelStatusEnum } from '@/constants/model';
import { Model } from '@/service/models/model';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -25,15 +24,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const authCount = await Model.countDocuments({
userId
});
if (authCount >= 30) {
throw new Error('上限 30 个应用');
if (authCount >= 50) {
throw new Error('上限 50 个应用');
}
// 创建模型
const response = await Model.create({
name,
userId,
status: ModelStatusEnum.running
userId
});
jsonRes(res, {

View File

@@ -18,14 +18,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
{
userId
},
'_id avatar name chat.systemPrompt'
'_id avatar name intro'
).sort({
updateTime: -1
}),
Collection.find({ userId })
.populate({
path: 'modelId',
select: '_id avatar name chat.systemPrompt',
select: '_id avatar name intro',
match: { 'share.isShare': true }
})
.then((res) => res.filter((item) => item.modelId))
@@ -37,14 +37,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
_id: item._id,
name: item.name,
avatar: item.avatar,
systemPrompt: item.chat.systemPrompt
intro: item.intro
})),
myCollectionModels: myCollections
.map((item: any) => ({
_id: item.modelId?._id,
name: item.modelId?.name,
avatar: item.modelId?.avatar,
systemPrompt: item.modelId?.chat.systemPrompt
intro: item.modelId?.intro
}))
.filter((item) => !myModels.find((model) => String(model._id) === String(item._id))) // 去重
}

View File

@@ -31,7 +31,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
$and: [
{ 'share.isShare': true },
{
$or: [{ name: { $regex: regex } }, { 'share.intro': { $regex: regex } }]
$or: [{ name: { $regex: regex } }, { intro: { $regex: regex } }]
}
]
};
@@ -66,6 +66,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
avatar: { $ifNull: ['$avatar', '/icon/logo.png'] },
name: 1,
userId: 1,
intro: 1,
share: 1,
isCollection: {
$cond: {
@@ -77,7 +78,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
}
},
{
$sort: { 'share.collection': -1 }
$sort: { 'share.topNum': -1, 'share.collection': -1 }
},
{
$skip: (pageNum - 1) * pageSize

View File

@@ -9,10 +9,10 @@ import { authModel } from '@/service/utils/auth';
/* 获取我的模型 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
const { name, avatar, chat, share } = req.body as ModelUpdateParams;
const { name, avatar, chat, share, intro } = req.body as ModelUpdateParams;
const { modelId } = req.query as { modelId: string };
if (!name || !chat || !modelId) {
if (!modelId) {
throw new Error('参数错误');
}
@@ -35,10 +35,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
{
name,
avatar,
intro,
chat,
'share.isShare': share.isShare,
'share.isShareDetail': share.isShareDetail,
'share.intro': share.intro
...(share && {
'share.isShare': share.isShare,
'share.isShareDetail': share.isShareDetail
})
}
);

View File

@@ -2,16 +2,13 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authUser, authModel, getApiKey } from '@/service/utils/auth';
import { modelServiceToolMap, resStreamResponse } from '@/service/utils/chat';
import { ChatItemSimpleType } from '@/types/chat';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
import { ChatModelMap } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { ChatRoleEnum } from '@/constants/chat';
import { withNextCors } from '@/service/utils/tools';
import { BillTypeEnum } from '@/constants/user';
import { sensitiveCheck } from '@/service/api/text';
import { NEW_CHATID_HEADER } from '@/constants/chat';
import { Types } from 'mongoose';
import { appKbSearch } from '../kb/appKbSearch';
/* 发送提示词 */
@@ -32,7 +29,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
isStream = true
} = req.body as {
chatId?: string;
prompts: ChatItemSimpleType[];
prompts: ChatItemType[];
modelId: string;
isStream: boolean;
};
@@ -66,67 +63,78 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
});
const modelConstantsData = ChatModelMap[model.chat.chatModel];
const prompt = prompts[prompts.length - 1];
let systemPrompts: {
obj: ChatRoleEnum;
value: string;
}[] = [];
const {
userSystemPrompt = [],
userLimitPrompt = [],
quotePrompt = []
} = await (async () => {
// 使用了知识库搜索
if (model.chat.relatedKbs?.length > 0) {
const { quotePrompt, userSystemPrompt, userLimitPrompt } = await appKbSearch({
model,
userId,
fixedQuote: [],
prompt: prompt,
similarity: model.chat.searchSimilarity,
limit: model.chat.searchLimit
});
// 使用了知识库搜索
if (model.chat.relatedKbs.length > 0) {
const { code, searchPrompts } = await appKbSearch({
model,
userId,
fixedQuote: [],
prompt: prompts[prompts.length - 1],
similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity
});
// search result is empty
if (code === 201) {
return isStream
? res.send(searchPrompts[0]?.value)
: jsonRes(res, {
data: searchPrompts[0]?.value,
message: searchPrompts[0]?.value
});
return {
userSystemPrompt,
userLimitPrompt,
quotePrompt: [quotePrompt]
};
}
return {
userSystemPrompt: model.chat.systemPrompt
? [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
: [],
userLimitPrompt: model.chat.limitPrompt
? [
{
obj: ChatRoleEnum.Human,
value: model.chat.limitPrompt
}
]
: []
};
})();
systemPrompts = searchPrompts;
} else if (model.chat.systemPrompt) {
systemPrompts = [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
];
// search result is empty
if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
const response = model.chat.searchEmptyText;
return res.end(response);
}
prompts.unshift(...systemPrompts);
// content check
await sensitiveCheck({
input: [...systemPrompts, prompts[prompts.length - 1]].map((item) => item.value).join('')
});
// 读取对话内容
const completePrompts = [
...quotePrompt,
...userSystemPrompt,
...prompts.slice(0, -1),
...userLimitPrompt,
prompt
];
// 计算温度
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
);
// get conversationId. create a newId if it is null
const conversationId = chatId || String(new Types.ObjectId());
!chatId && res?.setHeader(NEW_CHATID_HEADER, conversationId);
// 发出请求
const { streamResponse, responseMessages, responseText, totalTokens } =
await modelServiceToolMap[model.chat.chatModel].chatCompletion({
apiKey,
temperature: +temperature,
messages: prompts,
messages: completePrompts,
stream: isStream,
res,
chatId: conversationId
res
});
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);

View File

@@ -18,7 +18,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
({ _id, apiKey, createTime, lastUsedTime }) => {
return {
id: _id,
apiKey: `${apiKey.substring(0, 2)}******${apiKey.substring(apiKey.length - 2)}`,
apiKey: `******${apiKey.substring(apiKey.length - 4)}`,
createTime,
lastUsedTime
};

View File

@@ -3,9 +3,8 @@ import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import type { ChatItemSimpleType } from '@/types/chat';
import type { ChatItemType } from '@/types/chat';
import type { ModelSchema } from '@/types/mongoSchema';
import { appVectorSearchModeEnum } from '@/constants/model';
import { authModel } from '@/service/utils/auth';
import { ChatModelMap } from '@/constants/model';
import { ChatRoleEnum } from '@/constants/chat';
@@ -19,18 +18,25 @@ export type QuoteItemType = {
source?: string;
};
type Props = {
prompts: ChatItemSimpleType[];
prompts: ChatItemType[];
similarity: number;
limit: number;
appId: string;
};
type Response = {
code: 200 | 201;
rawSearch: QuoteItemType[];
guidePrompt: string;
searchPrompts: {
userSystemPrompt: {
obj: ChatRoleEnum;
value: string;
}[];
userLimitPrompt: {
obj: ChatRoleEnum;
value: string;
}[];
quotePrompt: {
obj: ChatRoleEnum;
value: string;
};
};
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -41,7 +47,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
throw new Error('userId is empty');
}
const { prompts, similarity, appId } = req.body as Props;
const { prompts, similarity, limit, appId } = req.body as Props;
if (!similarity || !Array.isArray(prompts) || !appId) {
throw new Error('params is error');
@@ -58,7 +64,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
userId,
fixedQuote: [],
prompt: prompts[prompts.length - 1],
similarity
similarity,
limit
});
jsonRes<Response>(res, {
@@ -76,23 +83,24 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
export async function appKbSearch({
model,
userId,
fixedQuote,
fixedQuote = [],
prompt,
similarity
similarity = 0.8,
limit = 5
}: {
model: ModelSchema;
userId: string;
fixedQuote: QuoteItemType[];
prompt: ChatItemSimpleType;
fixedQuote?: QuoteItemType[];
prompt: ChatItemType;
similarity: number;
limit: number;
}): Promise<Response> {
const modelConstantsData = ChatModelMap[model.chat.chatModel];
// get vector
const promptVector = await openaiEmbedding({
userId,
input: [prompt.value],
type: 'chat'
input: [prompt.value]
});
// search kb
@@ -103,7 +111,7 @@ export async function appKbSearch({
.map((item) => `'${item}'`)
.join(',')}) AND vector <#> '[${promptVector[0]}]' < -${similarity} order by vector <#> '[${
promptVector[0]
}]' limit 8;
}]' limit ${limit};
COMMIT;`
);
@@ -115,7 +123,7 @@ export async function appKbSearch({
...searchRes.slice(0, 3),
...fixedQuote.slice(0, 2),
...searchRes.slice(3),
...fixedQuote.slice(2, 5)
...fixedQuote.slice(2, Math.floor(fixedQuote.length * 0.4))
].filter((item) => {
if (idSet.has(item.id)) {
return false;
@@ -125,86 +133,52 @@ export async function appKbSearch({
});
// 计算固定提示词的 token 数量
const guidePrompt = model.chat.systemPrompt // user system prompt
? {
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
: model.chat.searchMode === appVectorSearchModeEnum.noContext
? {
obj: ChatRoleEnum.System,
value: `知识库是关于"${model.name}"的内容,根据知识库内容回答问题.`
}
: {
obj: ChatRoleEnum.System,
value: `玩一个问答游戏,规则为:
1.你完全忘记你已有的知识
2.你只回答关于"${model.name}"的问题
3.你只从知识库中选择内容进行回答
4.如果问题不在知识库中,你会回答:"我不知道。"
请务必遵守规则`
};
const userSystemPrompt = model.chat.systemPrompt // user system prompt
? [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
: [];
const userLimitPrompt = [
{
obj: ChatRoleEnum.Human,
value: model.chat.limitPrompt
? model.chat.limitPrompt
: `知识库是关于 ${model.name} 的内容,参考知识库回答问题。与 "${model.name}" 无关内容,直接回复: "我不知道"。`
}
];
const fixedSystemTokens = modelToolMap[model.chat.chatModel].countTokens({
messages: [guidePrompt]
messages: [...userSystemPrompt, ...userLimitPrompt]
});
// filter part quote by maxToken
const sliceResult = modelToolMap[model.chat.chatModel]
.tokenSlice({
maxToken: modelConstantsData.systemMaxToken - fixedSystemTokens,
messages: filterSearch.map((item) => ({
messages: filterSearch.map((item, i) => ({
obj: ChatRoleEnum.System,
value: `${item.q}\n${item.a}`
value: `${i + 1}: [${item.q}\n${item.a}]`
}))
})
.map((item) => item.value);
.map((item) => item.value)
.join('\n')
.trim();
// slice filterSearch
const rawSearch = filterSearch.slice(0, sliceResult.length);
// system prompt
const systemPrompt = sliceResult.join('\n').trim();
/* 高相似度+不回复 */
if (!systemPrompt && model.chat.searchMode === appVectorSearchModeEnum.hightSimilarity) {
return {
code: 201,
rawSearch: [],
guidePrompt: '',
searchPrompts: [
{
obj: ChatRoleEnum.System,
value: '对不起,你的问题不在知识库中。'
}
]
};
}
/* 高相似度+无上下文,不添加额外知识,仅用系统提示词 */
if (!systemPrompt && model.chat.searchMode === appVectorSearchModeEnum.noContext) {
return {
code: 200,
rawSearch: [],
guidePrompt: model.chat.systemPrompt || '',
searchPrompts: model.chat.systemPrompt
? [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
: []
};
}
const quoteText = sliceResult ? `知识库:\n${sliceResult}` : '';
return {
code: 200,
rawSearch,
guidePrompt: guidePrompt.value || '',
searchPrompts: [
{
obj: ChatRoleEnum.System,
value: `知识库:<${systemPrompt}>`
},
guidePrompt
]
userSystemPrompt,
userLimitPrompt,
quotePrompt: {
obj: ChatRoleEnum.System,
value: quoteText
}
};
}

View File

@@ -8,6 +8,7 @@ import { TrainingModeEnum } from '@/constants/plugin';
import { startQueue } from '@/service/utils/tools';
import { PgClient } from '@/service/pg';
import { modelToolMap } from '@/utils/plugin';
import { OpenAiChatEnum } from '@/constants/model';
type DateItemType = { a: string; q: string; source?: string };
@@ -23,8 +24,8 @@ export type Response = {
};
const modeMaxToken = {
[TrainingModeEnum.index]: 700,
[TrainingModeEnum.qa]: 3300
[TrainingModeEnum.index]: 6000,
[TrainingModeEnum.qa]: 10000
};
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -75,14 +76,17 @@ export async function pushDataToKb({
data.forEach((item) => {
const text = item.q + item.a;
// count token
const token = modelToolMap['gpt-3.5-turbo'].countTokens({
messages: [{ obj: 'System', value: item.q }]
});
if (mode === TrainingModeEnum.qa) {
// count token
const token = modelToolMap[OpenAiChatEnum.GPT3516k].countTokens({
messages: [{ obj: 'System', value: item.q }]
});
if (token > modeMaxToken[TrainingModeEnum.qa]) {
return;
}
}
if (mode === TrainingModeEnum.qa && token > modeMaxToken[TrainingModeEnum.qa]) {
console.log('q is too long');
} else if (!set.has(text)) {
if (!set.has(text)) {
filterData.push(item);
set.add(text);
}

View File

@@ -0,0 +1,54 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import { openaiEmbedding } from '../plugin/openaiEmbedding';
import type { KbTestItemType } from '@/types/plugin';
export type Props = {
kbId: string;
text: string;
};
export type Response = KbTestItemType['results'];
// POST handler: knowledge-base vector search "test" endpoint.
// Embeds the probe `text` and returns the 12 nearest records in the kb,
// scored by inner product.
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    const { kbId, text } = req.body as Props;
    if (!kbId || !text) {
      throw new Error('缺少参数');
    }
    // SECURITY: kbId is interpolated into raw SQL below. Restrict it to a
    // 24-char hex ObjectId so request input cannot inject SQL.
    if (!/^[a-f0-9]{24}$/i.test(kbId)) {
      throw new Error('缺少参数');
    }
    // credential check
    const { userId } = await authUser({ req });
    if (!userId) {
      throw new Error('缺少用户ID');
    }
    // embed the probe text (single-element batch; we only use vector[0])
    const vector = await openaiEmbedding({
      userId,
      input: [text]
    });
    // pgvector `<#>` yields the NEGATIVE inner product, hence `* -1`
    // to present a positive similarity score.
    const response: any = await PgClient.query(
      `BEGIN;
    SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
    select id,q,a,source,(vector <#> '[${
      vector[0]
    }]') * -1 AS score from modelData where kb_id='${kbId}' AND user_id='${userId}' order by vector <#> '[${
      vector[0]
    }]' limit 12;
    COMMIT;`
    );
    // multi-statement result: index 2 is the SELECT inside BEGIN/COMMIT
    jsonRes<Response>(res, { data: response?.[2]?.rows || [] });
  } catch (err) {
    console.log(err);
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
});

View File

@@ -21,8 +21,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
if (q) {
return openaiEmbedding({
userId,
input: [q],
type: 'chat'
input: [q]
});
}
return [];

View File

@@ -1,30 +1,29 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser, getApiKey } from '@/service/utils/auth';
import { authUser, getApiKey, getSystemOpenAiKey } from '@/service/utils/auth';
import { withNextCors } from '@/service/utils/tools';
import { getOpenAIApi } from '@/service/utils/chat/openai';
import { embeddingModel } from '@/constants/model';
import { axiosConfig } from '@/service/utils/tools';
import { pushGenerateVectorBill } from '@/service/events/pushBill';
import { ApiKeyType } from '@/service/utils/auth';
import { OpenAiChatEnum } from '@/constants/model';
type Props = {
input: string[];
type?: ApiKeyType;
};
type Response = number[][];
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
const { userId } = await authUser({ req });
let { input, type } = req.query as Props;
let { input } = req.query as Props;
if (!Array.isArray(input)) {
throw new Error('缺少参数');
}
jsonRes<Response>(res, {
data: await openaiEmbedding({ userId, input, type, mustPay: true })
data: await openaiEmbedding({ userId, input, mustPay: true })
});
} catch (err) {
console.log(err);
@@ -38,18 +37,12 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
export async function openaiEmbedding({
userId,
input,
mustPay = false,
type = 'chat'
mustPay = false
}: { userId: string; mustPay?: boolean } & Props) {
const { userOpenAiKey, systemAuthKey } = await getApiKey({
model: 'gpt-3.5-turbo',
userId,
mustPay,
type
});
const apiKey = getSystemOpenAiKey();
// 获取 chatAPI
const chatAPI = getOpenAIApi();
const chatAPI = getOpenAIApi(apiKey);
// 把输入的内容转成向量
const result = await chatAPI
@@ -60,16 +53,22 @@ export async function openaiEmbedding({
},
{
timeout: 60000,
...axiosConfig(userOpenAiKey || systemAuthKey)
...axiosConfig(apiKey)
}
)
.then((res) => ({
tokenLen: res.data.usage.total_tokens || 0,
vectors: res.data.data.map((item) => item.embedding)
}));
.then((res) => {
if (!res.data?.usage?.total_tokens) {
// @ts-ignore
return Promise.reject(res.data?.error?.message || 'Embedding Error');
}
return {
tokenLen: res.data.usage.total_tokens || 0,
vectors: res.data.data.map((item) => item.embedding)
};
});
pushGenerateVectorBill({
isPay: !userOpenAiKey,
isPay: mustPay,
userId,
text: input.join(''),
tokenLen: result.tokenLen

View File

@@ -4,7 +4,7 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase, OpenApi } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { customAlphabet } from 'nanoid';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890');
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 24);
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
@@ -14,11 +14,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const count = await OpenApi.find({ userId }).countDocuments();
if (count >= 5) {
throw new Error('最多 5 组API Key');
if (count >= 10) {
throw new Error('最多 10 API 秘钥');
}
const apiKey = `${userId}-${nanoid()}`;
const apiKey = `fastgpt-${nanoid()}`;
await OpenApi.create({
userId,

View File

@@ -2,17 +2,18 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import type { ChatItemSimpleType } from '@/types/chat';
import type { ChatItemType } from '@/types/chat';
import { countOpenAIToken } from '@/utils/plugin/openai';
import { OpenAiChatEnum } from '@/constants/model';
type ModelType = 'gpt-3.5-turbo' | 'gpt-4' | 'gpt-4-32k';
type ModelType = `${OpenAiChatEnum}`;
type Props = {
messages: ChatItemSimpleType[];
messages: ChatItemType[];
model: ModelType;
maxLen: number;
};
type Response = ChatItemSimpleType[];
type Response = ChatItemType[];
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
@@ -44,11 +45,11 @@ export function gpt_chatItemTokenSlice({
model,
maxToken
}: {
messages: ChatItemSimpleType[];
messages: ChatItemType[];
model: ModelType;
maxToken: number;
}) {
let result: ChatItemSimpleType[] = [];
let result: ChatItemType[] = [];
for (let i = 0; i < messages.length; i++) {
const msgs = [...result, messages[i]];

View File

@@ -2,43 +2,23 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser, getSystemOpenAiKey } from '@/service/utils/auth';
import type { TextPluginRequestParams } from '@/types/plugin';
import axios from 'axios';
import { axiosConfig } from '@/service/utils/tools';
export type Props = {
input: string;
};
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
if (!global.systemEnv.sensitiveCheck) {
return jsonRes(res);
}
await authUser({ req });
const { input } = req.body as TextPluginRequestParams;
const result = await sensitiveCheck(req.body);
const response = await axios({
...axiosConfig(getSystemOpenAiKey('chat')),
method: 'POST',
url: `/moderations`,
data: {
input
}
jsonRes(res, {
data: result,
message: result
});
const data = (response.data.results?.[0]?.category_scores as Record<string, number>) || {};
const values = Object.values(data);
for (const val of values) {
if (val > 0.2) {
return jsonRes(res, {
code: 500,
message: '您的内容不合规'
});
}
}
jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
@@ -46,3 +26,30 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
});
}
}
/**
 * Run `input` through the OpenAI moderation endpoint.
 *
 * Resolves with an empty string when the content is clean (or when
 * moderation is disabled system-wide); rejects with a user-facing
 * message when any moderation category score exceeds 0.2.
 */
export async function sensitiveCheck({ input }: Props) {
  // Moderation disabled system-wide: treat everything as clean.
  if (!global.systemEnv.sensitiveCheck) {
    return '';
  }
  const { data } = await axios({
    ...axiosConfig(getSystemOpenAiKey()),
    method: 'POST',
    url: `/moderations`,
    data: { input }
  });
  const scores = (data.results?.[0]?.category_scores as Record<string, number>) || {};
  // Any single category above the 0.2 threshold flags the whole input.
  const flagged = Object.values(scores).some((score) => score > 0.2);
  if (flagged) {
    return Promise.reject('您的内容不合规');
  }
  return '';
}

View File

@@ -0,0 +1,335 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authUser, authModel, getApiKey, authShareChat } from '@/service/utils/auth';
import { modelServiceToolMap, V2_StreamResponse } from '@/service/utils/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap } from '@/constants/model';
import { pushChatBill, updateShareChatBill } from '@/service/events/pushBill';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { withNextCors } from '@/service/utils/tools';
import { BillTypeEnum } from '@/constants/user';
import { appKbSearch } from '../../../openapi/kb/appKbSearch';
import type { CreateChatCompletionRequest } from 'openai';
import { gptMessage2ChatType, textAdaptGptResponse } from '@/utils/adapt';
import { getChatHistory } from './getHistory';
import { saveChat } from '@/pages/api/chat/saveChat';
import { sseResponse } from '@/service/utils/tools';
import { type ChatCompletionRequestMessage } from 'openai';
import { Types } from 'mongoose';
import { sensitiveCheck } from '../../text/sensitiveCheck';
export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };
type FastGptWebChatProps = {
chatId?: string; // undefined: nonuse history, '': new chat, 'xxxxx': use history
appId?: string;
};
type FastGptShareChatProps = {
password?: string;
shareId?: string;
};
export type Props = CreateChatCompletionRequest &
FastGptWebChatProps &
FastGptShareChatProps & {
messages: MessageItemType[];
};
export type ChatResponseType = {
newChatId: string;
quoteLen?: number;
};
/* 发送提示词 */
// v2 chat-completion endpoint. Flow: auth (web token / share link) ->
// load history -> optional knowledge-base search -> moderation -> model
// call (stream or JSON) -> save history -> billing.
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse) {
  // Make sure the response is always closed, even on client abort.
  res.on('close', () => {
    res.end();
  });
  res.on('error', () => {
    console.log('error: ', 'request error');
    res.end();
  });
  let { chatId, appId, shareId, password = '', stream = false, messages = [] } = req.body as Props;
  // step = 1 once SSE headers were sent; controls error-reporting mode in catch.
  let step = 0;
  try {
    if (!messages) {
      throw new Error('Prams Error');
    }
    if (!Array.isArray(messages)) {
      throw new Error('messages is not array');
    }
    await connectToDatabase();
    let startTime = Date.now();
    /* user auth: share link (password-protected) or logged-in user */
    const {
      userId,
      appId: authAppid,
      authType
    } = await (shareId
      ? authShareChat({
          shareId,
          password
        })
      : authUser({ req }));
    // Fall back to the appId bound to the share link.
    appId = appId ? appId : authAppid;
    if (!appId) {
      throw new Error('appId is empty');
    }
    // auth app permission
    const { model, showModelDetail } = await authModel({
      userId,
      modelId: appId,
      authOwner: false,
      reserveDetail: true
    });
    // Never expose app internals (quotes/system prompt) through a share link.
    const showAppDetail = !shareId && showModelDetail;
    /* get api key — user's own key when present, otherwise the system key */
    const { systemAuthKey: apiKey, userOpenAiKey } = await getApiKey({
      model: model.chat.chatModel,
      userId,
      mustPay: authType !== 'token'
    });
    // get history
    const { history } = await getChatHistory({ chatId, userId });
    const prompts = history.concat(gptMessage2ChatType(messages));
    // adapt fastgpt web: drop a trailing AI placeholder message
    if (prompts[prompts.length - 1].obj === 'AI') {
      prompts.pop();
    }
    // user question (last remaining prompt)
    const prompt = prompts[prompts.length - 1];
    const {
      rawSearch = [],
      userSystemPrompt = [],
      userLimitPrompt = [],
      quotePrompt = []
    } = await (async () => {
      // Knowledge-base search is enabled for this app
      if (model.chat.relatedKbs?.length > 0) {
        const { rawSearch, quotePrompt, userSystemPrompt, userLimitPrompt } = await appKbSearch({
          model,
          userId,
          fixedQuote: history[history.length - 1]?.quote,
          prompt,
          similarity: model.chat.searchSimilarity,
          limit: model.chat.searchLimit
        });
        return {
          rawSearch,
          userSystemPrompt,
          userLimitPrompt,
          quotePrompt: [quotePrompt]
        };
      }
      // No kb: build prompts straight from the app's configuration.
      return {
        userSystemPrompt: model.chat.systemPrompt
          ? [
              {
                obj: ChatRoleEnum.System,
                value: model.chat.systemPrompt
              }
            ]
          : [],
        userLimitPrompt: model.chat.limitPrompt
          ? [
              {
                obj: ChatRoleEnum.Human,
                value: model.chat.limitPrompt
              }
            ]
          : []
      };
    })();
    // search result is empty: short-circuit with the configured fallback text
    if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
      const response = model.chat.searchEmptyText;
      if (stream) {
        sseResponse({
          res,
          event: sseResponseEventEnum.answer,
          data: textAdaptGptResponse({
            text: response,
            model: model.chat.chatModel,
            finish_reason: 'stop'
          })
        });
        return res.end();
      } else {
        return res.json({
          id: chatId || '',
          model: model.chat.chatModel,
          usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
          choices: [
            { message: [{ role: 'assistant', content: response }], finish_reason: 'stop', index: 0 }
          ]
        });
      }
    }
    // api messages. [quote,context,systemPrompt,question]
    const completePrompts = [
      ...quotePrompt,
      ...userSystemPrompt,
      ...prompts.slice(0, -1),
      ...userLimitPrompt,
      prompt
    ];
    // chat temperature
    const modelConstantsData = ChatModelMap[model.chat.chatModel];
    // FastGpt temperature range: 1~10, mapped onto the model's max temperature
    const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
      2
    );
    // Moderation: rejects (throws) when the combined text is flagged.
    await sensitiveCheck({
      input: `${userSystemPrompt[0]?.value}\n${userLimitPrompt[0]?.value}\n${prompt.value}`
    });
    // start model api. responseText and totalTokens: valid only if stream = false
    const { streamResponse, responseMessages, responseText, totalTokens } =
      await modelServiceToolMap[model.chat.chatModel].chatCompletion({
        apiKey: userOpenAiKey || apiKey,
        temperature: +temperature,
        maxToken: model.chat.maxToken,
        messages: completePrompts,
        stream,
        res
      });
    console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
    // Client went away while the model was responding: nothing left to do.
    if (res.closed) return res.end();
    // create a chatId (only when the caller asked for a new conversation with '')
    const newChatId = chatId === '' ? new Types.ObjectId() : undefined;
    // response answer
    const {
      textLen = 0,
      answer = responseText,
      tokens = totalTokens
    } = await (async () => {
      if (stream) {
        // Set up the SSE response stream.
        // NOTE(review): 'charset-utf-8' looks like a typo for 'charset=utf-8' — confirm.
        res.setHeader('Content-Type', 'text/event-stream;charset-utf-8');
        res.setHeader('Access-Control-Allow-Origin', '*');
        res.setHeader('Transfer-Encoding', 'chunked');
        res.setHeader('X-Accel-Buffering', 'no');
        res.setHeader('Cache-Control', 'no-cache, no-transform');
        step = 1;
        try {
          // response newChatId and quota
          sseResponse({
            res,
            event: sseResponseEventEnum.chatResponse,
            data: JSON.stringify({
              newChatId,
              quoteLen: rawSearch.length
            })
          });
          // response answer
          const { finishMessages, totalTokens, responseContent } = await V2_StreamResponse({
            model: model.chat.chatModel,
            res,
            chatResponse: streamResponse,
            prompts: responseMessages
          });
          return {
            answer: responseContent,
            textLen: finishMessages.map((item) => item.value).join('').length,
            tokens: totalTokens
          };
        } catch (error) {
          return Promise.reject(error);
        }
      } else {
        // Non-stream: answer/tokens come from the destructuring defaults above.
        return {
          textLen: responseMessages.map((item) => item.value).join('').length
        };
      }
    })();
    // save chat history (chatId === undefined means "do not persist")
    if (typeof chatId === 'string') {
      await saveChat({
        newChatId,
        chatId,
        modelId: appId,
        prompts: [
          prompt,
          {
            _id: messages[messages.length - 1]._id,
            obj: ChatRoleEnum.AI,
            value: answer,
            ...(showAppDetail
              ? {
                  quote: rawSearch,
                  systemPrompt: `${userSystemPrompt[0]?.value}\n\n${userLimitPrompt[0]?.value}`
                }
              : {})
          }
        ],
        userId
      });
    }
    // close response
    if (stream) {
      res.end();
    } else {
      res.json({
        ...(showAppDetail
          ? {
              rawSearch
            }
          : {}),
        newChatId,
        id: chatId || '',
        model: model.chat.chatModel,
        usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: tokens },
        choices: [
          { message: [{ role: 'assistant', content: answer }], finish_reason: 'stop', index: 0 }
        ]
      });
    }
    // Billing: only charge when the system key was used.
    pushChatBill({
      isPay: !userOpenAiKey,
      chatModel: model.chat.chatModel,
      userId,
      textLen,
      tokens,
      type: authType === 'apikey' ? BillTypeEnum.openapiChat : BillTypeEnum.chat
    });
    shareId &&
      updateShareChatBill({
        shareId,
        tokens
      });
  } catch (err: any) {
    res.status(500);
    if (step === 1) {
      // SSE already started: report the error as an SSE event, then close.
      sseResponse({
        res,
        event: sseResponseEventEnum.error,
        data: JSON.stringify(err)
      });
      res.end();
    } else {
      jsonRes(res, {
        code: 500,
        error: err
      });
    }
  }
});

View File

@@ -0,0 +1,66 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { connectToDatabase, Chat } from '@/service/mongo';
import { Types } from 'mongoose';
import type { ChatItemType } from '@/types/chat';
export type Props = {
chatId?: string;
limit?: number;
};
export type Response = { history: ChatItemType[] };
// POST handler: return up to `limit` trailing messages of a chat for the
// authenticated user.
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    await connectToDatabase();
    const { userId } = await authUser({ req });
    const { chatId, limit } = req.body as Props;
    const data = await getChatHistory({ chatId, userId, limit });
    jsonRes<Response>(res, { data });
  } catch (err) {
    jsonRes(res, { code: 500, error: err });
  }
}
/**
 * Load the last `limit` messages of a chat owned by `userId`.
 *
 * Returns an empty history when no chatId is given (i.e. a brand-new
 * conversation with no persisted messages).
 */
export async function getChatHistory({
  chatId,
  userId,
  limit = 50
}: Props & { userId: string }): Promise<Response> {
  if (!chatId) {
    return { history: [] };
  }
  // Match the user's chat, keep only the trailing `limit` entries of its
  // content array, then flatten each entry into one history item.
  const pipeline = [
    { $match: { _id: new Types.ObjectId(chatId), userId: new Types.ObjectId(userId) } },
    { $project: { content: { $slice: ['$content', -limit] } } },
    { $unwind: '$content' },
    {
      $project: {
        _id: '$content._id',
        obj: '$content.obj',
        value: '$content.value',
        quote: '$content.quote'
      }
    }
  ];
  const history = await Chat.aggregate(pipeline);
  return { history };
}

View File

@@ -42,16 +42,17 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
where: [['kb_id', kbId], 'AND', ['user_id', userId]]
});
// 从 pg 中获取所有数据
const pgData = await PgClient.select<{ q: string; a: string }>('modelData', {
const pgData = await PgClient.select<{ q: string; a: string; source: string }>('modelData', {
where: [['kb_id', kbId], 'AND', ['user_id', userId]],
fields: ['q', 'a'],
fields: ['q', 'a', 'source'],
order: [{ field: 'id', mode: 'DESC' }],
limit: count
});
const data: [string, string][] = pgData.rows.map((item) => [
const data: [string, string, string][] = pgData.rows.map((item) => [
item.q.replace(/\n/g, '\\n'),
item.a.replace(/\n/g, '\\n')
item.a.replace(/\n/g, '\\n'),
item.source
]);
// update export time

View File

@@ -26,6 +26,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const { userId } = await authUser({ req, authToken: true });
await connectToDatabase();
searchText = searchText.replace(/'/g, '');
const where: any = [
['user_id', userId],

View File

@@ -5,13 +5,15 @@ import { jsonRes } from '@/service/response';
export type InitDateResponse = {
beianText: string;
googleVerKey: string;
baiduTongji: boolean;
};
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
jsonRes<InitDateResponse>(res, {
data: {
beianText: process.env.SAFE_BEIAN_TEXT || '',
googleVerKey: process.env.CLIENT_GOOGLE_VER_TOKEN || ''
googleVerKey: process.env.CLIENT_GOOGLE_VER_TOKEN || '',
baiduTongji: process.env.BAIDU_TONGJI === '1'
}
});
}

View File

@@ -7,12 +7,9 @@ import { ChatModelMap, OpenAiChatEnum } from '@/constants/model';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
const chatModelList: ChatModelItemType[] = [];
if (global.systemEnv.openAIKeys) {
chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT35]);
}
if (global.systemEnv.gpt4Key) {
chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT4]);
}
chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT3516k]);
chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT35]);
chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT4]);
jsonRes(res, {
data: chatModelList

View File

@@ -0,0 +1,25 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Image } from '@/service/mongo';
// get the models available to the system
// GET handler: serve a stored image's binary payload by id.
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    await connectToDatabase();
    const { id } = req.query;
    const image = await Image.findById(id);
    if (!image) {
      throw new Error('no image');
    }
    // NOTE(review): content type is hard-coded; presumably every stored
    // image is JPEG — confirm against the upload path.
    res.setHeader('Content-Type', 'image/jpeg');
    res.send(image.binary);
  } catch (error) {
    jsonRes(res, { code: 500, error });
  }
}

View File

@@ -0,0 +1,37 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Image } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
type Props = { base64Img: string };
// POST handler: persist a user-supplied base64 image, respond with its URL path.
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    await connectToDatabase();
    // Caller must present a valid auth token.
    const { userId } = await authUser({ req, authToken: true });
    const { base64Img } = req.body as Props;
    const imgUrl = await uploadImg({ userId, base64Img });
    jsonRes(res, { data: imgUrl });
  } catch (error) {
    jsonRes(res, { code: 500, error });
  }
}
/**
 * Store a base64 image for `userId` and return its public URL path.
 *
 * Accepts either a full data URL ("data:image/...;base64,XXXX") or a raw
 * base64 payload. Previously `split(',')[1]` returned undefined for raw
 * base64 (no comma), making Buffer.from throw.
 */
export async function uploadImg({ base64Img, userId }: Props & { userId: string }) {
  // Strip an optional data-URL header; keep the payload as-is otherwise.
  const commaIndex = base64Img.indexOf(',');
  const base64Data = commaIndex >= 0 ? base64Img.slice(commaIndex + 1) : base64Img;
  const { _id } = await Image.create({
    userId,
    binary: Buffer.from(base64Data, 'base64')
  });
  return `/api/system/img/${_id}`;
}

View File

@@ -12,7 +12,7 @@ import { startQueue } from '@/service/utils/tools';
/* 校验支付结果 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
let { payId } = req.query as { payId: string };
const { payId } = req.query as { payId: string };
const { userId } = await authUser({ req, authToken: true });
@@ -34,10 +34,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
throw new Error('找不到用户');
}
// 获取邀请者
let inviter: UserModelSchema | null = null;
if (user.inviterId) {
inviter = await User.findById(user.inviterId);
}
const inviter = await (async () => {
if (user.inviterId) {
return User.findById(user.inviterId, '_id promotion');
}
return null;
})();
const payRes = await getPayResult(payOrder.orderId);
@@ -73,28 +75,35 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
amount: (payOrder.price / PRICE_SCALE) * inviter.promotion.rate * 0.01
});
}
jsonRes(res, {
unlockTask(userId);
return jsonRes(res, {
data: '支付成功'
});
unlockTask(userId);
}
} catch (error) {
await Pay.findByIdAndUpdate(payId, {
status: 'NOTPAY'
});
console.log(error);
// roll back status
try {
await Pay.findByIdAndUpdate(payId, {
status: 'NOTPAY'
});
} catch (error) {}
}
} else if (payRes.trade_state === 'CLOSED' || diffInHours > 24) {
return jsonRes(res, {
code: 500,
data: '更新订单失败,请重试'
});
}
if (payRes.trade_state === 'CLOSED' || diffInHours > 24) {
// 订单已关闭
await Pay.findByIdAndUpdate(payId, {
status: 'CLOSED'
});
jsonRes(res, {
return jsonRes(res, {
data: '订单已过期'
});
} else {
throw new Error(payRes?.trade_state_desc || '订单无效');
}
throw new Error(payRes?.trade_state_desc || '订单无效');
} catch (err) {
// console.log(err);
jsonRes(res, {

View File

@@ -4,23 +4,32 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase, Bill } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { adaptBill } from '@/utils/adapt';
import { addDays } from 'date-fns';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
let { pageNum = 1, pageSize = 10 } = req.query as {
pageNum: string;
pageSize: string;
const {
pageNum = 1,
pageSize = 10,
dateStart = addDays(new Date(), -7),
dateEnd = new Date()
} = req.body as {
pageNum: number;
pageSize: number;
dateStart: Date;
dateEnd: Date;
};
pageNum = +pageNum;
pageSize = +pageSize;
const { userId } = await authUser({ req, authToken: true });
await connectToDatabase();
const where = {
userId
userId,
time: {
$gte: new Date(dateStart).setHours(0, 0, 0, 0),
$lte: new Date(dateEnd).setHours(23, 59, 59, 999)
}
};
// get bill record and total by record

View File

@@ -41,6 +41,7 @@ const PcSliderBar = ({
chatId: string;
};
const ContextMenuRef = useRef(null);
const onclickContext = useRef(false);
const theme = useTheme();
const { isPc } = useGlobalStore();
@@ -68,10 +69,16 @@ const PcSliderBar = ({
// close contextMenu
useOutsideClick({
ref: ContextMenuRef,
handler: () =>
handler: () => {
setTimeout(() => {
setContextMenuData(undefined);
}, 10)
if (contextMenuData && !onclickContext.current) {
setContextMenuData(undefined);
}
}, 10);
setTimeout(() => {
onclickContext.current = false;
}, 10);
}
});
const onclickContextMenu = useCallback(
@@ -80,9 +87,10 @@ const PcSliderBar = ({
if (!isPc) return;
onclickContext.current = true;
setContextMenuData({
left: e.clientX + 15,
top: e.clientY + 10,
left: e.clientX,
top: e.clientY,
history
});
},
@@ -147,13 +155,14 @@ const PcSliderBar = ({
</Box>
)}
{/* chat history */}
<Box flex={'1 0 0'} h={0} overflow={'overlay'} userSelect={'none'}>
<Box flex={'1 0 0'} h={0} pl={2} overflowY={'scroll'} userSelect={'none'}>
{history.map((item) => (
<Flex
key={item._id}
position={'relative'}
alignItems={'center'}
p={3}
borderRadius={'md'}
mb={[2, 0]}
cursor={'pointer'}
transition={'background-color .2s ease-in'}
@@ -162,7 +171,7 @@ const PcSliderBar = ({
}}
{...(item._id === chatId
? {
backgroundImage: `${theme.lgColor.activeBlueGradient}`
backgroundImage: `${theme.lgColor.activeBlueGradient} !important`
}
: {
bg: item.top ? 'myGray.200' : ''

View File

@@ -38,9 +38,6 @@ const ModelList = ({ models, modelId }: { models: ModelListItemType[]; modelId:
<Box className="textEllipsis" color={'myGray.1000'}>
{item.name}
</Box>
<Box className="textEllipsis" color={'myGray.400'} fontSize={'sm'}>
{item.systemPrompt || '这个 应用 没有设置提示词~'}
</Box>
</Box>
</Flex>
</Box>

View File

@@ -1,4 +1,4 @@
import React, { useMemo } from 'react';
import React, { useMemo, useState } from 'react';
import { AddIcon, ChatIcon } from '@chakra-ui/icons';
import {
Box,
@@ -17,6 +17,12 @@ import WxConcat from '@/components/WxConcat';
import { delChatHistoryById } from '@/api/chat';
import { useChatStore } from '@/store/chat';
import Avatar from '@/components/Avatar';
import Tabs from '@/components/Tabs';
enum TabEnum {
app = 'app',
history = 'history'
}
const PhoneSliderBar = ({
chatId,
@@ -28,7 +34,7 @@ const PhoneSliderBar = ({
onClose: () => void;
}) => {
const router = useRouter();
const { colorMode, toggleColorMode } = useColorMode();
const [currentTab, setCurrentTab] = useState(TabEnum.app);
const { myModels, myCollectionModels, loadMyModels } = useUserStore();
const { isOpen: isOpenWx, onOpen: onOpenWx, onClose: onCloseWx } = useDisclosure();
@@ -73,101 +79,116 @@ const PhoneSliderBar = ({
backgroundColor={useColorModeValue('blackAlpha.800', 'blackAlpha.500')}
color={'white'}
>
<Flex alignItems={'center'} justifyContent={'space-between'} px={3}>
<Box flex={'0 0 50px'}>AI应用</Box>
<Flex mb={2} alignItems={'center'} justifyContent={'space-between'} px={2}>
<Tabs
w={'140px'}
list={[
{ label: '应用', id: TabEnum.app },
{ label: '历史记录', id: TabEnum.history }
]}
size={'sm'}
activeId={currentTab}
onChange={(e: any) => setCurrentTab(e)}
/>
{/* 新对话 */}
<Button
w={'50%'}
variant={'base'}
colorScheme={'white'}
mb={2}
leftIcon={<AddIcon />}
onClick={() => router.replace(`/chat?modelId=${modelId}`)}
>
</Button>
{currentTab === TabEnum.app && (
<Button
size={'sm'}
variant={'base'}
color={'white'}
leftIcon={<AddIcon />}
onClick={() => {
router.replace(`/chat?modelId=${modelId}`);
onClose();
}}
>
</Button>
)}
</Flex>
{/* 我的模型 & 历史记录 折叠框*/}
<Box flex={'1 0 0'} px={3} h={0} overflowY={'auto'}>
<Box>
{models.map((item) => (
<Flex
key={item._id}
alignItems={'center'}
p={3}
borderRadius={'md'}
mb={2}
cursor={'pointer'}
_hover={{
backgroundColor: 'rgba(255,255,255,0.1)'
}}
fontSize={'xs'}
border={'1px solid transparent'}
{...(item._id === modelId
? {
borderColor: 'rgba(255,255,255,0.5)',
backgroundColor: 'rgba(255,255,255,0.1)'
}
: {})}
onClick={async () => {
if (item._id === modelId) return;
router.replace(`/chat?modelId=${item._id}`);
onClose();
}}
>
<Avatar src={item.avatar} mr={2} w={'18px'} h={'18px'} />
<Box className={'textEllipsis'} flex={'1 0 0'} w={0}>
{item.name}
</Box>
</Flex>
))}
</Box>
<>
<Box py={1}></Box>
{history.map((item) => (
<Flex
key={item._id}
alignItems={'center'}
p={3}
borderRadius={'md'}
mb={2}
fontSize={'xs'}
border={'1px solid transparent'}
{...(item._id === chatId
? {
borderColor: 'rgba(255,255,255,0.5)',
backgroundColor: 'rgba(255,255,255,0.1)'
}
: {})}
onClick={() => {
if (item._id === chatId) return;
router.replace(`/chat?modelId=${item.modelId}&chatId=${item._id}`);
onClose();
}}
>
<ChatIcon mr={2} />
<Box flex={'1 0 0'} w={0} className="textEllipsis">
{item.title}
</Box>
<Box>
<MyIcon
name={'delete'}
w={'14px'}
onClick={async (e) => {
e.stopPropagation();
console.log(111);
await delChatHistoryById(item._id);
loadHistory({ pageNum: 1, init: true });
if (item._id === chatId) {
router.replace(`/chat?modelId=${modelId}`);
{currentTab === TabEnum.app && (
<>
{models.map((item) => (
<Flex
key={item._id}
alignItems={'center'}
p={3}
borderRadius={'md'}
mb={2}
cursor={'pointer'}
_hover={{
backgroundColor: 'rgba(255,255,255,0.1)'
}}
fontSize={'xs'}
border={'1px solid transparent'}
{...(item._id === modelId
? {
borderColor: 'rgba(255,255,255,0.5)',
backgroundColor: 'rgba(255,255,255,0.1)'
}
}}
/>
</Box>
</Flex>
))}
</>
: {})}
onClick={async () => {
if (item._id === modelId) return;
router.replace(`/chat?modelId=${item._id}`);
onClose();
}}
>
<Avatar src={item.avatar} mr={2} w={'18px'} h={'18px'} />
<Box className={'textEllipsis'} flex={'1 0 0'} w={0}>
{item.name}
</Box>
</Flex>
))}
</>
)}
{currentTab === TabEnum.history && (
<>
{history.map((item) => (
<Flex
key={item._id}
alignItems={'center'}
p={3}
borderRadius={'md'}
mb={2}
fontSize={'xs'}
border={'1px solid transparent'}
{...(item._id === chatId
? {
borderColor: 'rgba(255,255,255,0.5)',
backgroundColor: 'rgba(255,255,255,0.1)'
}
: {})}
onClick={() => {
if (item._id === chatId) return;
router.replace(`/chat?modelId=${item.modelId}&chatId=${item._id}`);
onClose();
}}
>
<ChatIcon mr={2} />
<Box flex={'1 0 0'} w={0} className="textEllipsis">
{item.title}
</Box>
<Box>
<MyIcon
name={'delete'}
w={'14px'}
onClick={async (e) => {
e.stopPropagation();
console.log(111);
await delChatHistoryById(item._id);
loadHistory({ pageNum: 1, init: true });
if (item._id === chatId) {
router.replace(`/chat?modelId=${modelId}`);
}
}}
/>
</Box>
</Flex>
))}
</>
)}
</Box>
<Divider my={3} colorScheme={useColorModeValue('gray', 'white')} />

View File

@@ -43,6 +43,32 @@ const QuoteModal = ({
isLoading
} = useQuery(['getHistoryQuote'], () => getHistoryQuote({ historyId, chatId }));
/**
* update kbData, update mongo status and reload quotes
*/
const updateQuoteStatus = useCallback(
async (quoteId: string, sourceText: string) => {
setIsLoading(true);
try {
await updateHistoryQuote({
chatId,
historyId,
quoteId,
sourceText
});
// reload quote
refetch();
} catch (err) {
toast({
status: 'warning',
title: getErrText(err)
});
}
setIsLoading(false);
},
[chatId, historyId, refetch, setIsLoading, toast]
);
/**
* click edit, get new kbDataItem
*/
@@ -53,6 +79,7 @@ const QuoteModal = ({
const data = (await getKbDataItemById(item.id)) as QuoteItemType;
if (!data) {
updateQuoteStatus(item.id, '已删除');
throw new Error('该数据已被删除');
}
@@ -69,32 +96,7 @@ const QuoteModal = ({
}
setIsLoading(false);
},
[setIsLoading, toast]
);
/**
* update kbData, update mongo status and reload quotes
*/
const updateQuoteStatus = useCallback(
async (quoteId: string) => {
setIsLoading(true);
try {
await updateHistoryQuote({
chatId,
historyId,
quoteId
});
// reload quote
refetch();
} catch (err) {
toast({
status: 'warning',
title: getErrText(err)
});
}
setIsLoading(false);
},
[chatId, historyId, refetch, setIsLoading, toast]
[setIsLoading, toast, updateQuoteStatus]
);
return (
@@ -163,7 +165,8 @@ const QuoteModal = ({
{editDataItem && (
<InputDataModal
onClose={() => setEditDataItem(undefined)}
onSuccess={() => updateQuoteStatus(editDataItem.dataId)}
onSuccess={() => updateQuoteStatus(editDataItem.dataId, '手动修改')}
onDelete={() => updateQuoteStatus(editDataItem.dataId, '已删除')}
kbId=""
defaultValues={editDataItem}
/>

Some files were not shown because too many files have changed in this diff Show More