Compare commits

...

105 Commits
v2.0 ... v2.7

Author SHA1 Message Date
archer
7f26b31f53 feat: csv导入去重;文档说明 2023-04-10 20:58:23 +08:00
archer
2a597964a2 perf: csv导入导出 2023-04-10 20:39:27 +08:00
archer
c1d3a46dc7 perf: csv文件选择 2023-04-10 19:47:03 +08:00
archer
0c55beb72d perf: comment 2023-04-10 14:39:46 +08:00
archer
9b1c0e1a3c perf: openapi. error catch 2023-04-10 13:16:24 +08:00
archer
a7988c164e perf: readme 2023-04-10 01:59:32 +08:00
archer
99e5fbd0f5 perf: md引入;docker-compose; 2023-04-09 22:56:08 +08:00
archer
5e4c4dd79b README 2023-04-09 12:38:36 +08:00
archer
70584783a5 perf: 环境变量例子 2023-04-09 12:37:13 +08:00
archer
705ac1c27e perf: 专线代理配置 2023-04-08 20:49:15 +08:00
archer
52d00d0562 feat: 知识库对外api 2023-04-08 20:27:43 +08:00
archer
9a145f223f fix: chat 页有些手机无法加载 2023-04-08 13:21:03 +08:00
archer
b7cd4dec89 fix: model auth 2023-04-08 11:57:13 +08:00
archer
33154a9c19 fix: 去除share 2023-04-08 11:51:51 +08:00
archer
e1c7503611 fix: api page hidden 2023-04-08 10:26:34 +08:00
archer
d04c298132 README 2023-04-08 00:47:31 +08:00
archer
eceda01c19 perf: openapi auth and lafgpt 2023-04-08 00:35:35 +08:00
archer
ea1681e1eb feat: auth openapi key 2023-04-07 23:33:59 +08:00
archer
f6c4b4c96d feat: openapi crd 2023-04-07 23:15:30 +08:00
archer
22cc9c85be feat: openapi page 2023-04-07 22:48:21 +08:00
archer
43f8d6008f fix: README.md 2023-04-07 21:51:21 +08:00
archer
29c5554f9e perf: 分页组件 2023-04-07 21:34:51 +08:00
archer
9b18a46456 perf: 账号api结构 2023-04-07 20:58:41 +08:00
archer
d5923bc64f perf: 去掉testapi 2023-04-07 17:23:52 +08:00
archer
f19c2d2ca1 perf: 去掉raw content 2023-04-07 16:12:43 +08:00
archer
84d91f3f76 perf: 接口大小 2023-04-07 15:46:30 +08:00
archer
7811f7482b fix: 账单第一页不展示 2023-04-07 01:20:41 +08:00
archer
9c8ca7dd25 perf: 压缩上下文 2023-04-07 01:11:23 +08:00
archer
1409916bd0 perf: 知识库范围 2023-04-06 23:43:34 +08:00
archer
fc7edcb54f perf: log和向量对话 2023-04-06 22:24:23 +08:00
archer
87d35042de perf: 阈值 2023-04-06 19:44:44 +08:00
archer
77dc961a07 perf: 账号页异步加载组件 2023-04-06 18:30:47 +08:00
archer
9a45fb64c2 perf: 未更新信息时也能保存 2023-04-06 16:12:36 +08:00
archer
881c36542c perf: 连续手动输入数据 2023-04-06 16:02:35 +08:00
archer
f88c6031f5 feat: lafgpt。openapi schema 2023-04-06 15:25:48 +08:00
archer
8a02b3b04a perf: 响应流抽离 2023-04-06 11:42:47 +08:00
archer
d460305871 perf: 文案优化 2023-04-06 09:07:07 +08:00
archer
144bed5a77 perf: 优化tokens计算 2023-04-05 23:43:20 +08:00
archer
96fc917bad perf: 支付文案 2023-04-05 22:32:14 +08:00
archer
794a3698ad feat: wx pay 2023-04-05 22:07:02 +08:00
archer
fbbc32361b perf: 加快拆分QA和生成向量;余额不足提醒 2023-04-05 20:37:37 +08:00
archer
dc329041f3 feat: 根据url获取网站文本 2023-04-05 16:10:47 +08:00
archer
5feb2e19bf fix: word解析失败 2023-04-05 11:16:12 +08:00
archer
ec22cd8320 fix: 价格表 2023-04-05 10:59:53 +08:00
archer
8c7efcbd1a perf: 二维码 2023-04-04 23:54:33 +08:00
archer
afc5947bfb feat: maxtokens 2023-04-04 23:00:01 +08:00
archer
40189a6899 feat: 队列任务余额不足时退出 2023-04-04 22:36:14 +08:00
archer
b73829a25c fix: 重复生成向量 2023-04-04 22:12:48 +08:00
archer
a7c5d3cc05 Merge branch 'dev2.4' into dev2.5 2023-04-04 22:00:16 +08:00
archer
cc36a13f17 Merge branch 'dev2.4' of https://github.com/c121914yu/FastGPT into dev2.4 2023-04-04 21:59:38 +08:00
archer
943abbe0fb perf: 5进程同时进行 2023-04-04 21:41:55 +08:00
archer
b13c3c4da5 fix: 账单余额问题 2023-04-04 21:32:51 +08:00
archer
c12aa7fdf7 fix: 文本长度过长 2023-04-04 14:20:10 +08:00
archer
e08e8aa00b feat: 修改模型数据可修改问题 2023-04-04 13:15:34 +08:00
archer
85e11abc0a perf: 文件拆分 2023-04-03 21:04:38 +08:00
archer
becee69d6a perf: 发送区域样式 2023-04-03 17:28:35 +08:00
archer
042b0c535a perf: 发送按键 2023-04-03 17:14:46 +08:00
archer
f97c29b41e feat: lafgpt请求;fix: 修复发送按键 2023-04-03 16:35:48 +08:00
archer
4d6616cbfa fix: ts 2023-04-03 11:03:51 +08:00
archer
cf37992b5c feat: 封装向量生成和账单 2023-04-03 10:59:32 +08:00
archer
6c4026ccef perf: 文件结构 2023-04-03 10:20:17 +08:00
archer
caf31faf31 perf: 生成qa prompt 2023-04-03 01:39:00 +08:00
archer
a0832af14b perf: 数据集刷新导致页面抖动 2023-04-03 00:51:53 +08:00
archer
677e61416d perf: 版本文案 2023-04-03 00:48:56 +08:00
archer
56ba6fa5f7 feat: 拆分数据自定义prompt 2023-04-03 00:37:40 +08:00
archer
16a31de1c7 feat: 数据集导出 2023-04-03 00:18:21 +08:00
archer
05b2e9e99c feat: 拆分测试环境 2023-04-02 23:38:28 +08:00
archer
ae4243b522 perf: 知识库数据结构 2023-04-01 22:31:56 +08:00
archer
5759cbeae0 perf: 知识库录入 2023-03-31 18:23:07 +08:00
archer
a3d74ec4a6 perrf chat不请求余额 2023-03-31 14:13:00 +08:00
archer
df9ac99ef2 fix: 复制和代理 2023-03-31 12:17:08 +08:00
archer
ef1e8aef5c fix: 发送区按键 2023-03-31 11:45:42 +08:00
archer
56dab7abba perf: api调用和余额校验 2023-03-31 11:20:45 +08:00
archer
ed1f93d836 perf: 改用hash索引 2023-03-31 02:58:09 +08:00
archer
5ec8aac3ac README 2023-03-31 00:41:42 +08:00
archer
837c132d24 feat: 删除模型数据 2023-03-31 00:23:41 +08:00
archer
8239c58494 perf: 知识库优化 2023-03-31 00:05:04 +08:00
archer
456686f3d0 conflict 2023-03-30 22:33:58 +08:00
archer
2099a87908 feat: 模型数据管理
feat: 模型数据导入

feat: redis 向量入库

feat: 向量索引

feat: 文件导入模型

perf: 交互

perf: prompt
2023-03-30 21:45:54 +08:00
archer
2b2c70e53d feat: 模型数据导入 2023-03-30 01:04:52 +08:00
archer
f32c557bdd feat: 模型数据管理 2023-03-29 00:22:48 +08:00
archer
713332522f Merge branch 'dev2.1' into dev2.2 2023-03-28 17:57:57 +08:00
archer
586607a9ce feat: modeldata接口。fix: 部分权限校验bug 2023-03-28 17:56:31 +08:00
archer
8c70205940 README 2023-03-28 00:48:24 +08:00
archer
c3ccbcb7f6 perf: 输入超长提示 2023-03-28 00:36:26 +08:00
archer
7a6d0ea650 perf: 减少聊天内容配置,自动截断上下文 2023-03-28 00:07:32 +08:00
archer
7fb6f62cf6 perf: 文本拆分 2023-03-27 19:19:47 +08:00
archer
af385b1b42 feat: 一次性获取data集合 2023-03-27 18:55:38 +08:00
archer
5249297cb1 perf: bill logs 2023-03-27 13:58:57 +08:00
archer
9280a21d12 perf: 提示词 2023-03-27 01:53:40 +08:00
archer
650a9dd651 fix: 生成词向量代理 2023-03-27 01:06:40 +08:00
archer
42e12d7db1 perf: 文本 2023-03-26 23:52:37 +08:00
archer
249ed18d15 fix: 抽取概要 2023-03-26 22:43:39 +08:00
archer
3e4487ad9a feat: 摘要拆分 2023-03-26 22:09:59 +08:00
archer
888642f154 perf: 参数 2023-03-26 15:47:30 +08:00
archer
963e590dfd perf: 参数 2023-03-26 15:41:50 +08:00
archer
e547893ac1 perf: 微调截止词 2023-03-26 15:27:57 +08:00
archer
9125910cfe fix: 连续滚动重复请求;perf: 训练参数 2023-03-26 14:37:49 +08:00
archer
98c458dcf8 fix: 训练后模型没选中 2023-03-26 13:56:00 +08:00
archer
41b6401c13 feat: 复制和删除对话功能 2023-03-26 13:14:50 +08:00
archer
936e36205e fix: 滚动条样式 2023-03-26 11:52:46 +08:00
archer
e289c4ec53 fix: ts数据 2023-03-26 10:51:47 +08:00
archer
1cbcc62494 perf: 正则提取文本, 2023-03-26 10:31:13 +08:00
archer
da31ef286b perf: 完善拆分数据模式 2023-03-25 23:37:50 +08:00
archer
60f62507bd feat: 版本更新 2023-03-25 23:16:59 +08:00
153 changed files with 6301 additions and 2051 deletions

View File

@@ -7,4 +7,4 @@ README.md
.git
.yalc/
yalc.lock
yalc.lock

View File

@@ -1,6 +1,9 @@
AXIOS_PROXY_HOST=127.0.0.1
AXIOS_PROXY_PORT=33210
MONGODB_URI=
MY_MAIL=
MAILE_CODE=
TOKEN_KEY=
AXIOS_PROXY_PORT_FAST=7890
AXIOS_PROXY_PORT_NORMAL=7890
MONGODB_URI=mongodb://username:password@0.0.0.0:27017/?authSource=admin&readPreference=primary&appname=MongoDB%20Compass&ssl=false
MY_MAIL=11111111@qq.com
MAILE_CODE=sdasadasfasfad
TOKEN_KEY=sssssssss
OPENAIKEY=sk-afadfadfadfsd
REDIS_URL=redis://default:password@0.0.0.0:8100

3
.gitignore vendored
View File

@@ -36,4 +36,5 @@ yarn-error.log*
next-env.d.ts
/public/trainData/
/.vscode/
platform.json
platform.json
testApi/

View File

@@ -1,4 +1,4 @@
dist
.vscode
**/.DS_Store
node_modules
node_modules

View File

@@ -1,21 +1,20 @@
module.exports = {
printWidth: 100,
tabWidth: 2,
useTabs: false,
semi: true,
singleQuote: true,
quoteProps: 'as-needed',
jsxSingleQuote: false,
trailingComma: 'none',
bracketSpacing: true,
jsxBracketSameLine: false,
arrowParens: 'always',
rangeStart: 0,
rangeEnd: Infinity,
requirePragma: false,
insertPragma: false,
proseWrap: 'preserve',
htmlWhitespaceSensitivity: 'css',
endOfLine: 'lf'
printWidth: 100,
tabWidth: 2,
useTabs: false,
semi: true,
singleQuote: true,
quoteProps: 'as-needed',
jsxSingleQuote: false,
trailingComma: 'none',
bracketSpacing: true,
jsxBracketSameLine: false,
arrowParens: 'always',
rangeStart: 0,
rangeEnd: Infinity,
requirePragma: false,
insertPragma: false,
proseWrap: 'preserve',
htmlWhitespaceSensitivity: 'css',
endOfLine: 'lf'
};

View File

@@ -54,13 +54,4 @@ USER nextjs
EXPOSE 3000
ENV PORT 3000
ENV MAX_USER ''
ENV AXIOS_PROXY_HOST ''
ENV AXIOS_PROXY_PORT ''
ENV MONGODB_URI ''
ENV MY_MAIL ''
ENV MAILE_CODE ''
ENV TOKEN_KEY ''
CMD ["node", "server.js"]

183
README.md
View File

@@ -1,49 +1,35 @@
# Fast GPT
Fast GPT 允许你用自己的 openai API KEY 来快速的调用 openai 接口,包括 GPT3 及其微调方法,以及最新的 gpt3.5 接口。
Fast GPT 允许你使用自己的 openai API KEY 来快速的调用 openai 接口,包括 GPT3 及其微调方法,以及最新的 gpt3.5 接口。
## 初始化
## 开发
复制 .env.template 成 .env.local ,填写核心参数
```
AXIOS_PROXY_HOST=axios代理地址目前 openai 接口都需要走代理,本机的话就填 127.0.0.1
AXIOS_PROXY_PORT=代理端口
MONGODB_URI=mongo数据库地址例如mongodb://username:password@ip:27017/?authSource=admin&readPreference=primary&appname=MongoDB%20Compass&directConnection=true&ssl=false
AXIOS_PROXY_PORT_FAST=代理端口1,clash默认为7890
AXIOS_PROXY_PORT_NORMAL=代理端口2
MONGODB_URI=mongo数据库地址
MY_MAIL=发送验证码邮箱
MAILE_CODE=邮箱秘钥代理里设置的是QQ邮箱不知道怎么找这个 code 的,可以百度搜"nodemailer发送邮件"
TOKEN_KEY=随便填一个用于生成和校验token
TOKEN_KEY=随便填一个,用于生成和校验 token
OPENAIKEY=openai的key
REDIS_URL=redis的地址
```
```bash
pnpm dev
```
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
## 部署
### docker 模式
请准备好 docker mongo代理, 和nginx。 镜像走本机的代理,所以用 network=hostport 改成代理的端口clash 一般都是 7890。
请准备好 docker mongo代理, 和 nginx。 镜像走本机的代理,所以用 network=hostport 改成代理的端口clash 一般都是 7890。
#### docker 打包
```bash
docker build -t imageName:tag .
docker push imageName:tag
```
#### 服务器拉取镜像和运行
```bash
# 服务器拉取部署, imageName 替换成镜像名
docker pull imageName:tag
docker stop fast-gpt || true
docker rm fast-gpt || true
docker run -d --network=host --name fast-gpt \
-e AXIOS_PROXY_HOST=127.0.0.1 \
-e AXIOS_PROXY_PORT=7890 \
-e MY_MAIL=your email\
-e MAILE_CODE=your email code \
-e TOKEN_KEY=任意一个内容 \
-e MONGODB_URI="mongodb://user:password@127.0.0.0:27017/?authSource=admin&readPreference=primary&appname=MongoDB%20Compass&ssl=false" \
imageName:tag
# 或者直接拉镜像,见下方
```
#### 软件教程docker 安装
@@ -53,20 +39,6 @@ curl -sSL https://get.daocloud.io/docker | sh
sudo systemctl start docker
```
#### 软件教程mongo 安装
```bash
docker pull mongo:6.0.4
docker stop mongo
docker rm mongo
docker run -d --name mongo \
-e MONGO_INITDB_ROOT_USERNAME= \
-e MONGO_INITDB_ROOT_PASSWORD= \
-v /root/service/mongo:/data/db \
mongo:6.0.4
# 检查 mongo 运行情况, 有成功的 logs 代表访问成功
docker logs mongo
```
#### 软件教程: clash 代理
```bash
# 下载包
@@ -84,8 +56,7 @@ export https_proxy=http://127.0.0.1:7890
export HTTP_PROXY=http://127.0.0.1:7890
export HTTPS_PROXY=http://127.0.0.1:7890
# 运行脚本: 删除clash - 到 clash 目录 - 删除缓存 - 执行运行
# 会生成一个 nohup.out 文件,可以看到 clash 的 logs
# 运行脚本: 删除clash - 到 clash 目录 - 删除缓存 - 执行运行. 会生成一个 nohup.out 文件,可以看到 clash 的 logs
OLD_PROCESS=$(pgrep clash)
if [ ! -z "$OLD_PROCESS" ]; then
echo "Killing old process: $OLD_PROCESS"
@@ -99,5 +70,133 @@ nohup ./clash-linux-amd64-v1.10.0 -d ./ &
echo "Restart clash"
```
#### 软件教程Nginx
...没写,这个百度吧。
#### 文件创建
**yml文件**
```yml
version: "3.3"
services:
fast-gpt:
image: c121914yu/fast-gpt:latest
environment:
AXIOS_PROXY_HOST: 127.0.0.1
AXIOS_PROXY_PORT: 7890
MY_MAIL: 11111111@qq.com
MAILE_CODE: sdasadasfasfad
TOKEN_KEY: sssssssss
MONGODB_URI: mongodb://username:password@0.0.0.0:27017/?authSource=admin&readPreference=primary&appname=MongoDB%20Compass&ssl=false
OPENAIKEY: sk-afadfadfadfsd
REDIS_URL: redis://default:password@0.0.0.0:8100
network_mode: host
restart: always
container_name: fast-gpt
mongodb:
image: mongo:6.0.4
container_name: mongo
restart: always
environment:
- MONGO_INITDB_ROOT_USERNAME=root
- MONGO_INITDB_ROOT_PASSWORD=ROOT_1234
- MONGO_DATA_DIR=/data/db
- MONGO_LOG_DIR=/data/logs
volumes:
- /root/fastgpt/mongo/data:/data/db
- /root/fastgpt/mongo/logs:/data/logs
ports:
- 27017:27017
nginx:
image: nginx:alpine3.17
container_name: nginx
restart: always
network_mode: host
ports:
- "80:80"
volumes:
- /root/fastgpt/nginx/nginx.conf:/etc/nginx/nginx.conf:ro
redis-stack:
image: redis/redis-stack:6.2.6-v6
container_name: redis-stack
restart: unless-stopped
ports:
- "8100:6379"
- "8101:8001"
environment:
- REDIS_ARGS=--requirepass psw1234
volumes:
- /etc/localtime:/etc/localtime:ro
- /root/fastgpt/redis/redis.conf:/redis.conf
- /root/fastgpt/redis/data:/data
```
**redis.conf**
```
## 开启aop持久化
appendonly yes
#default: 持久化文件
appendfilename "appendonly.aof"
#default: 每秒同步一次
appendfsync everysec
```
**nginx.conf**
```
user nginx;
worker_processes auto;
error_log /var/log/nginx/error.log;
pid /run/nginx.pid;
events {
worker_connections 1024;
}
http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
include /etc/nginx/conf.d/*.conf;
server {
listen 80;
server_name test.com;
gzip on;
gzip_min_length 1k;
gzip_buffers 4 8k;
gzip_http_version 1.1;
gzip_comp_level 6;
gzip_vary on;
gzip_types text/plain application/x-javascript text/css application/javascript application/json application/xml;
gzip_disable "MSIE [1-6]\.";
location / {
proxy_pass http://localhost:3000;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
}
}
```
#### 运行脚本
**redis创建索引**
```bash
FT.CREATE idx:model:data:hash ON HASH PREFIX 1 model:data: SCHEMA modelId TAG userId TAG status TAG q TEXT text TEXT vector VECTOR FLAT 6 DIM 1536 DISTANCE_METRIC COSINE TYPE FLOAT32
```
**run.sh 运行文件**
```bash
#!/bin/bash
docker-compose up -d
echo "Docker Compose 重新拉取镜像完成!"
# 删除本地旧镜像
images=$(docker images --format "{{.ID}} {{.Repository}}" | grep fast-gpt)
# 将镜像 ID 和名称放入数组中
IFS=$'\n' read -rd '' -a image_array <<<"$images"
# 遍历数组并删除所有旧的镜像
for ((i=1; i<${#image_array[@]}; i++))
do
image=${image_array[$i]}
image_id=${image%% *}
docker rmi $image_id
done
```

View File

@@ -1,4 +0,0 @@
{"prompt": "sealos的介绍", "completion": "sealos 是以 kubernetes 为内核的云操作系统发行版"}
{"prompt": "sealos是什么", "completion": "sealos 是以 kubernetes 为内核的云操作系统发行版"}
{"prompt": "sealos安装的先决条件", "completion": "sealos 是一个简单的 go 二进制文件,可以安装在大多数 Linux 操作系统中。"}
{"prompt": "sealos的CPU架构", "completion": "目前支持 amd64 和 arm64 架构。"}

View File

@@ -36,17 +36,20 @@
"nodemailer": "^6.9.1",
"nprogress": "^0.2.0",
"openai": "^3.2.1",
"papaparse": "^5.4.1",
"react": "18.2.0",
"react-dom": "18.2.0",
"react-hook-form": "^7.43.1",
"react-markdown": "^8.0.5",
"react-syntax-highlighter": "^15.5.0",
"redis": "^4.6.5",
"rehype-katex": "^6.0.2",
"remark-gfm": "^3.0.1",
"remark-math": "^5.1.1",
"sass": "^1.58.3",
"sharp": "^0.31.3",
"tunnel": "^0.0.6",
"wxpay-v3": "^3.0.2",
"zustand": "^4.3.5"
},
"devDependencies": {
@@ -56,6 +59,7 @@
"@types/lodash": "^4.14.191",
"@types/node": "18.14.0",
"@types/nodemailer": "^6.4.7",
"@types/papaparse": "^5.3.7",
"@types/react": "18.0.28",
"@types/react-dom": "18.0.11",
"@types/react-syntax-highlighter": "^15.5.6",

980
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,15 +0,0 @@
{"prompt":"什么是百度plato</s>","completion":" 百度PLATO是一种聊天机器人发布于2021年10月参数量为百亿是GPT-3的1/17。</s>"}
{"prompt":"与百度plato相比chatgpt有哪些优点</s>","completion":" 与百度PLATO相比ChatGPT体现出耐心、体贴和妥当被网友们普遍称赞。ChatGPT可以用于个性化聊天机器人例如生成一个很相似的数字人提供一位温柔健谈的朋友指导语言学习等。</s>"}
{"prompt":"replika是什么</s>","completion":" Replika是一个产品使用ChatGPT的技术可以根据自己的需求生成一个虚拟女友、男友或单纯的朋友。目前还引起了不少有关性骚扰、家暴、隐私等问题的争议。</s>"}
{"prompt":"聊天机器人可以用于哪些场景?</s>","completion":" 聊天机器人可以用于很多陪伴的场景,例如亲人去世、抑郁症或自闭症患者、语言学习、小朋友、养老院或独居在家的老人等。</s>"}
{"prompt":"有人反对使用聊天机器人解决孤独困境,为什么?</s>","completion":" 有人反对使用聊天机器人解决孤独困境,认为孤独困境应该用人与人真正的交往解决,机器是冰冷的。</s>"}
{"prompt":"为什么聊天机器人仍然需要解决当下的需求?</s>","completion":" 在社会结构性的困局还没有解开时,长夜漫漫的独身打工人、养老院里卧床的老人、父母在外留守在家的儿童,当下的需求仍需要解决。</s>"}
{"prompt":"ai在未来会被戴上什么样的帽子</s>","completion":" AI在未来各个场景下都会被戴上一个很虚无的帽子“机器是冰冷的”、“没有灵魂”、“它并不是真正在思考”。</s>"}
{"prompt":"怎么回答“ai是否真正思考”的问题</s>","completion":" 引用乔姆斯基老爷子的那句名言“思考是人类特性。AI有天会思考吗那就像是在问潜水艇会不会游泳一样。”</s>"}
{"prompt":"聊天机器人与人类完成任务的方式有何不同?</s>","completion":" 聊天机器人与人类完成任务的方式不同,但并不重要。它们能跟人一样完成任务,这是最重要的。</s>"}
{"prompt":"聊天机器人的发展前景如何?</s>","completion":" 聊天机器人的发展前景广阔,可以应用于很多领域,例如客服、教育、医疗等。</s>"}
{"prompt":"什么是百度plato</s>","completion":" 百度PLATO是一种聊天机器人发布于2021年10月参数量为百亿是GPT-3的1/17。</s>"}
{"prompt":"与百度plato相比chatgpt有哪些优点</s>","completion":" 与百度PLATO相比ChatGPT体现出耐心、体贴和妥当被网友们普遍称赞。ChatGPT还可以用于很多陪伴的场景如亲人去世、抑郁症或自闭症患者、语言学习、小朋友、养老院或独居在家的老人等。</s>"}
{"prompt":"replika是什么产品</s>","completion":" Replika是一个产品用的是ChatGPT的技术。用户可以根据自己的需求生成一个虚拟女友、男友或单纯的朋友。目前还引起了不少有关性骚扰、家暴、隐私等问题的争议。</s>"}
{"prompt":"聊天机器人能够解决哪些现实问题?</s>","completion":" 聊天机器人能够解决现代人面临的孤独困境,如长夜漫漫的独身打工人、养老院里卧床的老人、父母在外留守在家的儿童等。</s>"}
{"prompt":"有人反对使用聊天机器人解决孤独困境,为什么?</s>","completion":" 有人反对使用聊天机器人解决孤独困境,认为孤独困境应该用人与人真正的交往解决,机器是冰冷的。</s>"}

View File

@@ -0,0 +1,9 @@
## 常见问题
**内容长度**
单次最长 4000 tokens, 上下文最长 8000 tokens, 上下文超长时会被截断。
**删除和复制**
点击对话头像,可以选择复制或删除该条内容。
**代理出错**
服务器代理不稳定,可以过一会儿再尝试。

6
public/docs/csvSelect.md Normal file
View File

@@ -0,0 +1,6 @@
接受一个csv文件表格头包含 question 和 answer。question 代表问题answer 代表答案。
导入前会进行去重,如果问题和答案完全相同,则不会被导入,所以最终导入的内容可能会比文件的内容少。
| question | answer |
| --- | --- |
| 什么是 laf | laf 是一个云函数开发平台…… |
| 什么是 sealos | Sealos 是以 kubernetes 为内核的云操作系统发行版,可以…… |

40
public/docs/intro.md Normal file
View File

@@ -0,0 +1,40 @@
## 欢迎使用 Fast GPT
[Git 仓库](https://github.com/c121914yu/FastGPT)
### 交流群/问题反馈
扫码满了,加个小号,定时拉
wx号: fastgpt123
![](/imgs/wx300.jpg)
### 快速开始
1. 使用邮箱注册账号。
2. 进入账号页面,添加关联账号,目前只有 openai 的账号可以添加,直接去 openai 官网,把 API Key 粘贴过来。
3. 如果填写了自己的 openai 账号,使用时会直接用你的账号。如果没有填写,需要付费使用平台的账号。
4. 进入模型页,创建一个模型,建议直接用 ChatGPT。
5. 在模型列表点击【对话】,即可使用 API 进行聊天。
### 定制 prompt
1. 进入模型编辑页
2. 调整温度和提示词
3. 使用该模型对话。每次对话时,提示词和温度都会自动注入,方便管理个人的模型。建议把自己日常经常需要使用的 5~10 个方向预设好。
### 知识库
1. 创建模型时选择【知识库】
2. 进入模型编辑页
3. 导入数据,可以选择手动导入,或者选择文件导入。文件导入会自动调用 chatGPT 理解文件内容,并生成知识库。
4. 使用该模型对话。
注意使用知识库模型对话时tokens 消耗会加快。
### 价格表
如果使用了自己的 Api Key不会计费。可以在账号页看到详细账单。单纯使用 chatGPT 模型进行对话,只有一个计费项目。使用知识库时,包含**对话**和**索引**生成两个计费项。
| 计费项 | 价格: 元/ 1K tokens包含上下文|
| --- | --- |
| chatgpt - 对话 | 0.03 |
| 知识库 - 对话 | 0.03 |
| 知识库 - 索引 | 0.004 |
| 文件拆分 | 0.03 |

3
public/docs/shareHint.md Normal file
View File

@@ -0,0 +1,3 @@
你正准备分享对话,请确保分享链接不会滥用,因为它是使用的是你的 API key。
* 分享空白对话:为该模型创建一个空白的聊天分享出去。
* 分享当前对话:会把当前聊天的内容也分享出去,但是要注意不要多个人同时用一个聊天内容。

View File

@@ -0,0 +1,3 @@
## Fast GPT V2.7
* FastGpt Api 允许你将 Fast Gpt 的部分功能通过 api 的形式,将知识库接入到自己的应用中,例如:飞书、企业微信、客服助手.
* 通过 csv 文件导入和导出你的问答对。你可以将你的 csv 文件放置在飞书文档上,以便团队共享。

BIN
public/imgs/wx300.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 53 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 320 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 50 KiB

File diff suppressed because one or more lines are too long

View File

@@ -1,21 +0,0 @@
{"prompt": "什么是高潜公司\n\n###\n\n", "completion": " 高潜公司是指高潜力的公司,其所处的赛道有发展潜力和前景。###"}
{"prompt": "高潜公司的优势是什么\n\n###\n\n", "completion": " 高潜公司所处的整个行业和赛道有一定的增量空间头部vc通过严格的筛选和加持为高潜公司背书高潜公司的创始者一般都特别优秀。加入高潜公司可以获得很多机会来锻炼自己、强化自己的认知快速成长、展现自己并有可能成为公司扩张后的管理层###"}
{"prompt": "高潜公司的风险是什么\n\n###\n\n", "completion": " 高潜公司的高风险问题也很明显。一旦公司失败,很多边缘人就会被淘汰掉。但宏观来讲,只要赛道是有潜力的,即便这家公司失败了,也可以跳槽到成功的公司去。###"}
{"prompt": "高潜公司的例子有哪些\n\n###\n\n", "completion": " 很多高潜公司在做最近比较火的AIGC包括去年的元宇宙前两年比较火的新消费、企业服务和出海之类的。###"}
{"prompt": "为什么现在加入大厂的前景不广阔?\n\n###\n\n", "completion": " 今年的大形势之下互联网大厂纷纷开始裁员、取消普调、晋升比例严重缩水HC也急剧减少。###"}
{"prompt": "加入高潜公司可以获得什么样的机会?\n\n###\n\n", "completion": " 加入高潜公司可以获得很多机会来锻炼自己、强化自己的认知,快速成长、展现自己,并有可能成为公司扩张后的管理层。###"}
{"prompt": "什么是高潜公司?\n\n###\n\n", "completion": " 高潜公司的定义是“高潜力的公司”所处的行业和赛道有发展潜力和前景头部vc为其背书创始者一般都特别优秀。###"}
{"prompt": "加入高潜公司有哪些优点\n\n###\n\n", "completion": " 加入高潜公司能够获得锻炼自己、强化认知的机会,快速成长、展现自己,在公司扩张后成为团队的管理层。同时,所处的行业还处于上升期,发展前景广阔。###"}
{"prompt": "高潜公司存在哪些风险?\n\n###\n\n", "completion": " 高潜公司存在高风险问题,一旦公司失败,很多边缘人就会被淘汰掉。但宏观来讲,只要赛道是有潜力的,即便这家公司失败了,也可以跳槽到成功的公司去。###"}
{"prompt": "高潜公司和互联网有什么相似之处?\n\n###\n\n", "completion": " 目前来说,高潜公司的风格大部分都比较像互联网大厂,有风格,但又没有那么内卷,因为大家每天都在开拓边疆,突破和探索市场的边界。同时,互联网产业的蓬勃发展也给其他行业,包括新能源汽车、新消费等,带来了用户增长和运营的经验###"}
{"prompt": "互联网大厂的制度和分工如何?\n\n###\n\n", "completion": " 目前的互联网大厂的制度和分工非常明确基本是按照一些标准SOP做事。同时由于已经没有用户增量了内卷和向上管理的问题也比较严重。###"}
{"prompt": "高潜公司对于哪些毕业生是很好的选择\n\n###\n\n", "completion": " 对于追求长期创造价值而非想找份安稳的工作的毕业生来说,高潜公司是很好的选择。###"}
{"prompt": "互联网大厂的晋升方式有哪些?\n\n###\n\n", "completion": " 互联网大厂的晋升方式有两种:老板特别喜欢你或者你的战功支撑你的竞争###"}
{"prompt": "为什么目前更多优秀的人都会加入创业公司?\n\n###\n\n", "completion": " 目前社会的大趋势是更多优秀的人都会加入创业公司,因为真正在创造价值的其实永远是创业公司###"}
{"prompt": "为什么选择高潜公司主要是因为我们迎来了什么三个繁荣?\n\n###\n\n", "completion": " 选择高潜公司主要是因为我们迎来了创新生态的三个繁荣:人才繁荣、资本繁荣和环境繁荣###"}
{"prompt": "资本繁荣是如何推动创业生态的崛起的?\n\n###\n\n", "completion": " 资本繁荣无论中国还是美国创业生态的崛起都是伴随着移动互联网的发展。中国这一代VC的崛起主要是通过投资移动互联网项目所积累的战绩扩大了资金池通过这些成本低、增长快、回报率高的项目才有了底气去推动更多行业发展尝试着投资toB和硬科技这类成本高、增长慢、回报率低、风险大的项目。###"}
{"prompt": "环境繁荣是指什么?\n\n###\n\n", "completion": " 环境繁荣是指互联网带动起经济发展后,各地政府也开始了对于创业进行培育,免费场地、巨额无偿补贴、各地送钱竞赛,大幅度降低了创业成本。###"}
{"prompt": "高潜公司对于追求长期创造价值的毕业生来说为什么是好的选择?\n\n###\n\n", "completion": " 因为在高潜公司,毕业生可以通过参与创业公司的发展,获得更多的机会和成长空间,而不是被固定在某个职位上。此外,高潜公司通常会注重团队建设和文化建设,能够提供更好的工作环境和发展机会。###"}
{"prompt": "如果想要寻找高潜公司,有哪些渠道可以利用?\n\n###\n\n", "completion": " 可以利用领英职场app和知潜微信小程序。领英职场app中入驻了很多高潜公司及高管而知潜小程序现在也已经有100+高潜力创业公司入驻都是经过筛选的、top VC投资过且赛道有前景的公司。此外红杉和真格等VC机构自己的公众号也会经常披露投资的企业。###"}
{"prompt": "为什么现在更多优秀的人才会加入创业公司?\n\n###\n\n", "completion": " 因为在当前社会的大趋势下,创业公司成为了创新生态的重要组成部分。创业公司在创造价值和推动行业发展方面具有更大的空间和潜力,而且随着移动互联网的发展,创业公司的崛起也得到了资本和政策的支持。因此,更多优秀的人才选择加入创业公司,以实现自己的创业梦想和获得更好的发展机会。###"}
{"prompt": "高潜公司的优势在哪些方面?\n\n###\n\n", "completion": " 高潜公司在团队建设、文化建设和发展机会等方面具有优势。高潜公司通常会注重团队建设和文化建设,能够提供更好的工作环境和发展机会。同时,高潜公司的发展潜力和创新能力也很强,可以为毕业生提供更多的机会和成长空间。###"}

View File

@@ -1,21 +0,0 @@
{"prompt":"什么是死锁?如何避免死锁?\n\n###\n\n","completion":" 死锁是指两个或多个进程在执行过程中,由于互相等待对方释放资源而造成的一种僵局。避免死锁的常用方法包括:破坏互斥条件、破坏请求和保持条件、破坏不剥夺条件、破坏循环等待条件。###"}
{"prompt":"什么是虚拟内存?\n\n###\n\n","completion":" 虚拟内存是指一种计算机内存管理技术,它使得操作系统可以将进程所使用的虚拟地址空间映射到物理内存中,从而可以让多个进程共享物理内存。当物理内存不足时,操作系统会将一部分不常用的内存页面暂时存储到硬盘上,这样就可以释放出更多的物理内存供其他进程使用。###"}
{"prompt":"什么是分页和分段?有什么区别?\n\n###\n\n","completion":" 分页和分段是虚拟内存管理中常用的两种技术。分页是将虚拟地址空间划分为固定大小的页面,而分段则是将虚拟地址空间划分为不同大小的段。区别在于,分页的页面大小是固定的,而分段的段大小是可变的;另外,分页是为了实现虚拟内存,而分段则是为了实现逻辑地址空间的管理。###"}
{"prompt":"什么是lru算法\n\n###\n\n","completion":" lru算法是一种页面置换算法它的全称是least recently used即最近最少使用算法。lru算法的基本思想是在物理内存中维护一个页面访问历史记录当需要替换一页时选择最近最少被访问的页面进行替换。###"}
{"prompt":"什么是文件系统?\n\n###\n\n","completion":" 文件系统是一种用于管理计算机存储设备上文件和目录的组织方式。文件系统通常包括存储管理、目录管理、文件访问控制、文件系统维护等功能。常见的文件系统包括fat、ntfs、ext等。###"}
{"prompt":"什么是并发和并行?\n\n###\n\n","completion":" 并发是指多个任务交替执行的情况,而并行是指多个任务同时执行的情况。并发的实现方式是通过时间分片技术,而并行的实现方式是通过多核处理器等硬件设备。###"}
{"prompt":"什么是系统调用?\n\n###\n\n","completion":" 系统调用是操作系统中的一种机制,它允许用户空间程序请求操作系统内核提供一些服务,如读写文件、创建进程等。系统调用通常是通过软中断实现的###"}
{"prompt":"什么是进程间通信?\n\n###\n\n","completion":" 进程间通信是指不同进程之间进行数据交换和通信的机制,常见的方式有管道、共享内存、消息队列、信号量等。###"}
{"prompt":"什么是文件描述符?\n\n###\n\n","completion":" 文件描述符是一个非负整数它是操作系统内核为了管理打开的文件而维护的一种抽象概念。应用程序使用文件描述符来访问文件或其他i\/o设备###"}
{"prompt":"什么是中断? \n\n###\n\n","completion":" 中断是指计算机硬件或软件在执行程序时,暂停当前程序的执行,转而去执行另一个程序或处理器所需的其他任务的过程。中断可以是外部中断、内部中断或软中断。###"}
{"prompt":"什么是页表?\n\n###\n\n","completion":" 页表是一种数据结构,它用于将虚拟地址转换为物理地址。在虚拟内存中,每个进程都有自己的页表,它描述了虚拟地址空间中每个页面对应的物理地址。 ###"}
{"prompt":"什么是僵尸进程?\n\n###\n\n","completion":" 僵尸进程是指一个已经完成执行但其父进程还没有对其进行善后处理的进程,它会一直占用系统资源,直到其父进程将其处理为止。###"}
{"prompt":"什么是守护进程? \n\n###\n\n","completion":" 守护进程是一种在后台运行的特殊进程,它通常用于提供某种服务或执行某种系统任务。守护进程通常不与任何用户界面交互,也不会终止,直到操作系统关机或重启。###"}
{"prompt":"计算机操作系统是什么?\n\n###\n\n","completion":" 计算机操作系统是一种控制计算机硬件和协调应用程序运行的系统软件。###"}
{"prompt":"什么是进程?\n\n###\n\n","completion":" 进程是一个正在执行中的程序实例,它包括了程序计数器、堆栈、数据区等信息。###"}
{"prompt":"什么是线程?\n\n###\n\n","completion":" 线程是操作系统能够进行运算调度的最小单位,它是进程的一个执行流程,共享进程的地址空间。###"}
{"prompt":"什么是虚拟内存?\n\n###\n\n","completion":" 虚拟内存是一种计算机系统内存管理技术,它使得应用程序认为它拥有连续的可用的内存,而实际上它通常是被分成多个物理内存碎片。###"}
{"prompt":"什么是死锁?\n\n###\n\n","completion":" 死锁是指两个或多个进程在执行过程中,因争夺资源而造成的一种互相等待的现象,导致所有进程都无法继续执行。###"}
{"prompt":"什么是缓存?\n\n###\n\n","completion":" 缓存是指在计算机中暂存数据的高速存储器,它可以提高计算机对数据的访问速度,避免频繁地访问较慢的主存储器。###"}
{"prompt":"什么是文件系统?\n\n###\n\n","completion":" 文件系统是计算机中用来管理和组织文件的一种机制,它通过一系列的数据结构来描述文件和目录的组织方式,以及文件如何存储和访问。###"}
{"prompt":"什么是调度算法?\n\n###\n\n","completion":" 调度算法是指操作系统中用来决定进程或线程在cpu上执行顺序的一种算法它的目标是最大化系统吞吐量、最小化响应时间或最大化资源利用率等。###"}

View File

@@ -5,8 +5,7 @@ import type { InitChatResponse } from './response/chat';
/**
* 获取一个聊天框的ID
*/
export const getChatSiteId = (modelId: string, isShare = false) =>
GET<string>(`/chat/generate?modelId=${modelId}&isShare=${isShare ? 'true' : 'false'}`);
export const getChatSiteId = (modelId: string) => GET<string>(`/chat/generate?modelId=${modelId}`);
/**
* 获取初始化聊天内容
@@ -39,6 +38,7 @@ export const postSaveChat = (data: { chatId: string; prompts: ChatItemType[] })
POST('/chat/saveChat', data);
/**
* 删除最后一句
* 删除一句对话
*/
export const delLastMessage = (chatId: string) => DELETE(`/chat/delLastMessage?chatId=${chatId}`);
export const delChatRecordByIndex = (chatId: string, index: number) =>
DELETE(`/chat/delChatRecordByIndex?chatId=${chatId}&index=${index}`);

1
src/api/common.ts Normal file
View File

@@ -0,0 +1 @@
import { GET, POST, DELETE } from './request';

View File

@@ -3,12 +3,12 @@ import { RequestPaging } from '../types/index';
import { Obj2Query } from '@/utils/tools';
import type { DataListItem } from '@/types/data';
import type { PagingData } from '../types/index';
import { DataItemSchema } from '@/types/mongoSchema';
import type { DataItemSchema } from '@/types/mongoSchema';
import type { CreateDataProps } from '@/pages/data/components/CreateDataModal';
export const getDataList = (data: RequestPaging) =>
GET<PagingData<DataListItem>>(`/data/getDataList?${Obj2Query(data)}`);
export const getDataList = () => GET<DataListItem[]>(`/data/getDataList`);
export const postData = (name: string) => POST<string>(`/data/postData?name=${name}`);
export const postData = (data: CreateDataProps) => POST<string>(`/data/postData`, data);
export const postSplitData = (dataId: string, text: string) =>
POST(`/data/splitData`, { dataId, text });

View File

@@ -1,28 +1,105 @@
import { GET, POST, DELETE, PUT } from './request';
import type { ModelSchema } from '@/types/mongoSchema';
import type { ModelSchema, ModelDataSchema, ModelSplitDataSchema } from '@/types/mongoSchema';
import { ModelUpdateParams } from '@/types/model';
import { TrainingItemType } from '../types/training';
import { RequestPaging } from '../types/index';
import { Obj2Query } from '@/utils/tools';
/**
* 获取模型列表
*/
export const getMyModels = () => GET<ModelSchema[]>('/model/list');
/**
* 创建一个模型
*/
export const postCreateModel = (data: { name: string; serviceModelName: string }) =>
POST<ModelSchema>('/model/create', data);
/**
* 根据 ID 删除模型
*/
export const delModelById = (id: string) => DELETE(`/model/del?modelId=${id}`);
/**
* 根据 ID 获取模型
*/
export const getModelById = (id: string) => GET<ModelSchema>(`/model/detail?modelId=${id}`);
/**
* 根据 ID 更新模型
*/
export const putModelById = (id: string, data: ModelUpdateParams) =>
PUT(`/model/update?modelId=${id}`, data);
export const postTrainModel = (id: string, form: FormData) =>
POST(`/model/train?modelId=${id}`, form, {
POST(`/model/train/train?modelId=${id}`, form, {
headers: {
'content-type': 'multipart/form-data'
}
});
export const putModelTrainingStatus = (id: string) => PUT(`/model/putTrainStatus?modelId=${id}`);
export const putModelTrainingStatus = (id: string) =>
PUT(`/model/train/putTrainStatus?modelId=${id}`);
export const getModelTrainings = (id: string) =>
GET<TrainingItemType[]>(`/model/getTrainings?modelId=${id}`);
GET<TrainingItemType[]>(`/model/train/getTrainings?modelId=${id}`);
/* 模型 data */
type GetModelDataListProps = RequestPaging & {
modelId: string;
};
/**
* 获取模型的知识库数据
*/
export const getModelDataList = (props: GetModelDataListProps) =>
GET(`/model/data/getModelData?${Obj2Query(props)}`);
/**
* 获取导出数据(不分页)
*/
export const getExportDataList = (modelId: string) =>
GET<[string, string][]>(`/model/data/exportModelData?modelId=${modelId}`);
/**
* 获取模型正在拆分数据的数量
*/
export const getModelSplitDataListLen = (modelId: string) =>
GET<number>(`/model/data/getSplitData?modelId=${modelId}`);
/**
* 获取 web 页面内容
*/
export const getWebContent = (url: string) => POST<string>(`/model/data/fetchingUrlData`, { url });
/**
* 手动输入数据
*/
export const postModelDataInput = (data: {
modelId: string;
data: { text: ModelDataSchema['text']; q: ModelDataSchema['q'] }[];
}) => POST<number>(`/model/data/pushModelDataInput`, data);
/**
* 拆分数据
*/
export const postModelDataSplitData = (data: { modelId: string; text: string; prompt: string }) =>
POST(`/model/data/splitData`, data);
/**
* json导入数据
*/
export const postModelDataCsvData = (modelId: string, data: string[][]) =>
POST<number>(`/model/data/pushModelDataCsv`, { modelId, data: data });
/**
* 更新模型数据
*/
export const putModelDataById = (data: { dataId: string; text: string; q?: string }) =>
PUT('/model/data/putModelData', data);
/**
* 删除一条模型数据
*/
export const delOneModelData = (dataId: string) =>
DELETE(`/model/data/delModelDataById?dataId=${dataId}`);

16
src/api/openapi.ts Normal file
View File

@@ -0,0 +1,16 @@
import { GET, POST, DELETE } from './request';
import { UserOpenApiKey } from '@/types/openapi';
/**
* crete a api key
*/
export const createAOpenApiKey = () => POST<string>('/openapi/postKey');
/**
* get api keys
*/
export const getOpenApiKeys = () => GET<UserOpenApiKey[]>('/openapi/getKeys');
/**
* delete api by id
*/
export const delOpenApiById = (id: string) => DELETE(`/openapi/delKey?id=${id}`);

View File

@@ -7,8 +7,7 @@ export type InitChatResponse = {
name: string;
avatar: string;
intro: string;
secret: ModelSchema.secret;
chatModel: ModelSchema.service.ChatModel; // 模型
chatModel: ModelSchema.service.chatModel; // 对话模型名
modelName: ModelSchema.service.modelName; // 底层模型
history: ChatItemType[];
isExpiredTime: boolean;
};

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1680878351566" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1173" xmlns:xlink="http://www.w3.org/1999/xlink" width="48" height="48"><path d="M896 771.413333h-768c-51.2 0-93.866667-42.666667-93.866667-93.866666V209.92c0-51.2 42.666667-93.866667 93.866667-93.866667h768c51.2 0 93.866667 42.666667 93.866667 93.866667v465.92c0 52.906667-42.666667 95.573333-93.866667 95.573333zM128 167.253333C104.106667 167.253333 85.333333 186.026667 85.333333 209.92v465.92c0 23.893333 18.773333 42.666667 42.666667 42.666667h768c23.893333 0 42.666667-18.773333 42.666667-42.666667V209.92c0-23.893333-18.773333-42.666667-42.666667-42.666667h-768z" p-id="1174"></path><path d="M512 907.946667c-13.653333 0-25.6-11.946667-25.6-25.6v-136.533334c0-13.653333 11.946667-25.6 25.6-25.6s25.6 11.946667 25.6 25.6v136.533334c0 13.653333-11.946667 25.6-25.6 25.6z" p-id="1175"></path><path d="M680.96 907.946667H343.04c-13.653333 0-25.6-11.946667-25.6-25.6s11.946667-25.6 25.6-25.6h337.92c13.653333 0 25.6 11.946667 25.6 25.6s-11.946667 25.6-25.6 25.6zM776.533333 648.533333h-529.066666c-13.653333 0-25.6-11.946667-25.6-25.6s11.946667-25.6 25.6-25.6h530.773333c13.653333 0 25.6 11.946667 25.6 25.6s-11.946667 25.6-27.306667 25.6z" p-id="1176"></path></svg>

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1679805359001" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1328" xmlns:xlink="http://www.w3.org/1999/xlink" width="48" height="48"><path d="M416.583186 1022.194004c-5.417989 0-10.835979-1.203998-16.253968-3.611993-15.049971-6.621987-24.681952-21.069959-24.681952-37.323927l0-299.795414c0-12.641975 5.417989-24.079953 15.651969-31.905938 9.631981-7.825985 22.273956-10.23398 34.915932-7.825985l417.787184 99.931805 84.279835-599.590829c1.203998-9.631981-8.427984-16.253968-16.855967-11.437978L147.489712 573.102881l139.061728 35.517931c19.865961 4.815991 34.313933 22.875955 32.507937 43.343915-2.407995 25.885949-26.487948 42.139918-50.567901 36.119929L30.70194 627.282775c-16.253968-4.213992-27.691946-17.457966-30.099941-33.711934-2.407995-16.253968 5.417989-32.507937 19.865961-40.93592L962.59612 6.621987c13.243974-7.825985 30.099941-7.223986 43.343915 1.203998 12.641975 8.427984 19.865961 24.079953 17.457966 39.129924l-105.349794 750.090535c-1.805996 11.437978-7.825985 21.671958-17.457966 28.293945-9.631981 6.621987-21.069959 8.427984-32.507937 6.019988l-411.165197-98.125808 0 154.111699 81.87184-76.453851c15.049971-13.845973 37.925926-16.855967 54.179894-4.213992 20.46796 15.651969 21.069959 45.149912 3.009994 62.005879L444.275132 1011.358025C436.449148 1018.582011 426.817166 1022.194004 416.583186 1022.194004L416.583186 1022.194004z" p-id="1329"></path><path d="M416.583186 722.398589c-9.631981 0-19.263962-3.611993-27.089947-10.23398-16.855967-15.049971-18.059965-40.93592-3.009994-57.791887l216.117578-242.003527c15.049971-16.855967 40.93592-18.059965 57.791887-3.009994 16.855967 15.049971 18.059965 40.93592 3.009994 57.791887l-216.117578 242.003527C438.857143 718.184597 427.419165 722.398589 416.583186 722.398589L416.583186 722.398589z" p-id="1330"></path></svg>

After

Width:  |  Height:  |  Size: 1.9 KiB

View File

Before

Width:  |  Height:  |  Size: 2.9 KiB

After

Width:  |  Height:  |  Size: 2.9 KiB

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1679805221456" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1173" xmlns:xlink="http://www.w3.org/1999/xlink" width="48" height="48"><path d="M267.3 834.6h-96.5c-27.4 0-49.7-22.3-49.7-49.7V115.2c0-27.4 22.3-49.7 49.7-49.7H727c27.4 0 49.7 22.3 49.7 49.7v96.5h-42.6v-96.5c0-3.9-3.2-7.1-7.1-7.1H170.8c-3.9 0-7.1 3.2-7.1 7.1v669.7c0 3.9 3.2 7.1 7.1 7.1h96.5v42.6z" p-id="1174"></path><path d="M851.9 959.5H295.7c-27.4 0-49.7-22.3-49.7-49.7V240.1c0-27.4 22.3-49.7 49.7-49.7h556.2c27.4 0 49.7 22.3 49.7 49.7v669.7c-0.1 27.4-22.3 49.7-49.7 49.7zM295.7 233c-3.9 0-7.1 3.2-7.1 7.1v669.7c0 3.9 3.2 7.1 7.1 7.1h556.2c3.9 0 7.1-3.2 7.1-7.1V240.1c0-3.9-3.2-7.1-7.1-7.1H295.7z" p-id="1175"></path></svg>

After

Width:  |  Height:  |  Size: 878 B

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1680878410563" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="2745" xmlns:xlink="http://www.w3.org/1999/xlink" width="48" height="48"><path d="M256 512l81.6 108.8a32 32 0 0 1-51.2 38.4l-96-128a31.968 31.968 0 0 1 0-38.4l96-128a32 32 0 0 1 51.2 38.4L256 512zM670.4 620.8a32 32 0 0 0 51.2 38.4l96-128a31.968 31.968 0 0 0 0-38.4l-96-128a32 32 0 0 0-51.2 38.4L752 512l-81.6 108.8zM503.232 646.944a32 32 0 1 1-62.464-13.888l64-288a32 32 0 1 1 62.464 13.888l-64 288z" p-id="2746"></path><path d="M160 144a32 32 0 0 0-32 32V864a32 32 0 0 0 32 32h688a32 32 0 0 0 32-32V176a32 32 0 0 0-32-32H160z m0-64h688a96 96 0 0 1 96 96V864a96 96 0 0 1-96 96H160a96 96 0 0 1-96-96V176a96 96 0 0 1 96-96z" p-id="2747"></path></svg>

After

Width:  |  Height:  |  Size: 897 B

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1680878383832" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1637" xmlns:xlink="http://www.w3.org/1999/xlink" width="48" height="48"><path d="M511.333 63.333c-247.424 0-448 200.576-448 448s200.576 448 448 448 448-200.576 448-448-200.576-448-448-448z m0 832c-51.868 0-102.15-10.144-149.451-30.15-36.011-15.231-69.123-35.67-98.812-60.897 12.177-31.985 42.226-63.875 84.223-88.903C396.189 686.243 456.222 669.53 512 669.53c55.631 0 115.416 16.658 164.026 45.703 41.762 24.953 71.689 56.812 83.863 88.804-29.764 25.342-62.976 45.865-99.106 61.146-47.299 20.006-97.582 30.15-149.45 30.15z m296.268-139.658c-20.493-35.937-54.353-68.855-98.747-95.381C649.75 624.979 579.839 605.53 512 605.53c-67.964 0-138.094 19.488-197.471 54.875-44.644 26.606-78.656 59.594-99.195 95.586-23.835-28.755-43.234-60.652-57.85-95.208-20.006-47.3-30.15-97.583-30.15-149.451s10.144-102.15 30.15-149.451c19.337-45.719 47.034-86.792 82.321-122.078 35.286-35.287 76.359-62.983 122.078-82.321 47.3-20.006 97.583-30.15 149.451-30.15 51.868 0 102.15 10.144 149.451 30.15 45.719 19.337 86.792 47.034 122.078 82.321 35.287 35.286 62.983 76.359 82.321 122.078 20.006 47.3 30.15 97.583 30.15 149.451s-10.144 102.15-30.15 149.451c-14.563 34.429-33.869 66.22-57.583 94.892z" p-id="1638"></path><path d="M512 220.223c-88.224 0-160 71.776-160 160s71.776 160 160 160c88.225 0 160-71.775 160-160s-71.775-160-160-160z m0 256c-52.935 0-96-43.065-96-96s43.065-96 96-96 96 43.065 96 96-43.065 96-96 96z" p-id="1639"></path></svg>

After

Width:  |  Height:  |  Size: 1.6 KiB

View File

@@ -1,14 +1,19 @@
import React from 'react';
import type { IconProps } from '@chakra-ui/react';
import { Icon } from '@chakra-ui/react';
import dynamic from 'next/dynamic';
const map = {
model: require('./icons/model.svg').default,
share: require('./icons/share.svg').default,
home: require('./icons/home.svg').default,
menu: require('./icons/menu.svg').default,
pay: require('./icons/pay.svg').default
pay: require('./icons/pay.svg').default,
copy: require('./icons/copy.svg').default,
chatSend: require('./icons/chatSend.svg').default,
board: require('./icons/board.svg').default,
develop: require('./icons/develop.svg').default,
user: require('./icons/user.svg').default,
chatting: require('./icons/chatting.svg').default
};
export type IconName = keyof typeof map;

View File

@@ -7,8 +7,7 @@ import { useQuery } from '@tanstack/react-query';
const unAuthPage: { [key: string]: boolean } = {
'/': true,
'/login': true,
'/chat': true
'/login': true
};
const Auth = ({ children }: { children: JSX.Element }) => {

View File

@@ -1,5 +1,5 @@
import React, { useEffect } from 'react';
import { Box, useColorMode } from '@chakra-ui/react';
import { Box, useColorMode, Flex } from '@chakra-ui/react';
import Navbar from './navbar';
import NavbarPhone from './navbarPhone';
import { useRouter } from 'next/router';
@@ -16,27 +16,27 @@ const unShowLayoutRoute: { [key: string]: boolean } = {
const navbarList = [
{
label: '介绍',
icon: 'icon-gongzuotai-01',
icon: 'board',
link: '/',
activeLink: ['/']
},
{
label: '模型',
icon: 'icon-moxing',
icon: 'model',
link: '/model/list',
activeLink: ['/model/list', '/model/detail']
},
{
label: '数据',
icon: 'icon-datafull',
link: '/data/list',
activeLink: ['/data/list', '/data/detail']
},
{
label: '账号',
icon: 'icon-yonghu-yuan',
icon: 'user',
link: '/number/setting',
activeLink: ['/number/setting']
},
{
label: '开发',
icon: 'develop',
link: '/openapi',
activeLink: ['/openapi']
}
];
@@ -62,29 +62,21 @@ const Layout = ({ children }: { children: JSX.Element }) => {
<Box h={'100%'} position={'fixed'} left={0} top={0} w={'80px'}>
<Navbar navbarList={navbarList} />
</Box>
<Box ml={'80px'} h={'100%'}>
<Box maxW={'1100px'} m={'auto'} h={'100%'} p={7} overflowY={'auto'}>
<Box h={'100%'} ml={'80px'}>
<Box h={'100%'} py={7} px={'5vw'} m={'auto'} overflowY={'auto'}>
<Auth>{children}</Auth>
</Box>
</Box>
</>
) : (
<Box pt={'60px'}>
<Box
h={'60px'}
position={'fixed'}
top={0}
left={0}
right={0}
zIndex={100}
borderBottom={'1px solid rgba(0,0,0,0.1)'}
>
<Flex h={'100%'} flexDirection={'column'}>
<Box h={'60px'} borderBottom={'1px solid rgba(0,0,0,0.1)'}>
<NavbarPhone navbarList={navbarList} />
</Box>
<Box py={3} px={4}>
<Box flex={'1 0 0'} h={0} py={3} px={4} overflowY={'auto'}>
<Auth>{children}</Auth>
</Box>
</Box>
</Flex>
)}
</Box>
) : (

View File

@@ -2,8 +2,7 @@ import React from 'react';
import { Box, Flex } from '@chakra-ui/react';
import Image from 'next/image';
import { useRouter } from 'next/router';
import Icon from '../Iconfont';
import MyIcon from '../Icon';
export enum NavbarTypeEnum {
normal = 'normal',
small = 'small'
@@ -66,20 +65,16 @@ const Navbar = ({
backgroundColor: 'transparent'
})}
>
<Icon
name={item.icon}
width={24}
height={24}
color={item.activeLink.includes(router.pathname) ? '#2B6CB0' : '#4A5568'}
<MyIcon
name={item.icon as any}
width={'24px'}
height={'24px'}
fill={item.activeLink.includes(router.pathname) ? '#2B6CB0' : '#4A5568'}
/>
<Box mt={1}>{item.label}</Box>
</Flex>
))}
</Box>
{/* 通知 icon */}
{/* <Flex className={styles.informIcon} mb={5} justifyContent={'center'}>
<Icon name={'icon-tongzhi'} width={28} height={28} color={'#718096'}></Icon>
</Flex> */}
</Flex>
);
};

View File

@@ -1,6 +1,6 @@
import React from 'react';
import { useRouter } from 'next/router';
import Icon from '../Iconfont';
import MyIcon from '../Icon';
import {
Flex,
Drawer,
@@ -39,9 +39,8 @@ const NavbarPhone = ({
px={7}
>
<Box onClick={onOpen}>
<Icon name="icon-caidan" width={20} height={20}></Icon>
<MyIcon name="menu" width={'20px'} height={'20px'} color={'blackAlpha.600'}></MyIcon>
</Box>
{/* <Icon name="icon-tongzhi" width={20} height={20}></Icon> */}
</Flex>
<Drawer isOpen={isOpen} placement="left" size={'xs'} onClose={onClose}>
<DrawerOverlay />
@@ -74,11 +73,11 @@ const NavbarPhone = ({
backgroundColor: 'transparent'
})}
>
<Icon
name={item.icon}
width={24}
height={24}
color={item.activeLink.includes(router.pathname) ? '#2B6CB0' : '#4A5568'}
<MyIcon
name={item.icon as any}
width={'24px'}
height={'24px'}
fill={item.activeLink.includes(router.pathname) ? '#2B6CB0' : '#4A5568'}
/>
<Box ml={5}>{item.label}</Box>
</Flex>

View File

@@ -3,7 +3,7 @@ import ReactMarkdown from 'react-markdown';
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
import { Box, Flex, useColorModeValue } from '@chakra-ui/react';
import { useCopyData } from '@/utils/tools';
import Icon from '@/components/Iconfont';
import Icon from '@/components/Icon';
import remarkGfm from 'remark-gfm';
import remarkMath from 'remark-math';
import rehypeKatex from 'rehype-katex';
@@ -41,7 +41,7 @@ const Markdown = ({ source, isChatting = false }: { source: string; isChatting?:
>
<Box flex={1}>{match?.[1]}</Box>
<Flex cursor={'pointer'} onClick={() => copyData(code)} alignItems={'center'}>
<Icon name={'icon-fuzhi'} width={15} height={15} color={'#fff'}></Icon>
<Icon name={'copy'} width={15} height={15} fill={'#fff'}></Icon>
<Box ml={1}></Box>
</Flex>
</Flex>

View File

@@ -53,7 +53,7 @@ const ScrollData = ({
}, [elementRef, nextPage]);
return (
<Box {...props} ref={elementRef} overflow={'auto'} position={'relative'}>
<Box {...props} ref={elementRef} overflowY={'auto'} position={'relative'}>
{children}
<Box
mt={2}

View File

@@ -23,15 +23,15 @@ const WxConcat = ({ onClose }: { onClose: () => void }) => {
<ModalBody textAlign={'center'}>
<Image
style={{ margin: 'auto' }}
src={'/imgs/wxcode.jpg'}
src={'/imgs/wx300.jpg'}
width={200}
height={200}
alt=""
/>
<Box mt={2}>
:{' '}
:
<Box as={'span'} userSelect={'all'}>
YNyiqi
fastgpt123
</Box>
</Box>
</ModalBody>

View File

@@ -4,63 +4,3 @@ export enum EmailTypeEnum {
}
export const PRICE_SCALE = 100000;
export const introPage = `
## 欢迎使用 Fast GPT
[Git 仓库](https://github.com/c121914yu/FastGPT)
### 快速开始
1. 使用邮箱注册账号。
2. 进入账号页面,添加关联账号,目前只有 openai 的账号可以添加,直接去 openai 官网,把 API Key 粘贴过来。
3. 如果填写了自己的 openai 账号,使用时会直接用你的账号。如果没有填写,需要付费使用平台的账号。
4. 进入模型页,创建一个模型,建议直接用 ChatGPT。
5. 在模型列表点击【对话】,即可使用 API 进行聊天。
### 模型配置
1. **提示语**:会在每个对话框的第一句自动加入,用于限定该模型的对话内容。
2. **单句最大长度**:每个聊天,单次输入内容的最大长度。
3. **上下文最大长度**每个聊天最多的轮数除以2建议设置为偶数。可以持续聊天但是旧的聊天内容会被截断AI 就不会知道被截取的内容。
例如上下文最大长度为6。在第 4 轮对话时,第一轮对话的内容不会被计入。
4. **过期时间**:生成对话框后,这个对话框多久过期。
5. **聊天最大加载次数**:单个对话框最多被加载几次,设置为-1代表不限制正数代表只能加载 n 次,防止被盗刷。
### 对话框介绍
1. 每个对话框以 chatId 作为标识。
2. 每次点击【对话】,都会生成新的对话框,无法回到旧的对话框。对话框内刷新,会恢复对话内容。
3. 直接分享对话框(网页)的链接给朋友,会共享同一个对话内容。但是!!!千万不要两个人同时用一个链接,会串味,还没解决这个问题。
4. 如果想分享一个纯的对话框,请点击侧边栏的分享按键。例如:
### 其他问题
还有其他问题,可以加我 wx: YNyiqi拉个交流群大家一起聊聊。
`;
export const chatProblem = `
**代理出错**
服务器代理不稳定,可以过一会儿再尝试。
**API key 问题**
请把 openai 的 API key 粘贴到账号里再创建对话。如果是使用分享的对话,不需要填写 API key。
`;
export const versionIntro = `
* 分享对话:使用的是分享者的 Api Key 生成一个对话窗口进行分享。
* 分享空白对话:为该模型创建一个空白的聊天分享出去。
* 分享当前对话:会把当前聊天的内容也分享出去,但是要注意不要多个人同时用一个聊天内容。
* 增加模型介绍:可以在模型编辑页添加对模型的介绍,方便提示模型的范围。
* 温度调整:可以在模型编辑页调整模型温度,以便适应不同类型的对话。例如,翻译类的模型可以把温度拉低;创作类的模型可以把温度拉高。
`;
export const shareHint = `
你正准备分享对话,请确保分享链接不会滥用,因为它是使用的是你的 API key。
* 分享空白对话:为该模型创建一个空白的聊天分享出去。
* 分享当前对话:会把当前聊天的内容也分享出去,但是要注意不要多个人同时用一个聊天内容。
`;

6
src/constants/data.ts Normal file
View File

@@ -0,0 +1,6 @@
import type { DataType } from '@/types/data';
export const DataTypeTextMap: Record<DataType, string> = {
QA: '问答拆分',
abstract: '摘要总结'
};

View File

@@ -1,17 +1,27 @@
import type { ServiceName } from '@/types/mongoSchema';
import { ModelSchema } from '../types/mongoSchema';
import type { ServiceName, ModelDataType, ModelSchema } from '@/types/mongoSchema';
import type { RedisModelDataItemType } from '@/types/redis';
export enum ChatModelNameEnum {
GPT35 = 'gpt-3.5-turbo',
GPT3 = 'text-davinci-003'
VECTOR_GPT = 'VECTOR_GPT',
GPT3 = 'text-davinci-003',
VECTOR = 'text-embedding-ada-002'
}
export const ChatModelNameMap = {
[ChatModelNameEnum.GPT35]: 'gpt-3.5-turbo',
[ChatModelNameEnum.VECTOR_GPT]: 'gpt-3.5-turbo',
[ChatModelNameEnum.GPT3]: 'text-davinci-003',
[ChatModelNameEnum.VECTOR]: 'text-embedding-ada-002'
};
export type ModelConstantsData = {
serviceCompany: `${ServiceName}`;
name: string;
model: `${ChatModelNameEnum}`;
trainName: string; // 空字符串代表不能训练
maxToken: number;
contextMaxToken: number;
maxTemperature: number;
price: number; // 多少钱 / 1token单位: 0.00001元
};
@@ -23,18 +33,30 @@ export const modelList: ModelConstantsData[] = [
model: ChatModelNameEnum.GPT35,
trainName: '',
maxToken: 4000,
contextMaxToken: 7500,
maxTemperature: 2,
price: 3
},
{
serviceCompany: 'openai',
name: 'GPT3',
model: ChatModelNameEnum.GPT3,
trainName: 'davinci',
name: '知识库',
model: ChatModelNameEnum.VECTOR_GPT,
trainName: 'vector',
maxToken: 4000,
maxTemperature: 2,
price: 30
contextMaxToken: 7000,
maxTemperature: 1,
price: 3
}
// {
// serviceCompany: 'openai',
// name: 'GPT3',
// model: ChatModelNameEnum.GPT3,
// trainName: 'davinci',
// maxToken: 4000,
// contextMaxToken: 7500,
// maxTemperature: 2,
// price: 30
// }
];
export enum TrainingStatusEnum {
@@ -70,6 +92,11 @@ export const formatModelStatus = {
}
};
export const ModelDataStatusMap: Record<RedisModelDataItemType['status'], string> = {
ready: '训练完成',
waiting: '训练中'
};
export const defaultModel: ModelSchema = {
_id: '',
userId: '',

6
src/constants/redis.ts Normal file
View File

@@ -0,0 +1,6 @@
export const VecModelDataPrefix = 'model:data';
export const VecModelDataIdx = `idx:${VecModelDataPrefix}:hash`;
export enum ModelDataStatusEnum {
ready = 'ready',
waiting = 'waiting'
}

View File

@@ -1,6 +1,9 @@
export enum BillTypeEnum {
chat = 'chat',
splitData = 'splitData',
QA = 'QA',
abstract = 'abstract',
vector = 'vector',
return = 'return'
}
export enum PageTypeEnum {
@@ -11,6 +14,9 @@ export enum PageTypeEnum {
export const BillTypeMap: Record<`${BillTypeEnum}`, string> = {
[BillTypeEnum.chat]: '对话',
[BillTypeEnum.splitData]: '文本拆分',
[BillTypeEnum.splitData]: 'QA拆分',
[BillTypeEnum.QA]: 'QA拆分',
[BillTypeEnum.abstract]: '摘要总结',
[BillTypeEnum.vector]: '索引生成',
[BillTypeEnum.return]: '退款'
};

15
src/hooks/useMarkdown.ts Normal file
View File

@@ -0,0 +1,15 @@
import { useQuery } from '@tanstack/react-query';
export const getMd = async (url: string) => {
const response = await fetch(`/docs/${url}`);
const textContent = await response.text();
return textContent;
};
export const useMarkdown = ({ url }: { url: string }) => {
const { data = '' } = useQuery([url], () => getMd(url));
return {
data
};
};

105
src/hooks/usePagination.tsx Normal file
View File

@@ -0,0 +1,105 @@
import { useState, useCallback, useMemo, useEffect } from 'react';
import type { PagingData } from '../types/index';
import { IconButton, Flex, Box, Input } from '@chakra-ui/react';
import { ArrowBackIcon, ArrowForwardIcon } from '@chakra-ui/icons';
import { useMutation } from '@tanstack/react-query';
import { useToast } from './useToast';
export const usePagination = <T = any,>({
api,
pageSize = 10,
params = {}
}: {
api: (data: any) => any;
pageSize?: number;
params?: Record<string, any>;
}) => {
const { toast } = useToast();
const [pageNum, setPageNum] = useState(1);
const [total, setTotal] = useState(0);
const [data, setData] = useState<T[]>([]);
const maxPage = useMemo(() => Math.ceil(total / pageSize), [pageSize, total]);
const { mutate, isLoading } = useMutation({
mutationFn: async (num: number = pageNum) => {
try {
const res: PagingData<T> = await api({
pageNum: num,
pageSize,
...params
});
setPageNum(num);
setTotal(res.total);
setData(res.data);
} catch (error: any) {
toast({
title: error?.message || '获取数据异常',
status: 'error'
});
console.log(error);
}
}
});
useEffect(() => {
mutate(1);
}, []);
const Pagination = useCallback(() => {
return (
<Flex alignItems={'center'} justifyContent={'end'}>
<IconButton
isDisabled={pageNum === 1}
icon={<ArrowBackIcon />}
aria-label={'left'}
size={'sm'}
w={'28px'}
h={'28px'}
onClick={() => mutate(pageNum - 1)}
/>
<Flex mx={2} alignItems={'center'}>
<Input
defaultValue={pageNum}
w={'50px'}
size={'xs'}
type={'number'}
min={1}
max={maxPage}
onBlur={(e) => {
const val = +e.target.value;
if (val === pageNum) return;
if (val >= maxPage) {
mutate(maxPage);
} else if (val < 1) {
mutate(1);
} else {
mutate(+e.target.value);
}
}}
/>
<Box mx={2}>/</Box>
{maxPage}
</Flex>
<IconButton
isDisabled={pageNum === maxPage}
icon={<ArrowForwardIcon />}
aria-label={'left'}
size={'sm'}
w={'28px'}
h={'28px'}
onClick={() => mutate(pageNum + 1)}
/>
</Flex>
);
}, [maxPage, mutate, pageNum]);
return {
pageNum,
pageSize,
total,
data,
isLoading,
Pagination,
getData: mutate
};
};

View File

@@ -1,6 +1,5 @@
import { useState, useCallback } from 'react';
import { useState, useCallback, useEffect } from 'react';
import type { PagingData } from '../types/index';
import { useQuery } from '@tanstack/react-query';
import { useToast } from './useToast';
export const usePaging = <T = any>({
@@ -8,7 +7,7 @@ export const usePaging = <T = any>({
pageSize = 10,
params = {}
}: {
api: (data: any) => Promise<PagingData<T>>;
api: (data: any) => any;
pageSize?: number;
params?: Record<string, any>;
}) => {
@@ -30,7 +29,7 @@ export const usePaging = <T = any>({
setRequesting(true);
try {
const res = await api({
const res: PagingData<T> = await api({
pageNum: num,
pageSize,
...params
@@ -59,9 +58,14 @@ export const usePaging = <T = any>({
[api, isLoadAll, pageSize, params, requesting, toast]
);
const nextPage = useCallback(() => getData(pageNum + 1), [getData, pageNum]);
const nextPage = useCallback(() => {
if (requesting || isLoadAll) return;
getData(pageNum + 1);
}, [getData, isLoadAll, pageNum, requesting]);
useQuery(['init'], () => getData(1, true));
useEffect(() => {
getData(1, true);
}, []);
return {
pageNum,
@@ -72,6 +76,7 @@ export const usePaging = <T = any>({
requesting,
isLoadAll,
nextPage,
initRequesting
initRequesting,
setData
};
};

View File

@@ -1,4 +1,5 @@
import type { AppProps, NextWebVitalsMetric } from 'next/app';
import { useEffect } from 'react';
import type { AppProps } from 'next/app';
import Script from 'next/script';
import Head from 'next/head';
import { ChakraProvider, ColorModeScript } from '@chakra-ui/react';
@@ -9,6 +10,7 @@ import NProgress from 'nprogress'; //nprogress module
import Router from 'next/router';
import 'nprogress/nprogress.css';
import '../styles/reset.scss';
import { useToast } from '@/hooks/useToast';
//Binding events.
Router.events.on('routeChangeStart', () => NProgress.start());
@@ -27,6 +29,17 @@ const queryClient = new QueryClient({
});
export default function App({ Component, pageProps }: AppProps) {
const { toast } = useToast();
// 校验是否支持 click 事件
useEffect(() => {
if (typeof document.createElement('div').click !== 'function') {
toast({
title: '你的浏览器版本过低',
status: 'warning'
});
}
}, [toast]);
return (
<>
<Head>
@@ -34,11 +47,10 @@ export default function App({ Component, pageProps }: AppProps) {
<meta name="description" content="Generated by Fast GPT" />
<meta
name="viewport"
content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=0;"
content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=0"
/>
<link rel="icon" href="/favicon.ico" />
</Head>
<Script src="/js/iconfont.js" strategy="afterInteractive"></Script>
<Script src="/js/qrcode.min.js" strategy="afterInteractive"></Script>
<Script src="/js/pdf.js" strategy="afterInteractive"></Script>
<QueryClientProvider client={queryClient}>

View File

@@ -1,8 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import { connectToDatabase } from '@/service/mongo';
import { getOpenAIApi, authChat } from '@/service/utils/chat';
import { httpsAgent } from '@/service/utils/tools';
import { httpsAgent, openaiChatFilter } from '@/service/utils/tools';
import { ChatCompletionRequestMessage, ChatCompletionRequestMessageRoleEnum } from 'openai';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
@@ -10,6 +9,7 @@ import type { ModelSchema } from '@/types/mongoSchema';
import { PassThrough } from 'stream';
import { modelList } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { gpt35StreamResponse } from '@/service/utils/openai';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -32,26 +32,38 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
prompt: ChatItemType;
chatId: string;
};
const { authorization } = req.headers;
if (!chatId || !prompt) {
throw new Error('缺少参数');
}
await connectToDatabase();
let startTime = Date.now();
const { chat, userApiKey, systemKey, userId } = await authChat(chatId, authorization);
const model: ModelSchema = chat.modelId;
const modelConstantsData = modelList.find((item) => item.model === model.service.modelName);
if (!modelConstantsData) {
throw new Error('模型加载异常');
}
// 读取对话内容
const prompts = [...chat.content, prompt];
// 上下文长度过滤
const maxContext = model.security.contextMaxLen;
const filterPrompts =
prompts.length > maxContext ? prompts.slice(prompts.length - maxContext) : prompts;
// 如果有系统提示词,自动插入
if (model.systemPrompt) {
prompts.unshift({
obj: 'SYSTEM',
value: model.systemPrompt
});
}
// 格式化文本内容
// 控制在 tokens 数量,防止超出
const filterPrompts = openaiChatFilter(prompts, modelConstantsData.contextMaxToken);
// 格式化文本内容成 chatgpt 格式
const map = {
Human: ChatCompletionRequestMessageRoleEnum.User,
AI: ChatCompletionRequestMessageRoleEnum.Assistant,
@@ -63,25 +75,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
content: item.value
})
);
// 如果有系统提示词,自动插入
if (model.systemPrompt) {
formatPrompts.unshift({
role: 'system',
content: model.systemPrompt
});
}
// console.log(formatPrompts);
// 计算温度
const modelConstantsData = modelList.find((item) => item.model === model.service.modelName);
if (!modelConstantsData) {
throw new Error('模型异常');
}
const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);
// 获取 chatAPI
const chatAPI = getOpenAIApi(userApiKey || systemKey);
let startTime = Date.now();
// 发出请求
const chatResponse = await chatAPI.createChatCompletion(
{
@@ -92,70 +91,34 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
frequency_penalty: 0.5, // 越大,重复内容越少
presence_penalty: -0.5, // 越大,越容易出现新内容
stream: true,
stop: ['。.!.']
stop: ['.!?。']
},
{
timeout: 40000,
responseType: 'stream',
httpsAgent
httpsAgent: httpsAgent(!userApiKey)
}
);
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
// 创建响应流
res.setHeader('Content-Type', 'text/event-stream;charset-utf-8');
res.setHeader('Access-Control-Allow-Origin', '*');
res.setHeader('X-Accel-Buffering', 'no');
res.setHeader('Cache-Control', 'no-cache, no-transform');
step = 1;
let responseContent = '';
stream.pipe(res);
const onParse = async (event: ParsedEvent | ReconnectInterval) => {
if (event.type !== 'event') return;
const data = event.data;
if (data === '[DONE]') return;
try {
const json = JSON.parse(data);
const content: string = json?.choices?.[0].delta.content || '';
if (!content || (responseContent === '' && content === '\n')) return;
responseContent += content;
// console.log('content:', content)
!stream.destroyed && stream.push(content.replace(/\n/g, '<br/>'));
} catch (error) {
error;
}
};
const decoder = new TextDecoder();
try {
for await (const chunk of chatResponse.data as any) {
if (stream.destroyed) {
// 流被中断了,直接忽略后面的内容
break;
}
const parser = createParser(onParse);
parser.feed(decoder.decode(chunk));
}
} catch (error) {
console.log('pipe error', error);
}
// close stream
!stream.destroyed && stream.push(null);
stream.destroy();
const { responseContent } = await gpt35StreamResponse({
res,
stream,
chatResponse
});
const promptsContent = formatPrompts.map((item) => item.content).join('');
// 只有使用平台的 key 才计费
!userApiKey &&
pushChatBill({
modelName: model.service.modelName,
userId,
chatId,
text: promptsContent + responseContent
});
pushChatBill({
isPay: !userApiKey,
modelName: model.service.modelName,
userId,
chatId,
text: promptsContent + responseContent
});
} catch (err: any) {
if (step === 1) {
// 直接结束流

View File

@@ -0,0 +1,62 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Chat } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { chatId, index } = req.query as { chatId: string; index: string };
const { authorization } = req.headers;
if (!authorization) {
throw new Error('无权操作');
}
if (!chatId || !index) {
throw new Error('缺少参数');
}
await connectToDatabase();
// 凭证校验
const userId = await authToken(authorization);
const chatRecord = await Chat.findById(chatId);
if (!chatRecord) {
throw new Error('找不到对话');
}
// 重新计算 index跳过已经被删除的内容
let unDeleteIndex = +index;
let deletedIndex = 0;
for (deletedIndex = 0; deletedIndex < chatRecord.content.length; deletedIndex++) {
if (!chatRecord.content[deletedIndex].deleted) {
unDeleteIndex--;
if (unDeleteIndex < 0) {
break;
}
}
}
// 删除最一条数据库记录, 也就是预发送的那一条
await Chat.updateOne(
{
_id: chatId,
userId
},
{
$set: {
[`content.${deletedIndex}.deleted`]: true,
updateTime: Date.now()
}
}
);
jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}

View File

@@ -1,28 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Chat } from '@/service/mongo';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { chatId } = req.query as { chatId: string };
if (!chatId) {
throw new Error('缺少参数');
}
await connectToDatabase();
// 删除最一条数据库记录, 也就是预发送的那一条
await Chat.findByIdAndUpdate(chatId, {
$pop: { content: 1 },
updateTime: Date.now()
});
jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}

View File

@@ -7,14 +7,13 @@ import type { ModelSchema } from '@/types/mongoSchema';
/* 获取我的模型 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
const { modelId, isShare = 'false' } = req.query as {
const { modelId } = req.query as {
modelId: string;
isShare?: 'true' | 'false';
};
const { authorization } = req.headers;
if (!authorization) {
throw new Error('无权操作');
throw new Error('无权生成对话');
}
if (!modelId) {
@@ -26,24 +25,20 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
await connectToDatabase();
// 获取模型配置
// 校验是否为用户的模型
const model = await Model.findOne<ModelSchema>({
_id: modelId,
userId
});
if (!model) {
throw new Error('模型不存在');
throw new Error('无权使用该模型');
}
// 创建 chat 数据
const response = await Chat.create({
userId,
modelId,
expiredTime: Date.now() + model.security.expiredTime,
loadAmount: model.security.maxLoadAmount,
updateTime: Date.now(),
isShare: isShare === 'true',
content: []
});

View File

@@ -1,165 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import { connectToDatabase } from '@/service/mongo';
import { getOpenAIApi, authChat } from '@/service/utils/chat';
import { httpsAgent } from '@/service/utils/tools';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import type { ModelSchema } from '@/types/mongoSchema';
import { PassThrough } from 'stream';
import { modelList } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
let step = 0; // step=1时表示开始了流响应
const stream = new PassThrough();
stream.on('error', () => {
console.log('error: ', 'stream error');
stream.destroy();
});
res.on('close', () => {
stream.destroy();
});
res.on('error', () => {
console.log('error: ', 'request error');
stream.destroy();
});
try {
const { chatId, prompt } = req.body as {
prompt: ChatItemType;
chatId: string;
};
const { authorization } = req.headers;
if (!chatId || !prompt) {
throw new Error('缺少参数');
}
await connectToDatabase();
const { chat, userApiKey, systemKey, userId } = await authChat(chatId, authorization);
const model: ModelSchema = chat.modelId;
// 读取对话内容
const prompts = [...chat.content, prompt];
// 上下文长度过滤
const maxContext = model.security.contextMaxLen;
const filterPrompts =
prompts.length > maxContext ? prompts.slice(prompts.length - maxContext) : prompts;
// 格式化文本内容
const map = {
Human: 'Human',
AI: 'AI',
SYSTEM: 'SYSTEM'
};
const formatPrompts: string[] = filterPrompts.map((item: ChatItemType) => item.value);
// 如果有系统提示词,自动插入
if (model.systemPrompt) {
formatPrompts.unshift(`${model.systemPrompt}`);
}
const promptText = formatPrompts.join('</s>');
// 计算温度
const modelConstantsData = modelList.find((item) => item.model === model.service.modelName);
if (!modelConstantsData) {
throw new Error('模型异常');
}
const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);
// 获取 chatAPI
const chatAPI = getOpenAIApi(userApiKey || systemKey);
let startTime = Date.now();
// 发出请求
const chatResponse = await chatAPI.createCompletion(
{
model: model.service.chatModel,
temperature: temperature,
prompt: promptText,
stream: true,
max_tokens: modelConstantsData.maxToken,
presence_penalty: 0, // 越大,越容易出现新内容
frequency_penalty: 0, // 越大,重复内容越少
stop: ['。!?.!.', `</s>`]
},
{
timeout: 40000,
responseType: 'stream',
httpsAgent
}
);
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
// 创建响应流
res.setHeader('Content-Type', 'text/event-stream;charset-utf-8');
res.setHeader('Access-Control-Allow-Origin', '*');
res.setHeader('X-Accel-Buffering', 'no');
res.setHeader('Cache-Control', 'no-cache, no-transform');
step = 1;
let responseContent = '';
stream.pipe(res);
const onParse = async (event: ParsedEvent | ReconnectInterval) => {
if (event.type !== 'event') return;
const data = event.data;
if (data === '[DONE]') return;
try {
const json = JSON.parse(data);
const content: string = json?.choices?.[0].text || '';
if (!content || (responseContent === '' && content === '\n')) return;
responseContent += content;
// console.log('content:', content);
!stream.destroyed && stream.push(content.replace(/\n/g, '<br/>'));
} catch (error) {
error;
}
};
const decoder = new TextDecoder();
try {
for await (const chunk of chatResponse.data as any) {
if (stream.destroyed) {
// 流被中断了,直接忽略后面的内容
break;
}
const parser = createParser(onParse);
parser.feed(decoder.decode(chunk));
}
} catch (error) {
console.log('pipe error', error);
}
// close stream
!stream.destroyed && stream.push(null);
stream.destroy();
// 只有使用平台的 key 才计费
!userApiKey &&
pushChatBill({
modelName: model.service.modelName,
userId,
chatId,
text: promptText + responseContent
});
} catch (err: any) {
// console.log(err?.response);
if (step === 1) {
// 直接结束流
console.log('error结束');
stream.destroy();
} else {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}
}

View File

@@ -3,10 +3,14 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase, Chat } from '@/service/mongo';
import type { ChatPopulate } from '@/types/mongoSchema';
import type { InitChatResponse } from '@/api/response/chat';
import { authToken } from '@/service/utils/tools';
/* 获取我的模型 */
/* 初始化我的聊天框,需要身份验证 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { authorization } = req.headers;
const userId = await authToken(authorization);
const { chatId } = req.query as { chatId: string };
if (!chatId) {
@@ -16,7 +20,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await connectToDatabase();
// 获取 chat 数据
const chat = await Chat.findById<ChatPopulate>(chatId).populate({
const chat = await Chat.findOne<ChatPopulate>({
_id: chatId,
userId
}).populate({
path: 'modelId',
options: {
strictPopulate: false
@@ -27,28 +34,18 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
throw new Error('聊天框不存在');
}
if (chat.loadAmount > 0) {
await Chat.updateOne(
{
_id: chat._id
},
{
$inc: { loadAmount: -1 }
}
);
}
// filter 掉被 deleted 的内容
chat.content = chat.content.filter((item) => item.deleted !== true);
const model = chat.modelId;
jsonRes<InitChatResponse>(res, {
code: 201,
data: {
chatId: chat._id,
isExpiredTime: chat.loadAmount === 0 || chat.expiredTime <= Date.now(),
modelId: model._id,
name: model.name,
avatar: model.avatar,
intro: model.intro,
secret: model.security,
modelName: model.service.modelName,
chatModel: model.service.chatModel,
history: chat.content
}

View File

@@ -27,7 +27,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
}))
}
},
updateTime: Date.now()
updateTime: new Date()
});
jsonRes(res);

View File

@@ -0,0 +1,181 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authChat } from '@/service/utils/chat';
import { httpsAgent, openaiChatFilter, systemPromptFilter } from '@/service/utils/tools';
import { ChatCompletionRequestMessage, ChatCompletionRequestMessageRoleEnum } from 'openai';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import type { ModelSchema } from '@/types/mongoSchema';
import { PassThrough } from 'stream';
import { modelList } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { connectRedis } from '@/service/redis';
import { VecModelDataPrefix } from '@/constants/redis';
import { vectorToBuffer } from '@/utils/tools';
import { openaiCreateEmbedding, gpt35StreamResponse } from '@/service/utils/openai';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  // step = 1 means the stream response has already started (headers sent),
  // so errors after that point cannot be reported as a JSON body anymore.
  let step = 0;
  const stream = new PassThrough();
  stream.on('error', () => {
    console.log('error: ', 'stream error');
    stream.destroy();
  });
  // Client disconnected: tear down our side of the pipe as well.
  res.on('close', () => {
    stream.destroy();
  });
  res.on('error', () => {
    console.log('error: ', 'request error');
    stream.destroy();
  });
  try {
    const { chatId, prompt } = req.body as {
      prompt: ChatItemType;
      chatId: string;
    };
    const { authorization } = req.headers;
    if (!chatId || !prompt) {
      throw new Error('缺少参数');
    }
    await connectToDatabase();
    const redis = await connectRedis();
    let startTime = Date.now();
    // Resolves the chat record, which API key to use/bill, and the user id.
    const { chat, userApiKey, systemKey, userId } = await authChat(chatId, authorization);
    const model: ModelSchema = chat.modelId;
    const modelConstantsData = modelList.find((item) => item.model === model.service.modelName);
    if (!modelConstantsData) {
      throw new Error('模型加载异常');
    }
    // Conversation history plus the newly submitted prompt.
    const prompts = [...chat.content, prompt];
    // Embed the prompt so we can run a vector similarity search against it.
    const { vector: promptVector, chatAPI } = await openaiCreateEmbedding({
      isPay: !userApiKey,
      apiKey: userApiKey || systemKey,
      userId,
      text: prompt.value
    });
    // Knowledge-base lookup: find stored texts whose vectors are within
    // distance 0.24 of the prompt vector, scoped to this model.
    const redisData: any[] = await redis.sendCommand([
      'FT.SEARCH',
      `idx:${VecModelDataPrefix}:hash`,
      `@modelId:{${String(
        chat.modelId._id
      )}} @vector:[VECTOR_RANGE 0.24 $blob]=>{$YIELD_DISTANCE_AS: score}`,
      'RETURN',
      '1',
      'text',
      'SORTBY',
      'score',
      'PARAMS',
      '2',
      'blob',
      vectorToBuffer(promptVector),
      'LIMIT',
      '0',
      '30',
      'DIALECT',
      '2'
    ]);
    const formatRedisPrompt: string[] = [];
    // Raw FT.SEARCH reply (sendCommand, RESP) looks like
    // [total, key1, fields1, key2, fields2, ...]; with 'RETURN 1 text' each
    // fieldsN is ['text', value], so document field arrays sit at even
    // indices >= 2. The bound 61 corresponds to the LIMIT of 30 documents
    // above — keep the two in sync.
    for (let i = 2; i < 61; i += 2) {
      const text = redisData[i]?.[1];
      if (text) {
        formatRedisPrompt.push(text);
      }
    }
    if (formatRedisPrompt.length === 0) {
      throw new Error('对不起,我没有找到你的问题');
    }
    // Trim the matched knowledge texts down to at most 2800 tokens.
    const systemPrompt = systemPromptFilter(formatRedisPrompt, 2800);
    // Prepend the knowledge-base content as a SYSTEM message.
    prompts.unshift({
      obj: 'SYSTEM',
      value: `${model.systemPrompt} 知识库内容是最新的,知识库内容为: "${systemPrompt}"`
    });
    // Cap total conversation tokens so the request stays within model limits.
    const filterPrompts = openaiChatFilter(prompts, modelConstantsData.contextMaxToken);
    // Map internal roles (Human/AI/SYSTEM) to the ChatGPT message format.
    const map = {
      Human: ChatCompletionRequestMessageRoleEnum.User,
      AI: ChatCompletionRequestMessageRoleEnum.Assistant,
      SYSTEM: ChatCompletionRequestMessageRoleEnum.System
    };
    const formatPrompts: ChatCompletionRequestMessage[] = filterPrompts.map(
      (item: ChatItemType) => ({
        role: map[item.obj],
        content: item.value
      })
    );
    // console.log(formatPrompts);
    // Scale the model's 0-10 temperature setting into this model's max range.
    const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);
    // Fire the streaming completion request.
    const chatResponse = await chatAPI.createChatCompletion(
      {
        model: model.service.chatModel,
        temperature: temperature,
        // max_tokens: modelConstantsData.maxToken,
        messages: formatPrompts,
        frequency_penalty: 0.5, // higher => less repetition
        presence_penalty: -0.5, // higher => more new topics
        stream: true
      },
      {
        timeout: 40000,
        responseType: 'stream',
        httpsAgent: httpsAgent(!userApiKey)
      }
    );
    console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
    step = 1;
    // Pipe the OpenAI SSE stream back to the client.
    const { responseContent } = await gpt35StreamResponse({
      res,
      stream,
      chatResponse
    });
    const promptsContent = formatPrompts.map((item) => item.content).join('');
    // Record usage for billing; isPay is false when the user supplied their
    // own API key (only platform-key usage is actually charged).
    pushChatBill({
      isPay: !userApiKey,
      modelName: model.service.modelName,
      userId,
      chatId,
      text: promptsContent + responseContent
    });
    // jsonRes(res);
  } catch (err: any) {
    if (step === 1) {
      // The stream already started — just end it; no JSON error possible.
      console.log('error结束');
      stream.destroy();
    } else {
      res.status(500);
      jsonRes(res, {
        code: 500,
        error: err
      });
    }
  }
}

View File

@@ -23,8 +23,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await authToken(authorization);
const dataItems = await DataItem.find({
dataId,
status: 0
dataId
})
.sort({ _id: -1 }) // 按照创建时间倒序排列
.skip((pageNum - 1) * pageSize)
@@ -36,8 +35,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
pageSize,
data: dataItems,
total: await DataItem.countDocuments({
dataId,
status: 0
dataId
})
}
});

View File

@@ -3,18 +3,12 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Data, DataItem } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import type { DataSchema } from '@/types/mongoSchema';
import type { DataListItem } from '@/types/data';
import type { PagingData } from '@/types';
import mongoose from 'mongoose';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { authorization } = req.headers;
let { pageNum = 1, pageSize = 10 } = req.query as { pageNum: string; pageSize: string };
pageNum = +pageNum;
pageSize = +pageSize;
if (!authorization) {
throw new Error('缺少登录凭证');
@@ -34,12 +28,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
{
$sort: { createTime: -1 } // 按照创建时间倒序排列
},
{
$skip: (pageNum - 1) * pageSize // 跳过前面的数据
},
{
$limit: pageSize // 取出指定数量的数据
},
{
$lookup: {
from: 'dataitems',
@@ -71,13 +59,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
}
]);
jsonRes<PagingData<DataListItem>>(res, {
data: {
pageNum,
pageSize,
data: datalist,
total: 1
}
jsonRes(res, {
data: datalist
});
} catch (err) {
jsonRes(res, {

View File

@@ -2,11 +2,12 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Data } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import type { DataType } from '@/types/data';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
let { name } = req.query as { name: string };
if (!name) {
let { name, type } = req.body as { name: string; type: DataType };
if (!name || !type) {
throw new Error('参数错误');
}
await connectToDatabase();
@@ -18,7 +19,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
// 生成 data 集合
const data = await Data.create({
userId,
name
name,
type
});
jsonRes(res, {

View File

@@ -1,42 +1,64 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Data, DataItem } from '@/service/mongo';
import { connectToDatabase, DataItem, Data } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { generateQA } from '@/service/events/generateQA';
import { generateAbstract } from '@/service/events/generateAbstract';
import { encode } from 'gpt-token-utils';
/* 拆分数据成QA */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
let { text, dataId } = req.body as { text: string; dataId: string };
const { text, dataId } = req.body as { text: string; dataId: string };
if (!text || !dataId) {
throw new Error('参数错误');
}
text = text.replace(/\n+/g, '\n');
await connectToDatabase();
const { authorization } = req.headers;
const userId = await authToken(authorization);
const dataItems: any[] = [];
const DataRecord = await Data.findById(dataId);
// 格式化文本长度
for (let i = 0; i <= text.length / 1000; i++) {
dataItems.push({
temperature: 0,
userId,
dataId,
text: text.slice(i * 1000, (i + 1) * 1000),
status: 1
});
if (!DataRecord) {
throw new Error('找不到数据集');
}
const replaceText = text.replace(/[\\n]+/g, ' ');
// 文本拆分成 chunk
let chunks = replaceText.match(/[^!?.。]+[!?.。]/g) || [];
const dataItems: any[] = [];
let splitText = '';
chunks.forEach((chunk) => {
splitText += chunk;
const tokens = encode(splitText).length;
if (tokens >= 780) {
dataItems.push({
userId,
dataId,
type: DataRecord.type,
text: splitText,
status: 1
});
splitText = '';
}
});
// 批量插入数据
await DataItem.insertMany(dataItems);
generateQA();
try {
generateQA();
generateAbstract();
} catch (error) {
error;
}
jsonRes(res, {
data: dataItems.length
data: { chunks, replaceText }
});
} catch (err) {
jsonRes(res, {

View File

@@ -3,7 +3,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { ModelStatusEnum, modelList, ChatModelNameEnum } from '@/constants/model';
import { ModelStatusEnum, modelList, ChatModelNameEnum, ChatModelNameMap } from '@/constants/model';
import { Model } from '@/service/models/model';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -33,15 +33,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
await connectToDatabase();
// 重名校验
const authRepeatName = await Model.findOne({
name,
userId
});
if (authRepeatName) {
throw new Error('模型名重复');
}
// 上限校验
const authCount = await Model.countDocuments({
userId
@@ -57,9 +48,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
status: ModelStatusEnum.running,
service: {
company: modelItem.serviceCompany,
trainId: modelItem.trainName,
chatModel: modelItem.model,
modelName: modelItem.model
trainId: '',
chatModel: ChatModelNameMap[modelItem.model], // 聊天时用的模型
modelName: modelItem.model // 最底层的模型,不会变,用于计费等核心操作
}
});

View File

@@ -0,0 +1,41 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authToken } from '@/service/utils/tools';
import { connectRedis } from '@/service/redis';
/**
 * Delete a single model-data record from redis.
 * The caller must present a valid token and own the record.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    const { dataId } = req.query as {
      dataId: string;
    };
    const { authorization } = req.headers;

    // Reject unauthenticated requests first, then validate input.
    if (!authorization) {
      throw new Error('无权操作');
    }
    if (!dataId) {
      throw new Error('缺少参数');
    }

    // Resolve the caller's identity from the token.
    const ownerId = await authToken(authorization);
    const redis = await connectRedis();

    // Only the user that created the record may delete it.
    const recordOwner = await redis.hGet(dataId, 'userId');
    if (recordOwner !== ownerId) {
      throw new Error('无权操作');
    }

    await redis.del(dataId);
    jsonRes(res);
  } catch (err) {
    console.log(err);
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -0,0 +1,61 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { connectRedis } from '@/service/redis';
import { VecModelDataIdx } from '@/constants/redis';
import { clearStrLineBreak } from '@/utils/tools';
/**
 * Export all (q, text) pairs of a model's data from redis, with line
 * breaks stripped so the result is safe for row-based formats.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    const { modelId } = req.query as {
      modelId: string;
    };
    const { authorization } = req.headers;

    if (!authorization) {
      throw new Error('无权操作');
    }
    if (!modelId) {
      throw new Error('缺少参数');
    }

    // Verify the token and scope the search to this user's records.
    const userId = await authToken(authorization);
    await connectToDatabase();
    const redis = await connectRedis();

    // Pull every record for this model owned by the caller (up to 10k).
    const searchRes = await redis.ft.search(
      VecModelDataIdx,
      `@modelId:{${modelId}} @userId:{${userId}}`,
      {
        RETURN: ['q', 'text'],
        LIMIT: {
          from: 0,
          size: 10000
        }
      }
    );

    // Keep only complete pairs and normalize away embedded line breaks.
    const rows = searchRes.documents
      .filter((doc: any) => doc.value.q && doc.value.text)
      .map((doc: any): [string, string] => [
        clearStrLineBreak(doc.value.q),
        clearStrLineBreak(doc.value.text)
      ]);

    jsonRes(res, {
      data: rows
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -0,0 +1,36 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import axios from 'axios';
import { httpsAgent } from '@/service/utils/tools';
/**
* 读取网站的内容
*/
/**
 * Fetch the raw content of a user-supplied web page URL.
 *
 * SECURITY NOTE: this endpoint performs a server-side request to a URL the
 * client controls (an SSRF surface). The protocol check below blocks
 * file:/ftp:/etc., but internal hosts and private IPs are still reachable —
 * consider an allowlist or IP filtering before exposing this publicly.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    const { url } = req.body as { url: string };
    if (!url) {
      throw new Error('缺少 url');
    }
    // Only plain web URLs are allowed; new URL() also throws on malformed input.
    const parsedUrl = new URL(url);
    if (parsedUrl.protocol !== 'http:' && parsedUrl.protocol !== 'https:') {
      throw new Error('无效的 url');
    }
    await connectToDatabase();
    const { authorization } = req.headers;
    // Require a valid login token before making the outbound request.
    await authToken(authorization);
    const data = await axios
      .get(url, {
        httpsAgent: httpsAgent(false),
        timeout: 30000 // don't let a slow remote host hang the API route
      })
      .then((res) => res.data as string);
    jsonRes(res, { data });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -0,0 +1,74 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { connectRedis } from '@/service/redis';
import { VecModelDataIdx } from '@/constants/redis';
import { SearchOptions } from 'redis';
/**
 * Paged listing of a model's data records from the redis search index.
 * Returns { pageNum, pageSize, data, total }.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    const { authorization } = req.headers;
    if (!authorization) {
      throw new Error('无权操作');
    }

    let {
      modelId,
      pageNum = 1,
      pageSize = 10
    } = req.query as {
      modelId: string;
      pageNum: string;
      pageSize: string;
    };
    if (!modelId) {
      throw new Error('缺少参数');
    }
    // Query params arrive as strings; coerce to numbers for paging math.
    pageNum = +pageNum;
    pageSize = +pageSize;

    // Verify the token; results are scoped to this user's records.
    const userId = await authToken(authorization);
    await connectToDatabase();
    const redis = await connectRedis();

    // Fetch one page of records for this model.
    const result = await redis.ft.search(
      VecModelDataIdx,
      `@modelId:{${modelId}} @userId:{${userId}}`,
      {
        RETURN: ['q', 'text', 'status'],
        LIMIT: {
          from: (pageNum - 1) * pageSize,
          size: pageSize
        },
        SORTBY: {
          BY: 'modelId',
          DIRECTION: 'DESC'
        }
      }
    );

    // Flatten each document into { id, q, text, status }.
    const records = result.documents.map((doc) => ({
      id: doc.id,
      ...doc.value
    }));

    jsonRes(res, {
      data: {
        pageNum,
        pageSize,
        data: records,
        total: result.total
      }
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -0,0 +1,35 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, SplitData, Model } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
/* 拆分数据成QA */
/* Count the text segments still queued for QA generation on a model. */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    const { modelId } = req.query as { modelId: string };
    if (!modelId) {
      throw new Error('参数错误');
    }
    await connectToDatabase();
    const { authorization } = req.headers;
    const userId = await authToken(authorization);

    // Only split-data records that still contain text segments.
    const records = await SplitData.find({
      userId,
      modelId,
      textList: { $exists: true, $not: { $size: 0 } }
    });

    // Sum the number of outstanding segments across all records.
    const total = records.reduce((sum, record) => sum + record.textList.length, 0);

    jsonRes(res, {
      data: total
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -0,0 +1,101 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Model } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { generateVector } from '@/service/events/generateVector';
import { connectRedis } from '@/service/redis';
import { VecModelDataPrefix, ModelDataStatusEnum } from '@/constants/redis';
import { VecModelDataIdx } from '@/constants/redis';
import { customAlphabet } from 'nanoid';
// 12-character lowercase-alphanumeric ids for new redis hash keys.
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
/**
 * Bulk-import model data rows ([q, a] string pairs, e.g. from CSV) into
 * redis and queue vector generation for the inserted rows.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    const { modelId, data } = req.body as {
      modelId: string;
      data: string[][];
    };
    const { authorization } = req.headers;
    if (!authorization) {
      throw new Error('无权操作');
    }
    if (!modelId || !Array.isArray(data)) {
      throw new Error('缺少参数');
    }
    // Verify the auth token.
    const userId = await authToken(authorization);
    await connectToDatabase();
    const redis = await connectRedis();
    // Make sure the model belongs to the caller.
    const model = await Model.findOne({
      _id: modelId,
      userId
    });
    if (!model) {
      throw new Error('无权操作该模型');
    }
    // Deduplicate: reject rows whose q/text already exist in the index.
    // NOTE(review): q and a are interpolated unescaped into the FT.SEARCH
    // query, so rows containing query-syntax characters make the search
    // throw; that error is swallowed below and the row is then treated as
    // NOT a duplicate (fail-open). Consider escaping before searching.
    const searchRes = await Promise.allSettled(
      data.map(async ([q, a]) => {
        try {
          // The import format escapes newlines as literal "\n"; restore them.
          q = q.replace(/\\n/g, '\n');
          a = a.replace(/\\n/g, '\n');
          const redisSearch = await redis.ft.search(VecModelDataIdx, `@q:${q} @text:${a}`, {
            RETURN: ['q', 'text']
          });
          if (redisSearch.total > 0) {
            return Promise.reject('已经存在');
          }
        } catch (error) {
          error;
        }
        return Promise.resolve({
          q,
          a
        });
      })
    );
    // Keep only rows that were not rejected as duplicates.
    const filterData = searchRes
      .filter((item) => item.status === 'fulfilled')
      .map<{ q: string; a: string }>((item: any) => item.value);
    // Insert each surviving row as a redis hash awaiting vectorization.
    const insertRedisRes = await Promise.allSettled(
      filterData.map((item) => {
        return redis.sendCommand([
          'HMSET',
          `${VecModelDataPrefix}:${nanoid()}`,
          'userId',
          userId,
          'modelId',
          String(modelId),
          'q',
          item.q,
          'text',
          item.a,
          'status',
          ModelDataStatusEnum.waiting
        ]);
      })
    );
    // Kick off async vector generation for the new rows.
    generateVector();
    jsonRes(res, {
      // Number of rows actually inserted.
      data: insertRedisRes.filter((item) => item.status === 'fulfilled').length
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -0,0 +1,72 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Model } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { ModelDataSchema } from '@/types/mongoSchema';
import { generateVector } from '@/service/events/generateVector';
import { connectRedis } from '@/service/redis';
import { VecModelDataPrefix, ModelDataStatusEnum } from '@/constants/redis';
/**
 * Bulk-import model data items (JSON body) into redis and queue vector
 * generation for them. Returns the number of successfully inserted items.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    const { modelId, data } = req.body as {
      modelId: string;
      data: { text: ModelDataSchema['text']; q: ModelDataSchema['q'] }[];
    };
    const { authorization } = req.headers;
    if (!authorization) {
      throw new Error('无权操作');
    }
    if (!modelId || !Array.isArray(data)) {
      throw new Error('缺少参数');
    }
    // Verify the auth token.
    const userId = await authToken(authorization);
    await connectToDatabase();
    const redis = await connectRedis();
    // Make sure the model belongs to the caller.
    const model = await Model.findOne({
      _id: modelId,
      userId
    });
    if (!model) {
      throw new Error('无权操作该模型');
    }
    // Insert each item as a redis hash keyed by its question id, marked
    // 'waiting' so the vector generator picks it up.
    const insertRes = await Promise.allSettled(
      data.map((item) => {
        return redis.sendCommand([
          'HMSET',
          `${VecModelDataPrefix}:${item.q.id}`,
          'userId',
          userId,
          'modelId',
          modelId,
          'q',
          item.q.text,
          'text',
          item.text,
          'status',
          ModelDataStatusEnum.waiting
        ]);
      })
    );
    // Kick off async vector generation for the new rows.
    generateVector();
    jsonRes(res, {
      // FIX: previously counted 'rejected' results, i.e. the number of
      // FAILED inserts. Report successful inserts instead, consistent with
      // the CSV import endpoint.
      data: insertRes.filter((item) => item.status === 'fulfilled').length
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -0,0 +1,52 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authToken } from '@/service/utils/tools';
import { connectRedis } from '@/service/redis';
import { ModelDataStatusEnum } from '@/constants/redis';
import { generateVector } from '@/service/events/generateVector';
/**
 * Update one model-data record in redis. Always rewrites 'text'; when the
 * question q changes, also resets 'status' and re-queues vector generation.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    const { dataId, text, q } = req.body as { dataId: string; text: string; q?: string };
    const { authorization } = req.headers;

    if (!authorization) {
      throw new Error('无权操作');
    }
    if (!dataId) {
      throw new Error('缺少参数');
    }

    // Resolve the caller's identity from the token.
    const ownerId = await authToken(authorization);
    const redis = await connectRedis();

    // Only the record's owner may update it.
    const recordOwner = await redis.hGet(dataId, 'userId');
    if (recordOwner !== ownerId) {
      throw new Error('无权操作');
    }

    // When q changed, also reset the status so its vector is regenerated.
    const extraFields = q ? ['q', q, 'status', ModelDataStatusEnum.waiting] : [];
    await redis.sendCommand(['HMSET', dataId, ...extraFields, 'text', text]);

    if (q) {
      generateVector();
    }
    jsonRes(res);
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -0,0 +1,86 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, SplitData, Model } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { generateQA } from '@/service/events/generateQA';
import { encode } from 'gpt-token-utils';
/* 拆分数据成QA */
/* Split raw text into token-bounded batches and queue them for QA generation. */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    const { text, modelId, prompt } = req.body as { text: string; modelId: string; prompt: string };
    if (!text || !modelId || !prompt) {
      throw new Error('参数错误');
    }
    await connectToDatabase();
    const { authorization } = req.headers;
    const userId = await authToken(authorization);
    // Make sure the model belongs to the caller.
    const model = await Model.findOne({
      _id: modelId,
      userId
    });
    if (!model) {
      throw new Error('无权操作该模型');
    }
    // Collapse escaped ("\n") and literal newlines into single spaces.
    const replaceText = text.replace(/(\\n|\n)+/g, ' ');
    // Split on sentence terminators (!?.。), keeping the terminator attached.
    const chunks = replaceText.match(/[^!?.。]+[!?.。]/g) || [];
    const textList: string[] = [];
    let splitText = '';
    /* Accumulate chunks into batches of roughly 3k-4k tokens. */
    chunks.forEach((chunk) => {
      const tokens = encode(splitText + chunk).length;
      if (tokens >= 4000) {
        // Adding this chunk would exceed 4k: flush the current batch and
        // start a new one with this chunk.
        // FIX: guard against pushing an empty batch when a single chunk is
        // itself >= 4k tokens (splitText is still '').
        if (splitText) {
          textList.push(splitText);
        }
        splitText = chunk;
      } else if (tokens >= 3000) {
        // Between 3k and 4k: take the batch including this chunk.
        textList.push(splitText + chunk);
        splitText = '';
      } else {
        // Under 3k: keep accumulating.
        splitText += chunk;
      }
    });
    // Flush whatever is left after the loop.
    if (splitText) {
      textList.push(splitText);
    }
    // Persist the batches; the QA generator consumes them asynchronously.
    await SplitData.create({
      userId,
      modelId,
      rawText: text,
      textList,
      prompt
    });
    generateQA();
    jsonRes(res);
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}
// Raise Next.js's default API body-size limit so large raw-text uploads
// can be accepted by this route.
export const config = {
  api: {
    bodyParser: {
      sizeLimit: '10mb'
    }
  }
};

View File

@@ -1,11 +1,13 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { Chat, Model, Training, connectToDatabase } from '@/service/mongo';
import { authToken, getUserOpenaiKey } from '@/service/utils/tools';
import { authToken } from '@/service/utils/tools';
import { getUserApiOpenai } from '@/service/utils/openai';
import { TrainingStatusEnum } from '@/constants/model';
import { getOpenAIApi } from '@/service/utils/chat';
import { TrainingItemType } from '@/types/training';
import { httpsAgent } from '@/service/utils/tools';
import { connectRedis } from '@/service/redis';
import { VecModelDataIdx } from '@/constants/redis';
/* 获取我的模型 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -24,14 +26,33 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
// 凭证校验
const userId = await authToken(authorization);
await connectToDatabase();
// 删除模型
await Model.deleteOne({
// 验证是否是该用户的 model
const model = await Model.findOne({
_id: modelId,
userId
});
if (!model) {
throw new Error('无权操作该模型');
}
await connectToDatabase();
const redis = await connectRedis();
// 获取 redis 中模型关联的所有数据
const searchRes = await redis.ft.search(
VecModelDataIdx,
`@modelId:{${modelId}} @userId:{${userId}}`,
{
LIMIT: {
from: 0,
size: 10000
}
}
);
// 删除 redis 内容
await Promise.all(searchRes.documents.map((item) => redis.del(item.id)));
// 删除对应的聊天
await Chat.deleteMany({
modelId
@@ -45,14 +66,16 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
// 如果正在训练需要删除openai上的相关信息
if (training) {
const openai = getOpenAIApi(await getUserOpenaiKey(userId));
const { openai } = await getUserApiOpenai(userId);
// 获取训练记录
const tuneRecord = await openai.retrieveFineTune(training.tuneId, { httpsAgent });
const tuneRecord = await openai.retrieveFineTune(training.tuneId, {
httpsAgent: httpsAgent(false)
});
// 删除训练文件
openai.deleteFile(tuneRecord.data.training_files[0].id, { httpsAgent });
openai.deleteFile(tuneRecord.data.training_files[0].id, { httpsAgent: httpsAgent(false) });
// 取消训练
openai.cancelFineTune(training.tuneId, { httpsAgent });
openai.cancelFineTune(training.tuneId, { httpsAgent: httpsAgent(false) });
}
// 删除对应训练记录
@@ -60,6 +83,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
modelId
});
// 删除模型
await Model.deleteOne({
_id: modelId,
userId
});
jsonRes(res);
} catch (err) {
jsonRes(res, {

View File

@@ -1,15 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Model, Training } from '@/service/mongo';
import { getOpenAIApi } from '@/service/utils/chat';
import formidable from 'formidable';
import { authToken, getUserOpenaiKey } from '@/service/utils/tools';
import { join } from 'path';
import fs from 'fs';
import type { ModelSchema } from '@/types/mongoSchema';
import type { OpenAIApi } from 'openai';
import { ModelStatusEnum, TrainingStatusEnum } from '@/constants/model';
import { httpsAgent } from '@/service/utils/tools';
import { connectToDatabase, Training } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
// 关闭next默认的bodyParser处理方式
export const config = {
@@ -18,7 +10,7 @@ export const config = {
}
};
/* 上传文件,开始微调 */
/* 获取模型训练记录 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { authorization } = req.headers;
@@ -30,7 +22,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
if (!modelId) {
throw new Error('参数错误');
}
const userId = await authToken(authorization);
await authToken(authorization);
await connectToDatabase();

View File

@@ -1,8 +1,8 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Model, Training } from '@/service/mongo';
import { getOpenAIApi } from '@/service/utils/chat';
import { authToken, getUserOpenaiKey } from '@/service/utils/tools';
import { authToken } from '@/service/utils/tools';
import { getUserApiOpenai } from '@/service/utils/openai';
import type { ModelSchema } from '@/types/mongoSchema';
import { TrainingItemType } from '@/types/training';
import { ModelStatusEnum, TrainingStatusEnum } from '@/constants/model';
@@ -43,16 +43,18 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
}
// 用户的 openai 实例
const openai = getOpenAIApi(await getUserOpenaiKey(userId));
const { openai } = await getUserApiOpenai(userId);
// 获取 openai 的训练情况
const { data } = await openai.retrieveFineTune(training.tuneId, { httpsAgent });
const { data } = await openai.retrieveFineTune(training.tuneId, {
httpsAgent: httpsAgent(false)
});
// console.log(data);
if (data.status === OpenAiTuneStatusEnum.succeeded) {
// 删除训练文件
openai.deleteFile(data.training_files[0].id, { httpsAgent });
openai.deleteFile(data.training_files[0].id, { httpsAgent: httpsAgent(false) });
// 更新模型
// 更新模型状态和模型内容
await Model.findByIdAndUpdate(modelId, {
status: ModelStatusEnum.running,
updateTime: new Date(),
@@ -72,9 +74,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
});
}
/* 取消微调 */
if (data.status === OpenAiTuneStatusEnum.cancelled) {
// 删除训练文件
openai.deleteFile(data.training_files[0].id, { httpsAgent });
openai.deleteFile(data.training_files[0].id, { httpsAgent: httpsAgent(false) });
// 更新模型
await Model.findByIdAndUpdate(modelId, {
@@ -87,11 +90,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
});
return jsonRes(res, {
data: '模型微调取消'
data: '模型微调取消'
});
}
throw new Error('模型还在训练中');
jsonRes(res, {
data: '模型还在训练中'
});
} catch (err: any) {
jsonRes(res, {
code: 500,

View File

@@ -2,9 +2,9 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Model, Training } from '@/service/mongo';
import { getOpenAIApi } from '@/service/utils/chat';
import formidable from 'formidable';
import { authToken, getUserOpenaiKey } from '@/service/utils/tools';
import { authToken } from '@/service/utils/tools';
import { getUserApiOpenai } from '@/service/utils/openai';
import { join } from 'path';
import fs from 'fs';
import type { ModelSchema } from '@/types/mongoSchema';
@@ -30,6 +30,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
throw new Error('无权操作');
}
const { modelId } = req.query;
if (!modelId) {
throw new Error('参数错误');
}
@@ -48,7 +49,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const trainingType = model.service.trainId; // 目前都默认是 openai text-davinci-03
// 获取用户的 API Key 实例化后的对象
openai = getOpenAIApi(await getUserOpenaiKey(userId));
const user = await getUserApiOpenai(userId);
openai = user.openai;
// 接收文件并保存
const form = formidable({
@@ -67,13 +69,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
});
const file = files.file;
// 上传文件
// 上传文件到 openai
// @ts-ignore
const uploadRes = await openai.createFile(
// @ts-ignore
fs.createReadStream(file.filepath),
'fine-tune',
{ httpsAgent }
{ httpsAgent: httpsAgent(false) }
);
uploadFileId = uploadRes.data.id; // 记录上传文件的 ID
@@ -82,9 +84,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
{
training_file: uploadFileId,
model: trainingType,
suffix: model.name
suffix: model.name,
n_epochs: 4
},
{ httpsAgent }
{ httpsAgent: httpsAgent(false) }
);
trainId = trainRes.data.id; // 记录训练 ID
@@ -114,9 +117,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
// @ts-ignore
if (openai) {
// @ts-ignore
uploadFileId && openai.deleteFile(uploadFileId, { httpsAgent });
uploadFileId && openai.deleteFile(uploadFileId, { httpsAgent: httpsAgent(false) });
// @ts-ignore
trainId && openai.cancelFineTune(trainId, { httpsAgent });
trainId && openai.cancelFineTune(trainId, { httpsAgent: httpsAgent(false) });
}
jsonRes(res, {

View File

@@ -37,7 +37,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
systemPrompt,
intro,
temperature,
service,
// service,
security
}
);

View File

@@ -0,0 +1,250 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase, Model } from '@/service/mongo';
import { getOpenAIApi } from '@/service/utils/chat';
import { authOpenApiKey } from '@/service/utils/tools';
import { httpsAgent, openaiChatFilter, systemPromptFilter } from '@/service/utils/tools';
import { ChatCompletionRequestMessage, ChatCompletionRequestMessageRoleEnum } from 'openai';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream';
import { ChatModelNameEnum, modelList, ChatModelNameMap } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { connectRedis } from '@/service/redis';
import { VecModelDataPrefix } from '@/constants/redis';
import { vectorToBuffer } from '@/utils/tools';
import { openaiCreateEmbedding, gpt35StreamResponse } from '@/service/utils/openai';
/* Send prompt (laf gpt): two-pass generation. Pass 1 asks GPT-3.5 to expand the
   user's requirement into numbered implementation steps; pass 2 answers using
   knowledge-base context pulled from a Redis vector search, streamed back to
   the client when isStream is true. Billed via pushChatBill on success. */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  let step = 0; // step=1 means the stream response has already started
  const stream = new PassThrough();
  stream.on('error', () => {
    console.log('error: ', 'stream error');
    stream.destroy();
  });
  res.on('close', () => {
    stream.destroy();
  });
  res.on('error', () => {
    console.log('error: ', 'request error');
    stream.destroy();
  });

  try {
    const {
      prompt,
      modelId,
      isStream = true
    } = req.body as {
      prompt: ChatItemType;
      modelId: string;
      isStream: boolean;
    };

    if (!prompt || !modelId) {
      throw new Error('缺少参数');
    }

    await connectToDatabase();
    const redis = await connectRedis();
    let startTime = Date.now();

    /* Credential check: resolves the caller's API key and user id */
    const { apiKey, userId } = await authOpenApiKey(req);

    /* Look up the model record in the database */
    const model = await Model.findById(modelId);
    if (!model) {
      throw new Error('找不到模型');
    }

    // Static model config: this route always uses the VECTOR_GPT entry.
    const modelConstantsData = modelList.find(
      (item) => item.model === ChatModelNameEnum.VECTOR_GPT
    );
    if (!modelConstantsData) {
      throw new Error('模型已下架');
    }

    console.log('laf gpt start');

    // Instantiate the chat API client with the caller's key
    const chatAPI = getOpenAIApi(apiKey);

    // First chatgpt call: decompose the requirement into implementation steps
    const promptResponse = await chatAPI.createChatCompletion(
      {
        model: ChatModelNameMap[ChatModelNameEnum.GPT35],
        temperature: 0,
        frequency_penalty: 0.5, // larger => less repeated content
        presence_penalty: -0.5, // larger => more new content
        messages: [
          {
            role: 'system',
            content: `服务端逻辑生成器.根据用户输入的需求,拆解成代码实现的步骤,并按格式返回: 1.\n2.\n3.\n ......
下面是一些例子:
实现一个手机号发生注册验证码方法.
1. 从 query 中获取 phone.
2. 校验手机号格式是否正确,不正确返回{error: "手机号格式错误"}.
3. 给 phone 发送一个短信验证码,验证码长度为6位字符串,内容为:你正在注册laf,验证码为:code.
4. 数据库添加数据,表为"codes",内容为 {phone, code}.
实现根据手机号注册账号,需要验证手机验证码.
1. 从 body 中获取 phone 和 code.
2. 校验手机号格式是否正确,不正确返回{error: "手机号格式错误"}.
2. 获取数据库数据,表为"codes",查找是否有符合 phone, code 等于body参数的记录,没有的话返回 {error:"验证码不正确"}.
4. 添加数据库数据,表为"users" ,内容为{phone, code, createTime}.
5. 删除数据库数据,删除 code 记录.
更新博客记录。传入blogId,blogText,tags,还需要记录更新的时间.
1. 从 body 中获取 blogId,blogText 和 tags.
2. 校验 blogId 是否为空,为空则返回 {error: "博客ID不能为空"}.
3. 校验 blogText 是否为空,为空则返回 {error: "博客内容不能为空"}.
4. 校验 tags 是否为数组,不是则返回 {error: "标签必须为数组"}.
5. 获取当前时间,记录为 updateTime.
6. 更新数据库数据,表为"blogs",更新符合 blogId 的记录的内容为{blogText, tags, updateTime}.
7. 返回结果 {message: "更新博客记录成功"}.`
          },
          {
            role: 'user',
            content: prompt.value
          }
        ]
      },
      {
        timeout: 120000,
        httpsAgent: httpsAgent(true)
      }
    );

    const promptResolve = promptResponse.data.choices?.[0]?.message?.content || '';
    if (!promptResolve) {
      throw new Error('gpt 异常');
    }

    // Append the resolved steps to the original prompt for the second pass.
    prompt.value += ` ${promptResolve}`;
    console.log('prompt resolve success, time:', `${(Date.now() - startTime) / 1000}s`);

    // Embed the (augmented) prompt
    const { vector: promptVector } = await openaiCreateEmbedding({
      isPay: true,
      apiKey,
      userId,
      text: prompt.value
    });

    // Conversation content
    const prompts = [prompt];

    // Search system prompts: KNN (top 20) over this model's vectors in Redis,
    // returning the related text payloads ordered by similarity score.
    const redisData: any[] = await redis.sendCommand([
      'FT.SEARCH',
      `idx:${VecModelDataPrefix}:hash`,
      `@modelId:{${String(model._id)}}=>[KNN 20 @vector $blob AS score]`,
      'RETURN',
      '1',
      'text',
      'SORTBY',
      'score',
      'PARAMS',
      '2',
      'blob',
      vectorToBuffer(promptVector),
      'DIALECT',
      '2'
    ]);

    // Format the reply and collect the qa texts. FT.SEARCH replies as
    // [count, key1, fields1, key2, fields2, ...]; each fields entry is a
    // [field, value] pair, so step by 2 over the (up to 20) hits.
    const formatRedisPrompt: string[] = [];
    for (let i = 2; i < 42; i += 2) {
      const text = redisData[i]?.[1];
      if (text) {
        formatRedisPrompt.push(text);
      }
    }

    // Trim the collected texts down to at most 3200 tokens
    const systemPrompt = systemPromptFilter(formatRedisPrompt, 3200);

    prompts.unshift({
      obj: 'SYSTEM',
      value: `${model.systemPrompt} 知识库内容是最新的,知识库内容为: "${systemPrompt}"`
    });

    // Cap the total token count so the request stays inside the model context
    const filterPrompts = openaiChatFilter(prompts, modelConstantsData.contextMaxToken);

    // Map internal roles onto the chatgpt message roles
    const map = {
      Human: ChatCompletionRequestMessageRoleEnum.User,
      AI: ChatCompletionRequestMessageRoleEnum.Assistant,
      SYSTEM: ChatCompletionRequestMessageRoleEnum.System
    };
    const formatPrompts: ChatCompletionRequestMessage[] = filterPrompts.map(
      (item: ChatItemType) => ({
        role: map[item.obj],
        content: item.value
      })
    );
    // console.log(formatPrompts);

    // Scale temperature: model.temperature is on a 0-10 scale, config holds the max
    const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);

    // Second chatgpt call: the actual answer
    const chatResponse = await chatAPI.createChatCompletion(
      {
        model: model.service.chatModel,
        temperature,
        messages: formatPrompts,
        frequency_penalty: 0.5, // larger => less repeated content
        presence_penalty: -0.5, // larger => more new content
        stream: isStream
      },
      {
        timeout: 120000,
        responseType: isStream ? 'stream' : 'json',
        httpsAgent: httpsAgent(true)
      }
    );

    console.log('code response. time:', `${(Date.now() - startTime) / 1000}s`);

    step = 1; // from here on an error can only terminate the stream
    let responseContent = '';

    if (isStream) {
      // Proxy the streamed completion to the client
      const streamResponse = await gpt35StreamResponse({
        res,
        stream,
        chatResponse
      });
      responseContent = streamResponse.responseContent;
    } else {
      responseContent = chatResponse.data.choices?.[0]?.message?.content || '';
      jsonRes(res, {
        data: responseContent
      });
    }

    console.log('laf gpt done. time:', `${(Date.now() - startTime) / 1000}s`);

    // Bill by the total text sent plus the text produced
    const promptsContent = formatPrompts.map((item) => item.content).join('');
    pushChatBill({
      isPay: true,
      modelName: model.service.modelName,
      userId,
      text: promptsContent + responseContent
    });
  } catch (err: any) {
    if (step === 1) {
      // The stream already started: just terminate it
      console.log('error结束');
      stream.destroy();
    } else {
      res.status(500);
      jsonRes(res, {
        code: 500,
        error: err
      });
    }
  }
}

View File

@@ -0,0 +1,210 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase, Model } from '@/service/mongo';
import {
httpsAgent,
openaiChatFilter,
systemPromptFilter,
authOpenApiKey
} from '@/service/utils/tools';
import { ChatCompletionRequestMessage, ChatCompletionRequestMessageRoleEnum } from 'openai';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream';
import { modelList } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { connectRedis } from '@/service/redis';
import { VecModelDataPrefix } from '@/constants/redis';
import { vectorToBuffer } from '@/utils/tools';
import { openaiCreateEmbedding, gpt35StreamResponse } from '@/service/utils/openai';
/* Send prompt (OpenAPI vector chat): answers a multi-turn conversation for a
   model the caller owns, injecting knowledge-base context found by a Redis
   vector-range search. Streams the completion when isStream is true and bills
   the caller via pushChatBill on success. */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  let step = 0; // step=1 means the stream response has already started
  const stream = new PassThrough();
  stream.on('error', () => {
    console.log('error: ', 'stream error');
    stream.destroy();
  });
  res.on('close', () => {
    stream.destroy();
  });
  res.on('error', () => {
    console.log('error: ', 'request error');
    stream.destroy();
  });

  try {
    const {
      prompts,
      modelId,
      isStream = true
    } = req.body as {
      prompts: ChatItemType[];
      modelId: string;
      isStream: boolean;
    };

    if (!prompts || !modelId) {
      throw new Error('缺少参数');
    }
    if (!Array.isArray(prompts)) {
      throw new Error('prompts is not array');
    }
    if (prompts.length > 30 || prompts.length === 0) {
      throw new Error('prompts length range 1-30');
    }

    await connectToDatabase();
    const redis = await connectRedis();
    let startTime = Date.now();

    /* Credential check: resolves the caller's API key and user id */
    const { apiKey, userId } = await authOpenApiKey(req);

    // The caller may only chat with a model they own.
    const model = await Model.findOne({
      _id: modelId,
      userId
    });
    if (!model) {
      throw new Error('无权使用该模型');
    }

    const modelConstantsData = modelList.find((item) => item.model === model?.service?.modelName);
    if (!modelConstantsData) {
      throw new Error('模型初始化异常');
    }

    // Embed the most recent message; reuse the returned chatAPI client below.
    const { vector: promptVector, chatAPI } = await openaiCreateEmbedding({
      isPay: true,
      apiKey,
      userId,
      text: prompts[prompts.length - 1].value // take the last message
    });

    // Search system prompts: vector-range query (distance <= 0.24) over this
    // model's vectors in Redis, up to 30 hits ordered by similarity score.
    const redisData: any[] = await redis.sendCommand([
      'FT.SEARCH',
      `idx:${VecModelDataPrefix}:hash`,
      `@modelId:{${modelId}} @vector:[VECTOR_RANGE 0.24 $blob]=>{$YIELD_DISTANCE_AS: score}`,
      'RETURN',
      '1',
      'text',
      'SORTBY',
      'score',
      'PARAMS',
      '2',
      'blob',
      vectorToBuffer(promptVector),
      'LIMIT',
      '0',
      '30',
      'DIALECT',
      '2'
    ]);

    const formatRedisPrompt: string[] = [];
    // Format the reply and collect the qa texts. FT.SEARCH replies as
    // [count, key1, fields1, ...]; each fields entry is a [field, value]
    // pair, so step by 2 over the (up to 30) hits.
    for (let i = 2; i < 61; i += 2) {
      const text = redisData[i]?.[1];
      if (text) {
        formatRedisPrompt.push(text);
      }
    }

    if (formatRedisPrompt.length === 0) {
      throw new Error('对不起,我没有找到你的问题');
    }

    // Merge a caller-supplied SYSTEM message into the knowledge-base texts.
    if (prompts[0].obj === 'SYSTEM') {
      formatRedisPrompt.unshift(prompts.shift()?.value || '');
    }

    // Trim the system texts down to at most 2800 tokens
    const systemPrompt = systemPromptFilter(formatRedisPrompt, 2800);

    prompts.unshift({
      obj: 'SYSTEM',
      value: `${model.systemPrompt} 知识库内容是最新的,知识库内容为: "${systemPrompt}"`
    });

    // Cap the total token count so the request stays inside the model context
    const filterPrompts = openaiChatFilter(prompts, modelConstantsData.contextMaxToken);

    // Map internal roles onto the chatgpt message roles
    const map = {
      Human: ChatCompletionRequestMessageRoleEnum.User,
      AI: ChatCompletionRequestMessageRoleEnum.Assistant,
      SYSTEM: ChatCompletionRequestMessageRoleEnum.System
    };
    const formatPrompts: ChatCompletionRequestMessage[] = filterPrompts.map(
      (item: ChatItemType) => ({
        role: map[item.obj],
        content: item.value
      })
    );
    // console.log(formatPrompts);

    // Scale temperature: model.temperature is on a 0-10 scale, config holds the max
    const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);

    // Send the chat completion request
    const chatResponse = await chatAPI.createChatCompletion(
      {
        model: model.service.chatModel,
        temperature: temperature,
        messages: formatPrompts,
        frequency_penalty: 0.5, // larger => less repeated content
        presence_penalty: -0.5, // larger => more new content
        stream: isStream
      },
      {
        timeout: 120000,
        responseType: isStream ? 'stream' : 'json',
        httpsAgent: httpsAgent(true)
      }
    );

    console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);

    step = 1; // from here on an error can only terminate the stream
    let responseContent = '';

    if (isStream) {
      // Proxy the streamed completion to the client
      const streamResponse = await gpt35StreamResponse({
        res,
        stream,
        chatResponse
      });
      responseContent = streamResponse.responseContent;
    } else {
      responseContent = chatResponse.data.choices?.[0]?.message?.content || '';
      jsonRes(res, {
        data: responseContent
      });
    }

    // Bill by the total text sent plus the text produced
    const promptsContent = formatPrompts.map((item) => item.content).join('');
    pushChatBill({
      isPay: true,
      modelName: model.service.modelName,
      userId,
      text: promptsContent + responseContent
    });
    // jsonRes(res);
  } catch (err: any) {
    if (step === 1) {
      // The stream already started: just terminate it
      console.log('error结束');
      stream.destroy();
    } else {
      res.status(500);
      jsonRes(res, {
        code: 500,
        error: err
      });
    }
  }
}

View File

@@ -0,0 +1,33 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, OpenApi } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
/**
 * Delete one of the current user's OpenAPI keys.
 * Requires `id` in the query string and a login token in the
 * Authorization header; only a key owned by that user is removed.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    const { id } = req.query as { id: string };
    const authorization = req.headers.authorization;

    // Validate inputs before touching auth or the database.
    if (!authorization) {
      throw new Error('缺少登录凭证');
    }
    if (!id) {
      throw new Error('缺少参数');
    }

    // Resolve the user from the token, then scope the delete to their keys.
    const userId = await authToken(authorization);
    await connectToDatabase();
    await OpenApi.findOneAndRemove({ _id: id, userId });

    jsonRes(res);
  } catch (err) {
    jsonRes(res, { code: 500, error: err });
  }
}

View File

@@ -0,0 +1,43 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, OpenApi } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { UserOpenApiKey } from '@/types/openapi';
/**
 * List the current user's OpenAPI keys, newest first.
 * Each key is masked (first two + last two characters only) so the
 * full secret is never sent back to the client.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    const { authorization } = req.headers;
    if (!authorization) {
      throw new Error('缺少登录凭证');
    }

    const userId = await authToken(authorization);
    await connectToDatabase();

    const findResponse = await OpenApi.find({ userId }).sort({ _id: -1 });

    // Mask each stored key before returning it.
    const apiKeys = findResponse.map<UserOpenApiKey>(
      ({ _id, apiKey, createTime, lastUsedTime }) => ({
        id: _id,
        apiKey: `${apiKey.substring(0, 2)}******${apiKey.substring(apiKey.length - 2)}`,
        createTime,
        lastUsedTime
      })
    );

    jsonRes(res, { data: apiKeys });
  } catch (err) {
    jsonRes(res, { code: 500, error: err });
  }
}

View File

@@ -0,0 +1,43 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, OpenApi } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { customAlphabet } from 'nanoid';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890');
/**
 * Create a new OpenAPI key for the current user.
 * A user may hold at most 5 keys; the raw key is returned once, on creation.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    const { authorization } = req.headers;
    if (!authorization) {
      throw new Error('缺少登录凭证');
    }

    const userId = await authToken(authorization);
    await connectToDatabase();

    // Enforce the per-user quota. countDocuments with a filter avoids the
    // redundant find().countDocuments() chain.
    const count = await OpenApi.countDocuments({ userId });
    if (count >= 5) {
      throw new Error('最多 5 组API Key');
    }

    // The key embeds the userId so it can be traced back to its owner.
    const apiKey = `${userId}-${nanoid()}`;

    await OpenApi.create({
      userId,
      apiKey
    });

    jsonRes(res, {
      data: apiKey
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -1,27 +0,0 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { AuthCode } from '@/service/models/authCode';
import { connectToDatabase } from '@/service/mongo';
/**
 * Maintenance endpoint: purge expired email auth codes.
 * Guarded by a shared-secret `auth` header. The guard now lives inside the
 * try/catch so a bad secret is reported through jsonRes like every other
 * error, instead of escaping the handler as an unhandled rejection.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    if (req.headers.auth !== 'archer') {
      throw new Error('凭证错误');
    }

    await connectToDatabase();

    // Remove every code whose expiry is already in the past.
    const authCode = await AuthCode.deleteMany({
      expiredTime: { $lt: Date.now() }
    });

    jsonRes(res, {
      message: `删除了${authCode.deletedCount}条记录`
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -1,28 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Chat } from '@/service/mongo';
/* 定时删除那些不活跃的内容 */
/* Scheduled cleanup: delete inactive chats (fewer than 5 content items).
   Guarded by a shared-secret `auth` header; the guard is inside try/catch so
   failures are reported via jsonRes rather than as unhandled rejections. */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    if (req.headers.auth !== 'archer') {
      throw new Error('凭证错误');
    }

    await connectToDatabase();

    // deleteMany takes (filter, options). The previous second argument
    // `{ $pull: { content: { $exists: true } } }` was an update operator
    // placed in the options slot — deleteMany never applies updates, so it
    // had no effect and has been dropped.
    const response = await Chat.deleteMany({
      $expr: { $lt: [{ $size: '$content' }, 5] }
    });

    jsonRes(res, {
      message: `删除了${response.deletedCount}条记录`
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -1,35 +0,0 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Bill } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import type { BillSchema } from '@/types/mongoSchema';
/**
 * One-off migration: backfill `tokenLen` on bills that lack it,
 * copying the legacy `textLen` value. Guarded by a shared-secret
 * `auth` header.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    if (req.headers.auth !== 'archer') {
      throw new Error('凭证错误');
    }

    await connectToDatabase();

    // Bills that have never been migrated.
    const bills = await Bill.find({
      tokenLen: { $exists: false }
    });

    // Patch them all in parallel.
    const updates = bills.map((bill) =>
      Bill.findByIdAndUpdate(bill._id, { tokenLen: bill.textLen })
    );
    await Promise.all(updates);

    jsonRes(res, { data: {} });
  } catch (err) {
    jsonRes(res, { code: 500, error: err });
  }
}

View File

@@ -1,36 +0,0 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, DataItem, Data } from '@/service/mongo';
/* One-off migration: mark every Data record as not deleted.
   Guarded by a shared-secret `auth` header. */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    if (req.headers.auth !== 'archer') {
      throw new Error('凭证错误');
    }

    await connectToDatabase();

    // An earlier variant also reset DataItem.times to 2; that step stays disabled:
    // await DataItem.updateMany({}, { times: 2 });
    await Data.updateMany({}, { isDeleted: false });

    jsonRes(res, { data: {} });
  } catch (err) {
    jsonRes(res, { code: 500, error: err });
  }
}

View File

@@ -1,79 +0,0 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Training, Model } from '@/service/mongo';
import type { TrainingItemType } from '@/types/training';
import { TrainingStatusEnum, ModelStatusEnum } from '@/constants/model';
import { getOpenAIApi } from '@/service/utils/chat';
import { getUserOpenaiKey } from '@/service/utils/tools';
import { OpenAiTuneStatusEnum } from '@/service/constants/training';
import { sendTrainSucceed } from '@/service/utils/sendEmail';
import { httpsAgent } from '@/service/utils/tools';
/**
 * Poll OpenAI for fine-tune jobs still marked pending and finalize the
 * succeeded ones: delete the training file, point the model at the
 * fine-tuned checkpoint, mark the training record done, and email the owner.
 * Guarded by a shared-secret `auth` header; the guard is now inside
 * try/catch so a bad secret is reported via jsonRes instead of escaping as
 * an unhandled rejection.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    if (req.headers.auth !== 'archer') {
      throw new Error('凭证错误');
    }

    await connectToDatabase();

    // All training records still pending.
    const trainingRecords: TrainingItemType[] = await Training.find({
      status: TrainingStatusEnum.pending
    });

    // NOTE(review): polls using a hard-coded account's API key — confirm this
    // service account is intended to cover all users' jobs.
    const openai = getOpenAIApi(await getUserOpenaiKey('63f9a14228d2a688d8dc9e1b'));

    const response = await Promise.all(
      trainingRecords.map(async (item) => {
        const { data } = await openai.retrieveFineTune(item.tuneId, { httpsAgent });

        if (data.status === OpenAiTuneStatusEnum.succeeded) {
          // Fire-and-forget: remove the uploaded training file.
          openai.deleteFile(data.training_files[0].id, { httpsAgent });

          const model = await Model.findById(item.modelId).populate({
            path: 'userId',
            options: {
              strictPopulate: false
            }
          });
          if (!model) {
            throw new Error('模型不存在');
          }

          // Point the model at the fine-tuned checkpoint; after training,
          // further training and chat both use the same model id.
          await Model.findByIdAndUpdate(item.modelId, {
            status: ModelStatusEnum.running,
            updateTime: new Date(),
            service: {
              ...model.service,
              trainId: data.fine_tuned_model,
              chatModel: data.fine_tuned_model
            }
          });

          // Mark the training record finished.
          await Training.findByIdAndUpdate(item._id, {
            status: TrainingStatusEnum.succeed
          });

          // Notify the owner by email.
          await sendTrainSucceed(model.userId.email as string, model.name);

          return 'succeed';
        }
        return 'pending';
      })
    );

    jsonRes(res, {
      data: `${response.length}个训练线程,${
        response.filter((item) => item === 'succeed').length
      }个完成`
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -1,11 +1,12 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import axios from 'axios';
import { connectToDatabase, User, Pay } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { PaySchema } from '@/types/mongoSchema';
import dayjs from 'dayjs';
import { getPayResult } from '@/service/utils/wxpay';
/* 校验支付结果 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { authorization } = req.headers;
@@ -25,18 +26,16 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
throw new Error('订单已结算');
}
const { data } = await axios.get(
`https://sif268.laf.dev/wechat-order-query?order_number=${payOrder.orderId}&api_key=${process.env.WXPAYCODE}`
);
const payRes = await getPayResult(payOrder.orderId);
// 校验下是否超过一天
const orderTime = dayjs(payOrder.createTime);
const diffInHours = dayjs().diff(orderTime, 'hours');
if (data.trade_state === 'SUCCESS') {
if (payRes.trade_state === 'SUCCESS') {
// 订单已支付
try {
// 更新订单状态
// 更新订单状态. 如果没有合适的订单,说明订单重复了
const updateRes = await Pay.updateOne(
{
_id: payId,
@@ -61,7 +60,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
});
console.log(error);
}
} else if (data.trade_state === 'CLOSED' || diffInHours > 24) {
} else if (payRes.trade_state === 'CLOSED' || diffInHours > 24) {
// 订单已关闭
await Pay.findByIdAndUpdate(payId, {
status: 'CLOSED'
@@ -70,7 +69,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
data: '订单已过期'
});
} else {
throw new Error(data.trade_state_desc);
throw new Error(payRes?.trade_state_desc || '订单无效');
}
} catch (err) {
// console.log(err);

View File

@@ -1,14 +1,14 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import axios from 'axios';
import { authToken } from '@/service/utils/tools';
import { customAlphabet } from 'nanoid';
import { connectToDatabase, Pay } from '@/service/mongo';
import { PRICE_SCALE } from '@/constants/common';
import { nativePay } from '@/service/utils/wxpay';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 20);
/* 获取支付二维码 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { authorization } = req.headers;
@@ -23,15 +23,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const id = nanoid();
await connectToDatabase();
const response = await axios({
url: 'https://sif268.laf.dev/wechat-pay',
method: 'POST',
data: {
trade_order_number: id,
amount: amount * 100,
api_key: process.env.WXPAYCODE
}
});
const code_url = await nativePay(amount * 100, id);
// 充值记录 + 1
const payOrder = await Pay.create({
@@ -43,11 +35,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
jsonRes(res, {
data: {
payId: payOrder._id,
codeUrl: response.data?.code_url
codeUrl: code_url
}
});
} catch (err) {
console.log(err);
console.log(err, '==');
jsonRes(res, {
code: 500,
error: err

View File

@@ -0,0 +1,18 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import axios from 'axios';
import { connectToDatabase, User, Pay } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { PaySchema } from '@/types/mongoSchema';
import dayjs from 'dayjs';
// Stub endpoint: currently always replies with an empty body.
// NOTE(review): the imported models/utils above are unused here — presumably
// a placeholder for a future pay-result route; confirm before relying on it.
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    res.send('');
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -46,6 +46,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
// 根据 id 获取用户信息
const user = await User.findById(response._id);
if (!user) {
throw new Error('获取用户信息异常');
}
jsonRes(res, {
data: {
token: generateToken(user._id),

View File

@@ -16,15 +16,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await connectToDatabase();
// 注册人数限流
if (type === EmailTypeEnum.register) {
const maxCount = process.env.MAX_USER ? +process.env.MAX_USER : Infinity;
const userCount = await User.count();
if (userCount >= maxCount) {
throw new Error('当前注册用户已满,请等待名额~');
}
}
let code = '';
for (let i = 0; i < 6; i++) {
code += Math.floor(Math.random() * 10);

View File

@@ -9,7 +9,7 @@ import { UserUpdateParams } from '@/types/user';
/* 更新一些基本信息 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
const { accounts } = req.body as UserUpdateParams;
const { openaiKey } = req.body as UserUpdateParams;
const { authorization } = req.headers;
if (!authorization) {
@@ -26,8 +26,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
_id: userId
},
{
// 限定字段
...(accounts ? { accounts } : {})
openaiKey
}
);

View File

@@ -44,6 +44,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
email
});
if (!user) {
throw new Error('获取用户信息异常');
}
jsonRes(res, {
data: {
token: generateToken(user._id),

View File

@@ -1,6 +1,6 @@
import React from 'react';
import { Card, Box, Mark } from '@chakra-ui/react';
import { versionIntro, chatProblem } from '@/constants/common';
import { Card, Box } from '@chakra-ui/react';
import { useMarkdown } from '@/hooks/useMarkdown';
import Markdown from '@/components/Markdown';
const Empty = ({ intro }: { intro: string }) => {
@@ -9,6 +9,9 @@ const Empty = ({ intro }: { intro: string }) => {
{children}
</Box>
);
const { data: chatProblem } = useMarkdown({ url: '/chatProblem.md' });
const { data: versionIntro } = useMarkdown({ url: '/versionIntro.md' });
return (
<Box
minH={'100%'}
@@ -25,19 +28,13 @@ const Empty = ({ intro }: { intro: string }) => {
<Box whiteSpace={'pre-line'}>{intro}</Box>
</Card>
)}
<Card p={4} mb={10}>
<Header></Header>
<Markdown source={chatProblem} />
</Card>
{/* version intro */}
<Card p={4}>
<Header>Fast Gpt version1.4</Header>
<Box>
</Box>
<br />
<Card p={4} mb={10}>
<Markdown source={versionIntro} />
</Card>
<Card p={4}>
<Markdown source={chatProblem} />
</Card>
</Box>
);
};

View File

@@ -30,9 +30,9 @@ import { getToken } from '@/utils/user';
import MyIcon from '@/components/Icon';
import { useCopyData } from '@/utils/tools';
import Markdown from '@/components/Markdown';
import { shareHint } from '@/constants/common';
import { getChatSiteId } from '@/api/chat';
import WxConcat from '@/components/WxConcat';
import { useMarkdown } from '@/hooks/useMarkdown';
const SlideBar = ({
name,
@@ -55,6 +55,7 @@ const SlideBar = ({
const [hasReady, setHasReady] = useState(false);
const { isOpen: isOpenShare, onOpen: onOpenShare, onClose: onCloseShare } = useDisclosure();
const { isOpen: isOpenWx, onOpen: onOpenWx, onClose: onCloseWx } = useDisclosure();
const { data: shareHint } = useMarkdown({ url: '/chatProblem.md' });
const { isSuccess } = useQuery(['init'], getMyModels, {
cacheTime: 5 * 60 * 1000
@@ -231,12 +232,12 @@ const SlideBar = ({
</>
</RenderButton>
<RenderButton onClick={onOpenShare}>
{/* <RenderButton onClick={onOpenShare}>
<>
<MyIcon name="share" fill={'white'} w={'16px'} h={'16px'} mr={4} />
分享
</>
</RenderButton>
</RenderButton> */}
<RenderButton onClick={() => router.push('/number/setting')}>
<>
<MyIcon name="pay" fill={'white'} w={'16px'} h={'16px'} mr={4} />
@@ -281,7 +282,7 @@ const SlideBar = ({
mr={3}
onClick={async () => {
copyData(
`${location.origin}/chat?chatId=${await getChatSiteId(modelId, true)}`,
`${location.origin}/chat?chatId=${await getChatSiteId(modelId)}`,
'已复制分享链接'
);
onCloseShare();

View File

@@ -1,40 +1,37 @@
import React, { useCallback, useState, useRef, useMemo, useEffect } from 'react';
import { useRouter } from 'next/router';
import Image from 'next/image';
import {
getInitChatSiteInfo,
getChatSiteId,
postGPT3SendPrompt,
delLastMessage,
postSaveChat
} from '@/api/chat';
import { getInitChatSiteInfo, getChatSiteId, delChatRecordByIndex, postSaveChat } from '@/api/chat';
import type { InitChatResponse } from '@/api/response/chat';
import { ChatSiteItemType } from '@/types/chat';
import {
Textarea,
Box,
Flex,
Button,
useDisclosure,
Drawer,
DrawerOverlay,
DrawerContent,
useColorModeValue
useColorModeValue,
Menu,
MenuButton,
MenuList,
MenuItem
} from '@chakra-ui/react';
import { useToast } from '@/hooks/useToast';
import Icon from '@/components/Iconfont';
import { useScreen } from '@/hooks/useScreen';
import { useQuery } from '@tanstack/react-query';
import { ChatModelNameEnum } from '@/constants/model';
import dynamic from 'next/dynamic';
import { useGlobalStore } from '@/store/global';
import { useChatStore } from '@/store/chat';
import { useCopyData } from '@/utils/tools';
import { streamFetch } from '@/api/fetch';
import SlideBar from './components/SlideBar';
import Empty from './components/Empty';
import { getToken } from '@/utils/user';
import MyIcon from '@/components/Icon';
import Icon from '@/components/Icon';
import { modelList } from '@/constants/model';
const SlideBar = dynamic(() => import('./components/SlideBar'));
const Empty = dynamic(() => import('./components/Empty'));
const Markdown = dynamic(() => import('@/components/Markdown'));
const textareaMinH = '22px';
@@ -44,60 +41,36 @@ interface ChatType extends InitChatResponse {
}
const Chat = ({ chatId }: { chatId: string }) => {
const ChatBox = useRef<HTMLDivElement>(null);
const TextareaDom = useRef<HTMLTextAreaElement>(null);
const { toast } = useToast();
const router = useRouter();
const { isPc, media } = useScreen();
const { setLoading } = useGlobalStore();
// 中断请求
const controller = useRef(new AbortController());
const [chatData, setChatData] = useState<ChatType>({
chatId: '',
modelId: '',
name: '',
avatar: '',
intro: '',
secret: {},
chatModel: '',
history: [],
isExpiredTime: false
modelName: '',
history: []
}); // 聊天框整体数据
const ChatBox = useRef<HTMLDivElement>(null);
const TextareaDom = useRef<HTMLTextAreaElement>(null);
const [inputVal, setInputVal] = useState(''); // 输入的内容
const { isOpen: isOpenSlider, onClose: onCloseSlider, onOpen: onOpenSlider } = useDisclosure();
const isChatting = useMemo(
() => chatData.history[chatData.history.length - 1]?.status === 'loading',
[chatData.history]
);
const chatWindowError = useMemo(() => {
if (chatData.history[chatData.history.length - 1]?.obj === 'Human') {
return {
text: '内容出现异常',
canDelete: true
};
}
if (chatData.isExpiredTime) {
return {
text: '聊天框已过期',
canDelete: false
};
}
return '';
}, [chatData]);
const { isOpen: isOpenSlider, onClose: onCloseSlider, onOpen: onOpenSlider } = useDisclosure();
const { copyData } = useCopyData();
const { isPc, media } = useScreen();
const { setLoading } = useGlobalStore();
const { pushChatHistory } = useChatStore();
// 中断请求
const controller = useRef(new AbortController());
useEffect(() => {
controller.current = new AbortController();
return () => {
console.log('close========');
// eslint-disable-next-line react-hooks/exhaustive-deps
controller.current?.abort();
};
}, [chatId]);
// 滚动到底部
const scrollToBottom = useCallback(() => {
@@ -110,42 +83,6 @@ const Chat = ({ chatId }: { chatId: string }) => {
}, 100);
}, []);
// 初始化聊天框
useQuery(
['init', chatId],
() => {
setLoading(true);
return getInitChatSiteInfo(chatId);
},
{
onSuccess(res) {
setChatData({
...res,
history: res.history.map((item) => ({
...item,
status: 'finish'
}))
});
if (res.history.length > 0) {
setTimeout(() => {
scrollToBottom();
}, 500);
}
},
onError(e: any) {
toast({
title: e?.message || '初始化异常,请检查地址',
status: 'error',
isClosable: true,
duration: 5000
});
},
onSettled() {
setLoading(false);
}
}
);
// 重置输入内容
const resetInputVal = useCallback((val: string) => {
setInputVal(val);
@@ -172,39 +109,16 @@ const Chat = ({ chatId }: { chatId: string }) => {
onCloseSlider();
}, [chatData, onCloseSlider, router, toast]);
// gpt3 方法
const gpt3ChatPrompt = useCallback(
async (newChatList: ChatSiteItemType[]) => {
// 请求内容
const response = await postGPT3SendPrompt({
prompt: newChatList,
chatId: chatId as string
});
// 更新 AI 的内容
setChatData((state) => ({
...state,
history: state.history.map((item, index) => {
if (index !== state.history.length - 1) return item;
return {
...item,
status: 'finish',
value: response
};
})
}));
},
[chatId]
);
// gpt 对话
const gptChatPrompt = useCallback(
async (prompts: ChatSiteItemType) => {
const urlMap: Record<string, string> = {
[ChatModelNameEnum.GPT35]: '/api/chat/chatGpt',
[ChatModelNameEnum.VECTOR_GPT]: '/api/chat/vectorGpt',
[ChatModelNameEnum.GPT3]: '/api/chat/gpt3'
};
if (!urlMap[chatData.chatModel]) return Promise.reject('找不到模型');
if (!urlMap[chatData.modelName]) return Promise.reject('找不到模型');
const prompt = {
obj: prompts.obj,
@@ -212,7 +126,7 @@ const Chat = ({ chatId }: { chatId: string }) => {
};
// 流请求,获取数据
const res = await streamFetch({
url: urlMap[chatData.chatModel],
url: urlMap[chatData.modelName],
data: {
prompt,
chatId
@@ -265,21 +179,40 @@ const Chat = ({ chatId }: { chatId: string }) => {
})
}));
},
[chatData.chatModel, chatId, toast]
[chatData.modelName, chatId, toast]
);
/**
* 发送一个内容
*/
const sendPrompt = useCallback(async () => {
if (isChatting) {
toast({
title: '正在聊天中...请等待结束',
status: 'warning'
});
return;
}
const storeInput = inputVal;
// 去除空行
const val = inputVal
.trim()
.split('\n')
.filter((val) => val)
.join('\n');
if (!chatData?.modelId || !val || !ChatBox.current || isChatting) {
const val = inputVal.trim().replace(/\n\s*/g, '\n');
if (!chatData?.modelId || !val) {
toast({
title: '内容为空',
status: 'warning'
});
return;
}
// 长度校验
const model = modelList.find((item) => item.model === chatData.modelName);
if (model && val.length >= model.maxToken) {
toast({
title: '单次输入超出 4000 字符',
status: 'warning'
});
return;
}
@@ -335,32 +268,90 @@ const Chat = ({ chatId }: { chatId: string }) => {
}
}, [
inputVal,
chatData?.modelId,
chatData.history,
chatData,
isChatting,
resetInputVal,
scrollToBottom,
toast,
gptChatPrompt,
pushChatHistory,
chatId,
toast
chatId
]);
// 重新编辑
const reEdit = useCallback(async () => {
if (chatData.history[chatData.history.length - 1]?.obj !== 'Human') return;
// 删除数据库最后一句
await delLastMessage(chatId);
const val = chatData.history[chatData.history.length - 1].value;
// 删除一句话
const delChatRecord = useCallback(
async (index: number) => {
setLoading(true);
try {
// 删除数据库最后一句
await delChatRecordByIndex(chatId, index);
resetInputVal(val);
setChatData((state) => ({
...state,
history: state.history.filter((_, i) => i !== index)
}));
} catch (err) {
console.log(err);
}
setLoading(false);
},
[chatId, setLoading]
);
setChatData((state) => ({
...state,
history: state.history.slice(0, -1)
}));
}, [chatData.history, chatId, resetInputVal]);
// 复制内容
const onclickCopy = useCallback(
(value: string) => {
const val = value.replace(/\n+/g, '\n');
copyData(val);
},
[copyData]
);
// 初始化聊天框
useQuery(
['init', chatId],
() => {
setLoading(true);
return getInitChatSiteInfo(chatId);
},
{
onSuccess(res) {
setChatData({
...res,
history: res.history.map((item) => ({
...item,
status: 'finish'
}))
});
if (res.history.length > 0) {
setTimeout(() => {
scrollToBottom();
}, 500);
}
},
onError(e: any) {
toast({
title: e?.message || '初始化异常,请检查地址',
status: 'error',
isClosable: true,
duration: 5000
});
router.push('/model/list');
},
onSettled() {
setLoading(false);
}
}
);
// 更新流中断对象
useEffect(() => {
controller.current = new AbortController();
return () => {
// eslint-disable-next-line react-hooks/exhaustive-deps
controller.current?.abort();
};
}, [chatId]);
return (
<Flex
h={'100%'}
@@ -389,7 +380,7 @@ const Chat = ({ chatId }: { chatId: string }) => {
px={7}
>
<Box onClick={onOpenSlider}>
<MyIcon
<Icon
name={'menu'}
w={'20px'}
h={'20px'}
@@ -419,7 +410,7 @@ const Chat = ({ chatId }: { chatId: string }) => {
flexDirection={'column'}
>
{/* 聊天内容 */}
<Box ref={ChatBox} flex={'1 0 0'} h={0} w={'100%'} overflowY={'auto'}>
<Box ref={ChatBox} pb={[4, 0]} flex={'1 0 0'} h={0} w={'100%'} overflowY={'auto'}>
{chatData.history.map((item, index) => (
<Box
key={index}
@@ -432,15 +423,21 @@ const Chat = ({ chatId }: { chatId: string }) => {
borderBottom={'1px solid rgba(0,0,0,0.1)'}
>
<Flex maxW={'750px'} m={'auto'} alignItems={'flex-start'}>
<Box mr={media(4, 1)}>
<Image
src={item.obj === 'Human' ? '/icon/human.png' : '/icon/logo.png'}
alt="/icon/logo.png"
width={media(30, 20)}
height={media(30, 20)}
/>
</Box>
<Box flex={'1 0 0'} w={0} overflow={'hidden'}>
<Menu>
<MenuButton as={Box} mr={media(4, 1)} cursor={'pointer'}>
<Image
src={item.obj === 'Human' ? '/icon/human.png' : '/icon/logo.png'}
alt="/icon/logo.png"
width={media(30, 20)}
height={media(30, 20)}
/>
</MenuButton>
<MenuList fontSize={'sm'}>
<MenuItem onClick={() => onclickCopy(item.value)}></MenuItem>
<MenuItem onClick={() => delChatRecord(index)}></MenuItem>
</MenuList>
</Menu>
<Box flex={'1 0 0'} w={0} overflow={'hidden'} id={`chat${index}`}>
{item.obj === 'AI' ? (
<Markdown
source={item.value}
@@ -457,89 +454,82 @@ const Chat = ({ chatId }: { chatId: string }) => {
</Box>
{/* 发送区 */}
<Box m={media('20px auto', '0 auto')} w={'100%'} maxW={media('min(750px, 100%)', 'auto')}>
{!!chatWindowError ? (
<Box textAlign={'center'}>
<Box color={'red'}>{chatWindowError.text}</Box>
<Flex py={5} justifyContent={'center'}>
{getToken() && <Button onClick={resetChat}></Button>}
{chatWindowError.canDelete && (
<Button ml={20} colorScheme={'green'} onClick={reEdit}>
</Button>
)}
</Flex>
</Box>
) : (
<Box
py={5}
position={'relative'}
boxShadow={`0 0 15px rgba(0,0,0,0.1)`}
border={media('1px solid', '0')}
borderColor={useColorModeValue('gray.200', 'gray.700')}
borderRadius={['none', 'md']}
backgroundColor={useColorModeValue('white', 'gray.700')}
<Box
py={'18px'}
position={'relative'}
boxShadow={`0 0 15px rgba(0,0,0,0.1)`}
border={media('1px solid', '0')}
borderColor={useColorModeValue('gray.200', 'gray.700')}
borderRadius={['none', 'md']}
backgroundColor={useColorModeValue('white', 'gray.700')}
>
{/* 输入框 */}
<Textarea
ref={TextareaDom}
py={0}
pr={['45px', '55px']}
border={'none'}
_focusVisible={{
border: 'none'
}}
placeholder="提问"
resize={'none'}
value={inputVal}
rows={1}
height={'22px'}
lineHeight={'22px'}
maxHeight={'150px'}
maxLength={-1}
overflowY={'auto'}
whiteSpace={'pre-wrap'}
wordBreak={'break-all'}
color={useColorModeValue('blackAlpha.700', 'white')}
onChange={(e) => {
const textarea = e.target;
setInputVal(textarea.value);
textarea.style.height = textareaMinH;
textarea.style.height = `${textarea.scrollHeight}px`;
}}
onKeyDown={(e) => {
// 触发快捷发送
if (isPc && e.keyCode === 13 && !e.shiftKey) {
sendPrompt();
e.preventDefault();
}
// 全选内容
// @ts-ignore
e.key === 'a' && e.ctrlKey && e.target?.select();
}}
/>
{/* 发送和等待按键 */}
<Flex
alignItems={'center'}
justifyContent={'center'}
h={'30px'}
w={'30px'}
position={'absolute'}
right={['12px', '20px']}
bottom={'15px'}
onClick={sendPrompt}
>
{/* 输入框 */}
<Textarea
ref={TextareaDom}
w={'100%'}
pr={'45px'}
py={0}
border={'none'}
_focusVisible={{
border: 'none'
}}
placeholder="提问"
resize={'none'}
value={inputVal}
rows={1}
height={'22px'}
lineHeight={'22px'}
maxHeight={'150px'}
maxLength={chatData?.secret.contentMaxLen || -1}
overflowY={'auto'}
color={useColorModeValue('blackAlpha.700', 'white')}
onChange={(e) => {
const textarea = e.target;
setInputVal(textarea.value);
textarea.style.height = textareaMinH;
textarea.style.height = `${textarea.scrollHeight}px`;
}}
onKeyDown={(e) => {
// 触发快捷发送
if (isPc && e.keyCode === 13 && !e.shiftKey) {
sendPrompt();
e.preventDefault();
}
// 全选内容
// @ts-ignore
e.key === 'a' && e.ctrlKey && e.target?.select();
}}
/>
{/* 发送和等待按键 */}
<Box position={'absolute'} bottom={5} right={media('20px', '10px')}>
{isChatting ? (
<Image
style={{ transform: 'translateY(4px)' }}
src={'/icon/chatting.svg'}
width={30}
height={30}
alt={''}
/>
) : (
<Box cursor={'pointer'} onClick={sendPrompt}>
<Icon
name={'icon-fasong'}
width={20}
height={20}
color={useColorModeValue('#718096', 'white')}
></Icon>
</Box>
)}
</Box>
</Box>
)}
{isChatting ? (
<Icon
style={{ transform: 'translateY(4px)' }}
h={'30px'}
w={'30px'}
name={'chatting'}
/>
) : (
<Icon
name={'chatSend'}
width={['18px', '20px']}
height={['18px', '20px']}
cursor={'pointer'}
fill={useColorModeValue('#718096', 'white')}
></Icon>
)}
</Flex>
</Box>
</Box>
</Flex>
</Flex>
@@ -549,7 +539,7 @@ const Chat = ({ chatId }: { chatId: string }) => {
export default Chat;
export async function getServerSideProps(context: any) {
const chatId = context.query?.chatId || '';
const chatId = context?.query?.chatId || 'noid';
return {
props: { chatId }

View File

@@ -8,10 +8,21 @@ import {
ModalBody,
ModalCloseButton,
Button,
Input
Input,
Select,
FormControl,
FormErrorMessage
} from '@chakra-ui/react';
import { postData } from '@/api/data';
import { useMutation } from '@tanstack/react-query';
import { useForm, SubmitHandler } from 'react-hook-form';
import { DataType } from '@/types/data';
import { DataTypeTextMap } from '@/constants/data';
export interface CreateDataProps {
name: string;
type: DataType;
}
const CreateDataModal = ({
onClose,
@@ -21,9 +32,20 @@ const CreateDataModal = ({
onSuccess: () => void;
}) => {
const [inputVal, setInputVal] = useState('');
const {
getValues,
register,
handleSubmit,
formState: { errors }
} = useForm<CreateDataProps>({
defaultValues: {
name: '',
type: 'abstract'
}
});
const { isLoading, mutate } = useMutation({
mutationFn: (name: string) => postData(name),
mutationFn: (e: CreateDataProps) => postData(e),
onSuccess() {
onSuccess();
onClose();
@@ -37,23 +59,33 @@ const CreateDataModal = ({
<ModalHeader></ModalHeader>
<ModalCloseButton />
<ModalBody display={'flex'}>
<Input
value={inputVal}
onChange={(e) => setInputVal(e.target.value)}
placeholder={'数据集名称'}
></Input>
<ModalBody>
<FormControl mb={8} isInvalid={!!errors.name}>
<Input
placeholder="数据集名称"
{...register('name', {
required: '数据集名称不能为空'
})}
/>
<FormErrorMessage position={'absolute'} fontSize="xs">
{!!errors.name && errors.name.message}
</FormErrorMessage>
</FormControl>
<FormControl>
<Select placeholder="数据集类型" {...register('type', {})}>
{Object.entries(DataTypeTextMap).map(([key, value]) => (
<option key={key} value={key}>
{value}
</option>
))}
</Select>
</FormControl>
</ModalBody>
<ModalFooter>
<Button colorScheme={'gray'} onClick={onClose}>
</Button>
<Button
ml={3}
isDisabled={inputVal === ''}
isLoading={isLoading}
onClick={() => mutate(inputVal)}
>
<Button ml={3} isLoading={isLoading} onClick={handleSubmit(mutate as any)}>
</Button>
</ModalFooter>

View File

@@ -22,6 +22,7 @@ import { useToast } from '@/hooks/useToast';
import { useLoading } from '@/hooks/useLoading';
import { formatPrice } from '@/utils/user';
import { modelList, ChatModelNameEnum } from '@/constants/model';
import { encode } from 'gpt-token-utils';
const fileExtension = '.txt,.doc,.docx,.pdf,.md';
@@ -102,8 +103,11 @@ const ImportDataModal = ({
}
})
)
).join('\n');
)
.join('\n')
.replace(/\n+/g, '\n');
setFileText(fileTexts);
console.log(encode(fileTexts));
} catch (error: any) {
console.log(error);
toast({
@@ -159,7 +163,9 @@ const ImportDataModal = ({
placeholder={'请粘贴或输入需要处理的文本'}
onChange={(e) => setTextInput(e.target.value)}
/>
<Box mt={2}> {textInput.length} </Box>
<Box mt={2}>
{textInput.length} {encode(textInput).length} tokens
</Box>
</>
)}
{activeTab === 'doc' && (
@@ -172,12 +178,15 @@ const ImportDataModal = ({
border={'1px solid '}
borderColor={'blackAlpha.200'}
borderRadius={'md'}
fontSize={'sm'}
>
<Button onClick={onOpen}></Button>
<Box mt={2}> {fileExtension} </Box>
{fileText && (
<>
<Box mt={2}> {fileText.length} </Box>
<Box mt={2}>
{fileText.length} {encode(fileText).length} tokens
</Box>
<Box
maxH={'300px'}
w={'100%'}

Some files were not shown because too many files have changed in this diff Show More