Compare commits

..

8 Commits
v3.7.3 ... v3.8

Author SHA1 Message Date
archer
02caa57304 docs 2023-05-23 19:18:52 +08:00
archer
6014a56e54 feat: system prompt 2023-05-23 19:13:01 +08:00
archer
b8f08eb33e feat: quote change 2023-05-23 18:35:45 +08:00
archer
944e876aaa feat: chat quote 2023-05-23 15:09:57 +08:00
archer
ee2c259c3d perf: text and avatar 2023-05-22 16:47:41 +08:00
archer
1c8db69a5a feat: limit export kb 2023-05-22 14:14:06 +08:00
archer
5128bbcce4 perf: insert kb data 2023-05-22 13:16:34 +08:00
archer
51a5d450b7 feat: content check 2023-05-21 22:12:02 +08:00
86 changed files with 1809 additions and 1001 deletions

View File

@@ -16,6 +16,8 @@ aliTemplateCode=SMS_xxx
TOKEN_KEY=xxx
# root key, 最高权限
ROOT_KEY=xxx
# 是否进行安全校验(1: 开启0: 关闭)
SENSITIVE_CHECK=1
# openai
# OPENAI_BASE_URL=https://api.openai.com/v1
# OPENAI_BASE_URL_AUTH=可选的安全凭证(不需要的时候,记得去掉)

View File

@@ -52,6 +52,8 @@ COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
USER nextjs
ENV PORT=3000
EXPOSE 3000
CMD ["node", "server.js"]

View File

@@ -36,6 +36,7 @@ Fast GPT 允许你使用自己的 openai API KEY 来快速的调用 openai 接
## 👀 其他
- [FastGpt 常见问题](https://kjqvjse66l.feishu.cn/docx/HtrgdT0pkonP4kxGx8qcu6XDnGh)
- [公众号接入](https://www.bilibili.com/video/BV1xh4y1t7fy/)
- [FastGpt + Laf 最佳实践,将知识库装入公众号,点击去 Laf 公众号体验效果](https://b4jky7-fastgpt.oss.laf.run/lafercode.png)
- [FastGpt V3.4 更新集合](https://www.bilibili.com/video/BV1Lo4y147Qh/?vd_source=92041a1a395f852f9d89158eaa3f61b4)
- [FastGpt 知识库演示](https://www.bilibili.com/video/BV1Wo4y1p7i1/)

View File

@@ -2,7 +2,7 @@
## 代理环境(国外服务器可忽略)
选择一个即可。
选择一个即可。这只是代理!!!不是项目。
1. [sealos nginx 方案](./proxy/sealos.md) - 推荐。约等于不用钱,不需要额外准备任何东西。
2. [clash 方案](./proxy/clash.md) - 仅需一台服务器(需要有 clash

View File

@@ -52,6 +52,10 @@ services:
- aliTemplateCode=SMS_xxxx
# token加密凭证随便填作为登录凭证
- TOKEN_KEY=xxxx
# root key, 最高权限,可以内部接口互相调用
- ROOT_KEY=xxx
# 是否进行安全校验(1: 开启0: 关闭)
- SENSITIVE_CHECK=1
# 和上方mongo镜像的username,password对应
- MONGODB_URI=mongodb://username:password@0.0.0.0:27017/?authSource=admin
- MONGODB_NAME=fastgpt

View File

@@ -90,7 +90,7 @@ http {
1. 进入刚刚部署应用的详情,复制外网地址
![step5](./imgs//sealos5.png)
2. 修改环境变量:
2. 修改环境变量(是 FastGpt 的环境变量,不是 sealos 的):
```
OPENAI_BASE_URL=https://tgohwtdlrmer.cloud.sealos.io/openai/v1

View File

@@ -10,34 +10,38 @@
# proxy可选
AXIOS_PROXY_HOST=127.0.0.1
AXIOS_PROXY_PORT=7890
# openai 中转连接(可选
OPENAI_BASE_URL=https://api.openai.com/v1
OPENAI_BASE_URL_AUTH=可选的安全凭证
# 是否开启队列任务。 1-开启0-关闭(请求 parentUrl 去执行任务,单机时直接填1
# 是否开启队列任务。 1-开启0-关闭请求parentUrl去执行任务,单机时直接填1
queueTask=1
parentUrl=https://hostname/api/openapi/startEvents
# 发送邮箱验证码配置。用的是 QQ 邮箱。参考 nodeMail 获取MAILE_CODE自行百度。
MY_MAIL=xxxx@qq.com
MAILE_CODE=xxxx
# 阿里短信服务(邮箱和短信至少二选一)
aliAccessKeyId=xxxx
aliAccessKeySecret=xxxx
aliSignName=xxxxx
aliTemplateCode=SMS_xxxx
# token加密凭证(随便填,作为登录凭证)
TOKEN_KEY=xxxx
queueTask=1
parentUrl=https://hostname/api/openapi/startEvents
# 和mongo镜像的username,password对应
MONGODB_URI=mongodb://username:passsword@0.0.0.0:27017/?authSource=admin
MONGODB_NAME=xxx
# email
MY_MAIL=xxx@qq.com
MAILE_CODE=xxx
# ali ems
aliAccessKeyId=xxx
aliAccessKeySecret=xxx
aliSignName=xxx
aliTemplateCode=SMS_xxx
# token
TOKEN_KEY=xxx
# root key, 最高权限
ROOT_KEY=xxx
# 是否进行安全校验(1: 开启0: 关闭)
SENSITIVE_CHECK=1
# openai
# OPENAI_BASE_URL=https://api.openai.com/v1
# OPENAI_BASE_URL_AUTH=可选的安全凭证(不需要的时候,记得去掉)
OPENAIKEY=sk-xxx
GPT4KEY=sk-xxx
# claude
CLAUDE_BASE_URL=calude模型请求地址
CLAUDE_KEY=CLAUDE_KEY
# db
MONGODB_URI=mongodb://username:password@0.0.0.0:27017/test?authSource=admin
PG_HOST=0.0.0.0
PG_PORT=8100
# 和PG镜像对应.
PG_USER=fastgpt # POSTGRES_USER
PG_PASSWORD=1234 # POSTGRES_PASSWORD
PG_DB_NAME=fastgpt # POSTGRES_DB
OPENAIKEY=sk-xxxxx
PG_USER=xxx
PG_PASSWORD=xxx
PG_DB_NAME=xxx
```
## 运行

View File

@@ -1,6 +1,6 @@
{
"name": "fastgpt",
"version": "0.1.0",
"version": "3.7",
"private": true,
"scripts": {
"dev": "next dev",
@@ -83,5 +83,8 @@
},
"lint-staged": {
"./src/**/*.{ts,tsx,scss}": "npm run format"
},
"engines": {
"node": ">=18.0.0"
}
}

View File

@@ -1,3 +1,4 @@
### Fast GPT V3.7
### Fast GPT V3.8
- 新增 - 知识库引用反馈。
- 新增 - 知识库与 AI 助手对多对关系,一个知识库可以被多个 AI 助手关联,一个 AI 助手可以关联多个知识库。

View File

@@ -1,10 +1,11 @@
import { GET, POST, DELETE } from './request';
import type { ChatItemType, HistoryItemType } from '@/types/chat';
import type { HistoryItemType } from '@/types/chat';
import type { InitChatResponse, InitShareChatResponse } from './response/chat';
import { RequestPaging } from '../types/index';
import type { ShareChatSchema } from '@/types/mongoSchema';
import type { ShareChatEditType } from '@/types/model';
import { Obj2Query } from '@/utils/tools';
import { QuoteItemType } from '@/pages/api/openapi/kb/appKbSearch';
/**
* 获取初始化聊天内容
@@ -24,14 +25,19 @@ export const getChatHistory = (data: RequestPaging) =>
export const delChatHistoryById = (id: string) => GET(`/chat/removeHistory?id=${id}`);
/**
* 存储一轮对话
* get history quotes
*/
export const postSaveChat = (data: {
modelId: string;
newChatId: '' | string;
chatId: '' | string;
prompts: [ChatItemType, ChatItemType];
}) => POST<string>('/chat/saveChat', data);
export const getHistoryQuote = (params: { chatId: string; historyId: string }) =>
GET<(QuoteItemType & { _id: string })[]>(`/chat/getHistoryQuote`, params);
/**
* update history quote status
*/
export const updateHistoryQuote = (params: {
chatId: string;
historyId: string;
quoteId: string;
}) => GET(`/chat/updateHistoryQuote`, params);
/**
* 删除一句对话

View File

@@ -1,4 +1,4 @@
import { SYSTEM_PROMPT_HEADER, NEW_CHATID_HEADER } from '@/constants/chat';
import { GUIDE_PROMPT_HEADER, NEW_CHATID_HEADER, QUOTE_LEN_HEADER } from '@/constants/chat';
interface StreamFetchProps {
url: string;
@@ -7,7 +7,7 @@ interface StreamFetchProps {
abortSignal: AbortController;
}
export const streamFetch = ({ url, data, onMessage, abortSignal }: StreamFetchProps) =>
new Promise<{ responseText: string; systemPrompt: string; newChatId: string }>(
new Promise<{ responseText: string; newChatId: string; systemPrompt: string; quoteLen: number }>(
async (resolve, reject) => {
try {
const res = await fetch(url, {
@@ -23,8 +23,11 @@ export const streamFetch = ({ url, data, onMessage, abortSignal }: StreamFetchPr
const decoder = new TextDecoder();
const systemPrompt = decodeURIComponent(res.headers.get(SYSTEM_PROMPT_HEADER) || '').trim();
const newChatId = decodeURIComponent(res.headers.get(NEW_CHATID_HEADER) || '');
const systemPrompt = decodeURIComponent(res.headers.get(GUIDE_PROMPT_HEADER) || '').trim();
const quoteLen = res.headers.get(QUOTE_LEN_HEADER)
? Number(res.headers.get(QUOTE_LEN_HEADER))
: 0;
let responseText = '';
@@ -33,7 +36,7 @@ export const streamFetch = ({ url, data, onMessage, abortSignal }: StreamFetchPr
const { done, value } = await reader?.read();
if (done) {
if (res.status === 200) {
resolve({ responseText, systemPrompt, newChatId });
resolve({ responseText, newChatId, quoteLen, systemPrompt });
} else {
const parseError = JSON.parse(responseText);
reject(parseError?.message || '请求异常');
@@ -47,7 +50,7 @@ export const streamFetch = ({ url, data, onMessage, abortSignal }: StreamFetchPr
read();
} catch (err: any) {
if (err?.message === 'The user aborted a request.') {
return resolve({ responseText, systemPrompt, newChatId });
return resolve({ responseText, newChatId, quoteLen: 0, systemPrompt: '' });
}
reject(typeof err === 'string' ? err : err?.message || '请求异常');
}

View File

@@ -1,6 +1,6 @@
import { GET, POST, DELETE, PUT } from './request';
import type { ModelSchema } from '@/types/mongoSchema';
import type { ModelUpdateParams, ShareModelItem } from '@/types/model';
import type { ModelUpdateParams } from '@/types/model';
import { RequestPaging } from '../types/index';
import type { ModelListResponse } from './response/model';
@@ -36,10 +36,7 @@ export const putModelById = (id: string, data: ModelUpdateParams) =>
*/
export const getShareModelList = (data: { searchText?: string } & RequestPaging) =>
POST(`/model/share/getModels`, data);
/**
* 获取我收藏的模型
*/
export const getCollectionModels = () => GET<ShareModelItem[]>(`/model/share/getCollection`);
/**
* 收藏/取消收藏模型
*/

View File

@@ -40,10 +40,8 @@ export const getTrainingData = (kbId: string) =>
embeddingQueue: number;
}>(`/plugins/kb/data/getTrainingData?kbId=${kbId}`);
/**
* 获取 web 页面内容
*/
export const getWebContent = (url: string) => POST<string>(`/model/data/fetchingUrlData`, { url });
export const getKbDataItemById = (dataId: string) =>
GET(`/plugins/kb/data/getDataById`, { dataId });
/**
* 直接push数据

View File

@@ -54,13 +54,13 @@ function responseError(err: any) {
if (typeof err === 'string') {
return Promise.reject({ message: err });
}
if (err.response) {
// 有报错响应
const res = err.response;
if (res.data.code in TOKEN_ERROR_CODE) {
clearCookie();
return Promise.reject({ message: 'token过期重新登录' });
}
// 有报错响应
if (err?.code in TOKEN_ERROR_CODE) {
clearCookie();
window.location.replace(
`/login?lastRoute=${encodeURIComponent(location.pathname + location.search)}`
);
return Promise.reject({ message: 'token过期重新登录' });
}
return Promise.reject(err);
}

View File

@@ -0,0 +1,19 @@
import React from 'react';
import { Image } from '@chakra-ui/react';
import type { ImageProps } from '@chakra-ui/react';
const Avatar = ({ w = '30px', ...props }: ImageProps) => {
return (
<Image
fallbackSrc="/icon/logo.png"
borderRadius={'50%'}
objectFit={'contain'}
alt=""
w={w}
h={w}
{...props}
/>
);
};
export default Avatar;

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1684739031957" class="icon" viewBox="0 0 1026 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="4988" xmlns:xlink="http://www.w3.org/1999/xlink" width="64.125" height="64"><path d="M371.732817 94.172314q25.773475 0 44.112294 17.843175t18.338819 43.616651l0 247.821878q0 25.773475-18.338819 44.112294t-44.112294 18.338819l-247.821878 0q-25.773475 0-43.616651-18.338819t-17.843175-44.112294l0-247.821878q0-25.773475 17.843175-43.616651t43.616651-17.843175l247.821878 0zM371.732817 589.81607q25.773475 0 44.112294 17.843175t18.338819 43.616651l0 248.813166q0 25.773475-18.338819 43.616651t-44.112294 17.843175l-247.821878 0q-25.773475 0-43.616651-17.843175t-17.843175-43.616651l0-248.813166q0-25.773475 17.843175-43.616651t43.616651-17.843175l247.821878 0zM868.367861 589.81607q25.773475 0 43.616651 17.843175t17.843175 43.616651l0 248.813166q0 25.773475-17.843175 43.616651t-43.616651 17.843175l-247.821878 0q-25.773475 0-44.112294-17.843175t-18.338819-43.616651l0-248.813166q0-25.773475 18.338819-43.616651t44.112294-17.843175l247.821878 0zM1006.156825 203.21394q19.82575 19.82575 19.82575 46.590513t-19.82575 45.599226l-184.379477 184.379477q-19.82575 19.82575-46.094869 19.82575t-46.094869-19.82575l-184.379477-184.379477q-18.834463-18.834463-18.834463-45.599226t18.834463-46.590513l184.379477-184.379477q19.82575-18.834463 46.094869-18.834463t46.094869 18.834463z" p-id="4989"></path></svg>

After

Width:  |  Height:  |  Size: 1.5 KiB

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1684826302600" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="2244" xmlns:xlink="http://www.w3.org/1999/xlink" ><path d="M904 512h-56c-4.4 0-8 3.6-8 8v320H184V184h320c4.4 0 8-3.6 8-8v-56c0-4.4-3.6-8-8-8H144c-17.7 0-32 14.3-32 32v736c0 17.7 14.3 32 32 32h736c17.7 0 32-14.3 32-32V520c0-4.4-3.6-8-8-8z" p-id="2245"></path><path d="M355.9 534.9L354 653.8c-0.1 8.9 7.1 16.2 16 16.2h0.4l118-2.9c2-0.1 4-0.9 5.4-2.3l415.9-415c3.1-3.1 3.1-8.2 0-11.3L785.4 114.3c-1.6-1.6-3.6-2.3-5.7-2.3s-4.1 0.8-5.7 2.3l-415.8 415c-1.4 1.5-2.3 3.5-2.3 5.6z m63.5 23.6L779.7 199l45.2 45.1-360.5 359.7-45.7 1.1 0.7-46.4z" p-id="2246"></path></svg>

After

Width:  |  Height:  |  Size: 810 B

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1684745011703" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1481" xmlns:xlink="http://www.w3.org/1999/xlink" ><path d="M110.025 252.249c0 13.741 5.456 27.374 14.899 37.112s22.663 15.364 35.987 15.364 26.544-5.626 35.987-15.364c9.441-9.738 14.899-23.371 14.899-37.112s-5.456-27.375-14.899-37.111c-9.442-9.738-22.663-15.364-35.987-15.364s-26.544 5.626-35.987 15.364c-9.338 9.736-14.899 23.37-14.899 37.111m0 0zM103.625 512.575c0 13.741 5.455 27.482 14.899 37.22 9.442 9.738 22.663 15.364 36.091 15.364 13.324 0 26.649-5.626 36.091-15.364s14.899-23.371 14.899-37.22c0-13.741-5.455-27.482-14.899-37.22-9.442-9.738-22.662-15.364-36.091-15.364-13.324 0-26.649 5.626-36.091 15.364-9.444 9.737-14.899 23.37-14.899 37.22m0 0zM103.625 774.089c0 13.741 5.455 27.482 14.899 37.22 9.442 9.738 22.663 15.364 36.091 15.364 13.324 0 26.649-5.626 36.091-15.364s14.899-23.37 14.899-37.22c0-13.741-5.455-27.482-14.899-37.22-9.442-9.737-22.662-15.364-36.091-15.364-13.324 0-26.649 5.627-36.091 15.364-9.444 9.847-14.899 23.479-14.899 37.22m0 0zM919.041 249.869c0 27.699-19.935 50.095-44.59 50.095H345.88c-24.655 0-44.59-22.397-44.59-50.095 0-27.699 19.935-50.095 44.59-50.095h528.571c24.656-0.001 44.59 22.396 44.59 50.095m0 0zM919.041 510.195c0 27.59-19.935 50.095-44.59 50.095H345.88c-24.655 0-44.59-22.398-44.59-50.096 0-27.699 19.935-50.096 44.59-50.096h528.571c24.656-0.109 44.59 22.397 44.59 50.097m0 0zM919.041 771.601c0 27.699-19.935 50.096-44.59 50.096H345.88c-24.655 0-44.59-22.397-44.59-50.096 0-27.591 19.935-49.988 44.59-49.988h528.571c24.656-0.108 44.59 22.397 44.59 49.988m0 0z" p-id="1482"></path></svg>

After

Width:  |  Height:  |  Size: 1.7 KiB

View File

@@ -1 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1683450447995" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="2005" xmlns:xlink="http://www.w3.org/1999/xlink" width="64" height="64"><path d="M728.99015111 121.90378667h107.92732444V40.96h-107.92732444v80.94378667z m175.38161778 67.45429333v107.92732445H985.31555555v-107.92732445h-80.94378666z m-67.45429334 175.38161778h-107.92732444v80.94378667h107.92732444v-80.94378667z m-175.38161777-67.45429333v-107.92732445h-80.94378667v107.92732445h80.94378667z m67.45429333 67.45429333c-37.23491555 0-67.45429333-30.21937778-67.45429333-67.45429333h-80.94378667c0 81.97006222 66.42801778 148.39808 148.39808 148.39808v-80.94378667z m175.38161778-67.45429333c0 37.23491555-30.21937778 67.45429333-67.45429334 67.45429333v80.94378667c81.97006222 0 148.39808-66.42801778 148.39808-148.39808h-80.94378666zM836.91747555 121.90151111c37.23491555 0 67.45429333 30.21937778 67.45429334 67.45429334H985.31555555C985.31555555 107.38801778 918.88753778 40.96 836.91747555 40.96v80.94378667zM728.99015111 40.96c-81.97006222 0-148.39808 66.42801778-148.39808 148.39808h80.94378667c0-37.23491555 30.21937778-67.45429333 67.45429333-67.45429333V40.96zM189.35808 661.53585778h107.92732445v-80.94378667h-107.92732445v80.94378667z m175.38161778 67.45429333v107.92732444h80.94378667v-107.92732444h-80.94378667z m-67.45429333 175.38161778h-107.92732445V985.31555555h107.92732445v-80.94378666zM121.90151111 836.91747555v-107.92732444H40.96v107.92732444h80.94378667z m67.45429334 67.45429334c-37.23491555 0-67.45429333-30.21937778-67.45429334-67.45429334H40.96C40.96 918.88753778 107.38801778 985.31555555 189.35808 985.31555555v-80.94378666z m175.38161777-67.45429334c0 37.23491555-30.21937778 67.45429333-67.45429333 67.45429334V985.31555555c81.97006222 0 148.39808-66.42801778 148.39808-148.39808h-80.94378667z 
m-67.45429333-175.38161777c37.23491555 0 67.45429333 30.21937778 67.45429333 67.45429333h80.94378667c0-81.97006222-66.42801778-148.39808-148.39808-148.39808v80.94378667z m-107.92732444-80.94378667C107.38801778 580.59207111 40.96 647.02008889 40.96 728.99015111h80.94378667c0-37.23491555 30.21937778-67.45429333 67.45429333-67.45429333v-80.94378667z m0-458.68828444h107.92732444V40.96h-107.92732444v80.94378667z m175.38161777 67.45429333v107.92732445h80.94378667v-107.92732445h-80.94378667z m-67.45429333 175.38161778h-107.92732444v80.94378667h107.92732444v-80.94378667zM121.90151111 297.28540445v-107.92732445H40.96v107.92732445h80.94378667z m67.45429334 67.45429333c-37.23491555 0-67.45429333-30.21937778-67.45429334-67.45429333H40.96c0 81.97006222 66.42801778 148.39808 148.39808 148.39808v-80.94378667z m175.38161777-67.45429333c0 37.23491555-30.21937778 67.45429333-67.45429333 67.45429333v80.94378667c81.97006222 0 148.39808-66.42801778 148.39808-148.39808h-80.94378667zM297.28540445 121.90151111c37.23491555 0 67.45429333 30.21937778 67.45429333 67.45429334h80.94378667c0-81.97006222-66.42801778-148.39808-148.39808-148.39808v80.94378666zM189.35808 40.96C107.38801778 40.96 40.96 107.38801778 40.96 189.35808h80.94378667c0-37.23491555 30.21937778-67.45429333 67.45429333-67.45429333V40.96z m539.63207111 620.57585778h107.92732444v-80.94378667h-107.92732444v80.94378667z m175.38161778 67.45429333v107.92732444H985.31555555v-107.92732444h-80.94378666z m-67.45429334 175.38161778h-107.92732444V985.31555555h107.92732444v-80.94378666z m-175.38161777-67.45429334v-107.92732444h-80.94378667v107.92732444h80.94378667z m67.45429333 67.45429334c-37.23491555 0-67.45429333-30.21937778-67.45429333-67.45429334h-80.94378667c0 81.97006222 66.42801778 148.39808 148.39808 148.39808v-80.94378666z m175.38161778-67.45429334c0 37.23491555-30.21937778 67.45429333-67.45429334 67.45429334V985.31555555C918.88753778 985.31555555 985.31555555 918.88753778 985.31555555 836.91747555h-80.94378666z 
m-67.45429334-175.38161777c37.23491555 0 67.45429333 30.21937778 67.45429334 67.45429333H985.31555555c0-81.97006222-66.42801778-148.39808-148.39808-148.39808v80.94378667z m-107.92732444-80.94378667c-81.97006222 0-148.39808 66.42801778-148.39808 148.39808h80.94378667c0-37.23491555 30.21937778-67.45429333 67.45429333-67.45429333v-80.94378667z" p-id="2006"></path></svg>
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1684739068105" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="7879" xmlns:xlink="http://www.w3.org/1999/xlink" ><path d="M817.87 556.31h-63.58v-66.24A42.27 42.27 0 0 0 712 447.8h-84.81a42.27 42.27 0 0 0-42.27 42.27v66.24H436.57v-66.24a42.27 42.27 0 0 0-42.27-42.27h-84.83a42.27 42.27 0 0 0-42.27 42.27v66.24h-61.83A22.39 22.39 0 0 0 183 578.7a22.39 22.39 0 0 0 22.39 22.39h61.81v65.55a42.27 42.27 0 0 0 42.27 42.27h84.83a42.27 42.27 0 0 0 42.27-42.27v-65.55h148.36v65.55a42.27 42.27 0 0 0 42.27 42.27H712a42.27 42.27 0 0 0 42.27-42.27v-65.55h63.58a22.39 22.39 0 0 0 22.39-22.39 22.39 22.39 0 0 0-22.37-22.39z m-438.64 95.26h-54.69V505.14h54.69z m317.72 0h-54.69V505.14H697z" p-id="7880"></path><path d="M823 202.58h-90.81v-63.09a71.88 71.88 0 0 0-71.88-71.88H363.19a71.88 71.88 0 0 0-71.88 71.88v63.08h-90.12A137.17 137.17 0 0 0 64 339.75v479a137.17 137.17 0 0 0 137.19 137.14H823a137.17 137.17 0 0 0 137.19-137.17v-479A137.17 137.17 0 0 0 823 202.58z m-474.36-54.1A23.52 23.52 0 0 1 372.17 125h279.16a23.52 23.52 0 0 1 23.52 23.52v54.1h-326.2z m554.23 673.31a76.76 76.76 0 0 1-76.76 76.76h-628a76.76 76.76 0 0 1-76.76-76.76V336.67a76.76 76.76 0 0 1 76.76-76.76h628a76.76 76.76 0 0 1 76.76 76.76z" p-id="7881"></path></svg>

Before

Width:  |  Height:  |  Size: 4.2 KiB

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@@ -1 +0,0 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1683254591061" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1213" xmlns:xlink="http://www.w3.org/1999/xlink" width="64" height="64"><path d="M389.5296 650.78613333a204.8 204.8 0 1 1-10.82026667-288.49493333L557.43146667 245.76a153.6 153.6 0 1 1 39.25333333 55.9104l-176.46933333 115.02933333c15.01866667 28.50133333 23.48373333 60.928 23.48373333 95.3344a204.11733333 204.11733333 0 0 1-16.86186667 81.47626667l257.1264 144.62293333a153.6 153.6 0 1 1-30.9248 60.928l-263.54346666-148.24106666z" p-id="1214"></path></svg>

Before

Width:  |  Height:  |  Size: 710 B

View File

@@ -12,7 +12,6 @@ const map = {
delete: require('./icons/delete.svg').default,
withdraw: require('./icons/withdraw.svg').default,
stop: require('./icons/stop.svg').default,
shareMarket: require('./icons/shareMarket.svg').default,
collectionLight: require('./icons/collectionLight.svg').default,
collectionSolid: require('./icons/collectionSolid.svg').default,
chat: require('./icons/chat.svg').default,
@@ -27,7 +26,10 @@ const map = {
wx: require('./icons/wx.svg').default,
out: require('./icons/out.svg').default,
git: require('./icons/git.svg').default,
kb: require('./icons/kb.svg').default
kb: require('./icons/kb.svg').default,
appStore: require('./icons/appStore.svg').default,
menu: require('./icons/menu.svg').default,
edit: require('./icons/edit.svg').default
};
export type IconName = keyof typeof map;

View File

@@ -1,9 +1,10 @@
import React, { useMemo } from 'react';
import { Box, Flex, Image, Tooltip } from '@chakra-ui/react';
import { Box, Flex, Tooltip } from '@chakra-ui/react';
import { useRouter } from 'next/router';
import MyIcon from '../Icon';
import { useUserStore } from '@/store/user';
import { useChatStore } from '@/store/chat';
import Avatar from '../Avatar';
export enum NavbarTypeEnum {
normal = 'normal',
@@ -23,7 +24,7 @@ const Navbar = () => {
activeLink: ['/chat']
},
{
label: 'AI助手',
label: '我的应用',
icon: 'model',
link: `/model?modelId=${lastModelId}`,
activeLink: ['/model']
@@ -35,8 +36,8 @@ const Navbar = () => {
activeLink: ['/kb']
},
{
label: '共享',
icon: 'shareMarket',
label: '应用市场',
icon: 'appStore',
link: '/model/share',
activeLink: ['/model/share']
},
@@ -82,13 +83,7 @@ const Navbar = () => {
cursor={'pointer'}
onClick={() => router.push('/number')}
>
<Image
src={userInfo?.avatar || '/icon/human.png'}
objectFit={'contain'}
w={'36px'}
h={'36px'}
alt=""
/>
<Avatar w={'36px'} h={'36px'} src={userInfo?.avatar} fallbackSrc={'/icon/human.png'} />
</Box>
{/* 导航列表 */}
<Box flex={1}>

View File

@@ -10,25 +10,21 @@ const NavbarPhone = () => {
const navbarList = useMemo(
() => [
{
label: '聊天',
icon: 'tabbarChat',
link: `/chat?modelId=${lastChatModelId}&chatId=${lastChatId}`,
activeLink: ['/chat']
},
{
label: 'AI助手',
icon: 'tabbarModel',
link: `/model`,
activeLink: ['/model']
},
{
label: '发现',
icon: 'tabbarMore',
link: '/tools',
activeLink: ['/tools']
},
{
label: '我',
icon: 'tabbarMe',
link: '/number',
activeLink: ['/number']

View File

@@ -339,9 +339,12 @@
text-align: justify;
tab-size: 4;
word-spacing: normal;
word-break: break-all;
width: 100%;
* {
word-break: break-all;
}
p {
white-space: pre-line;
}

View File

@@ -1,5 +1,6 @@
export const SYSTEM_PROMPT_HEADER = 'System-Prompt-Header';
export const NEW_CHATID_HEADER = 'Chat-Id-Header';
export const NEW_CHATID_HEADER = 'response-new-chat-id';
export const QUOTE_LEN_HEADER = 'response-quote-len';
export const GUIDE_PROMPT_HEADER = 'response-guide-prompt';
export enum ChatRoleEnum {
System = 'System',

View File

@@ -122,15 +122,15 @@ export const ModelVectorSearchModeMap: Record<
> = {
[ModelVectorSearchModeEnum.hightSimilarity]: {
text: '高相似度, 无匹配时拒绝回复',
similarity: 0.2
similarity: 0.18
},
[ModelVectorSearchModeEnum.noContext]: {
text: '高相似度,无匹配时直接回复',
similarity: 0.2
similarity: 0.18
},
[ModelVectorSearchModeEnum.lowSimilarity]: {
text: '低相似度匹配',
similarity: 0.8
similarity: 0.7
}
};

View File

@@ -2,3 +2,13 @@ export enum SplitTextTypEnum {
'qa' = 'qa',
'subsection' = 'subsection'
}
export enum PluginTypeEnum {
LLM = 'LLM',
Text = 'Text',
Function = 'Function'
}
export enum PluginParamsTypeEnum {
'Text' = 'text'
}

View File

@@ -27,6 +27,6 @@ export enum PromotionEnum {
export const PromotionTypeMap = {
[PromotionEnum.invite]: '好友充值',
[PromotionEnum.shareModel]: 'AI助手分享',
[PromotionEnum.shareModel]: '应用分享',
[PromotionEnum.withdraw]: '提现'
};

View File

@@ -4,7 +4,6 @@ import { IconButton, Flex, Box, Input } from '@chakra-ui/react';
import { ArrowBackIcon, ArrowForwardIcon } from '@chakra-ui/icons';
import { useMutation } from '@tanstack/react-query';
import { useToast } from './useToast';
import { useQuery } from '@tanstack/react-query';
export const usePagination = <T = any,>({
api,

View File

@@ -55,7 +55,7 @@ export default function App({ Component, pageProps }: AppProps) {
<Script src="/js/qrcode.min.js" strategy="lazyOnload"></Script>
<Script src="/js/pdf.js" strategy="lazyOnload"></Script>
<Script src="/js/html2pdf.bundle.min.js" strategy="lazyOnload"></Script>
<Script src="/js/particles.js" strategy="lazyOnload"></Script>
<Script src="/js/particles.js"></Script>
<QueryClientProvider client={queryClient}>
<ChakraProvider theme={theme}>
<ColorModeScript initialColorMode={theme.config.initialColorMode} />

View File

@@ -2,18 +2,24 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authChat } from '@/service/utils/auth';
import { modelServiceToolMap } from '@/service/utils/chat';
import { ChatItemSimpleType } from '@/types/chat';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { resStreamResponse } from '@/service/utils/chat';
import { searchKb } from '@/service/plugins/searchKb';
import { ChatRoleEnum } from '@/constants/chat';
import { appKbSearch } from '../openapi/kb/appKbSearch';
import { ChatRoleEnum, QUOTE_LEN_HEADER, GUIDE_PROMPT_HEADER } from '@/constants/chat';
import { BillTypeEnum } from '@/constants/user';
import { sensitiveCheck } from '@/service/api/text';
import { NEW_CHATID_HEADER } from '@/constants/chat';
import { saveChat } from './saveChat';
import { Types } from 'mongoose';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
let step = 0; // step=1时表示开始了流响应
res.on('close', () => {
res.end();
});
res.on('error', () => {
console.log('error: ', 'request error');
res.end();
@@ -21,9 +27,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
try {
const { chatId, prompt, modelId } = req.body as {
prompt: ChatItemSimpleType;
prompt: [ChatItemType, ChatItemType];
modelId: string;
chatId: '' | string;
chatId?: string;
};
if (!modelId || !prompt) {
@@ -43,86 +49,143 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const modelConstantsData = ChatModelMap[model.chat.chatModel];
// 读取对话内容
const prompts = [...content, prompt];
const prompts = [...content, prompt[0]];
const {
code = 200,
systemPrompts = [],
quote = [],
guidePrompt = ''
} = await (async () => {
// 使用了知识库搜索
if (model.chat.relatedKbs.length > 0) {
const { code, searchPrompts, rawSearch, guidePrompt } = await appKbSearch({
model,
userId,
prompts,
similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity
});
// 使用了知识库搜索
if (model.chat.relatedKbs.length > 0) {
const { code, searchPrompts } = await searchKb({
userOpenAiKey,
prompts,
similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity,
model,
return {
code,
quote: rawSearch,
systemPrompts: searchPrompts,
guidePrompt
};
}
if (model.chat.systemPrompt) {
return {
guidePrompt: model.chat.systemPrompt,
systemPrompts: [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
};
}
return {};
})();
// get conversationId. create a newId if it is null
const conversationId = chatId || String(new Types.ObjectId());
!chatId && res.setHeader(NEW_CHATID_HEADER, conversationId);
if (showModelDetail) {
guidePrompt && res.setHeader(GUIDE_PROMPT_HEADER, encodeURIComponent(guidePrompt));
res.setHeader(QUOTE_LEN_HEADER, quote.length);
}
// search result is empty
if (code === 201) {
const response = systemPrompts[0]?.value;
await saveChat({
chatId,
newChatId: conversationId,
modelId,
prompts: [
prompt[0],
{
...prompt[1],
quote: [],
value: response
}
],
userId
});
// search result is empty
if (code === 201) {
return res.send(searchPrompts[0]?.value);
}
prompts.splice(prompts.length - 3, 0, ...searchPrompts);
} else {
// 没有用知识库搜索,仅用系统提示词
model.chat.systemPrompt &&
prompts.splice(prompts.length - 3, 0, {
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
});
return res.end(response);
}
prompts.splice(prompts.length - 3, 0, ...systemPrompts);
// content check
await sensitiveCheck({
input: [...systemPrompts, prompt[0]].map((item) => item.value).join('')
});
// 计算温度
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
);
// 发出请求
// 发出 chat 请求
const { streamResponse } = await modelServiceToolMap[model.chat.chatModel].chatCompletion({
apiKey: userOpenAiKey || systemAuthKey,
temperature: +temperature,
messages: prompts,
stream: true,
res,
chatId
chatId: conversationId
});
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
step = 1;
if (res.closed) return res.end();
const { totalTokens, finishMessages } = await resStreamResponse({
model: model.chat.chatModel,
res,
chatResponse: streamResponse,
prompts,
systemPrompt: showModelDetail
? prompts
.filter((item) => item.obj === ChatRoleEnum.System)
.map((item) => item.value)
.join('\n')
: ''
});
// 只有使用平台的 key 才计费
pushChatBill({
isPay: !userOpenAiKey,
chatModel: model.chat.chatModel,
userId,
chatId,
textLen: finishMessages.map((item) => item.value).join('').length,
tokens: totalTokens,
type: BillTypeEnum.chat
});
} catch (err: any) {
if (step === 1) {
// 直接结束流
res.end();
console.log('error结束');
} else {
res.status(500);
jsonRes(res, {
code: 500,
error: err
try {
const { totalTokens, finishMessages, responseContent } = await resStreamResponse({
model: model.chat.chatModel,
res,
chatResponse: streamResponse,
prompts
});
// save chat
await saveChat({
chatId,
newChatId: conversationId,
modelId,
prompts: [
prompt[0],
{
...prompt[1],
value: responseContent,
quote: showModelDetail ? quote : [],
systemPrompt: showModelDetail ? guidePrompt : ''
}
],
userId
});
res.end();
// 只有使用平台的 key 才计费
pushChatBill({
isPay: !userOpenAiKey,
chatModel: model.chat.chatModel,
userId,
chatId: conversationId,
textLen: finishMessages.map((item) => item.value).join('').length,
tokens: totalTokens,
type: BillTypeEnum.chat
});
} catch (error) {
res.end();
console.log('error结束', error);
}
} catch (err: any) {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}

View File

@@ -0,0 +1,49 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Chat } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { Types } from 'mongoose';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { chatId, historyId } = req.query as { chatId: string; historyId: string };
await connectToDatabase();
const { userId } = await authUser({ req, authToken: true });
if (!chatId || !historyId) {
throw new Error('params is error');
}
const history = await Chat.aggregate([
{
$match: {
_id: new Types.ObjectId(chatId),
userId: new Types.ObjectId(userId)
}
},
{
$unwind: '$content'
},
{
$match: {
'content._id': new Types.ObjectId(historyId)
}
},
{
$project: {
quote: '$content.quote'
}
}
]);
jsonRes(res, {
data: history[0]?.quote || []
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}

View File

@@ -25,7 +25,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const myModel = await Model.findOne({ userId });
if (!myModel) {
const { _id } = await Model.create({
name: 'AI助手1',
name: '应用1',
userId,
status: ModelStatusEnum.running
});
@@ -73,7 +73,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
_id: '$content._id',
obj: '$content.obj',
value: '$content.value',
systemPrompt: '$content.systemPrompt'
systemPrompt: '$content.systemPrompt',
quoteLen: { $size: { $ifNull: ['$content.quote', []] } }
}
}
]);

View File

@@ -6,15 +6,17 @@ import { authModel } from '@/service/utils/auth';
import { authUser } from '@/service/utils/auth';
import mongoose from 'mongoose';
type Props = {
newChatId?: string;
chatId?: string;
modelId: string;
prompts: [ChatItemType, ChatItemType];
};
/* 聊天内容存存储 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { chatId, modelId, prompts, newChatId } = req.body as {
newChatId: '' | string;
chatId: '' | string;
modelId: string;
prompts: [ChatItemType, ChatItemType];
};
const { chatId, modelId, prompts, newChatId } = req.body as Props;
if (!prompts) {
throw new Error('缺少参数');
@@ -22,44 +24,17 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const { userId } = await authUser({ req, authToken: true });
await connectToDatabase();
const nId = await saveChat({
chatId,
modelId,
prompts,
newChatId,
userId
});
const content = prompts.map((item) => ({
_id: new mongoose.Types.ObjectId(item._id),
obj: item.obj,
value: item.value,
systemPrompt: item.systemPrompt
}));
await authModel({ modelId, userId, authOwner: false });
// 没有 chatId, 创建一个对话
if (!chatId) {
const { _id } = await Chat.create({
_id: newChatId ? new mongoose.Types.ObjectId(newChatId) : undefined,
userId,
modelId,
content,
title: content[0].value.slice(0, 20),
latestChat: content[1].value
});
return jsonRes(res, {
data: _id
});
} else {
// 已经有记录,追加入库
await Chat.findByIdAndUpdate(chatId, {
$push: {
content: {
$each: content
}
},
title: content[0].value.slice(0, 20),
latestChat: content[1].value,
updateTime: new Date()
});
}
jsonRes(res);
jsonRes(res, {
data: nId
});
} catch (err) {
jsonRes(res, {
code: 500,
@@ -67,3 +42,47 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
});
}
}
/**
 * Persist a pair of chat prompts (user question + AI answer) for a model.
 * Creates a new Chat document when no chatId is given (optionally honoring a
 * caller-supplied newChatId), otherwise appends to the existing conversation.
 * @returns the conversation's Chat document id.
 */
export async function saveChat({
  chatId,
  newChatId,
  modelId,
  prompts,
  userId
}: Props & { userId: string }) {
  await connectToDatabase();
  // Ensure the user may write to this model (ownership not required).
  await authModel({ modelId, userId, authOwner: false });

  const content = prompts.map((item) => ({
    _id: item._id ? new mongoose.Types.ObjectId(item._id) : undefined,
    obj: item.obj,
    value: item.value,
    systemPrompt: item.systemPrompt,
    quote: item.quote || []
  }));

  // 没有 chatId, 创建一个对话
  if (!chatId) {
    const { _id } = await Chat.create({
      _id: newChatId ? new mongoose.Types.ObjectId(newChatId) : undefined,
      userId,
      modelId,
      content,
      title: content[0].value.slice(0, 20),
      latestChat: content[1].value
    });
    return _id;
  }

  // 已经有记录,追加入库
  await Chat.findByIdAndUpdate(chatId, {
    $push: {
      content: {
        $each: content
      }
    },
    title: content[0].value.slice(0, 20),
    latestChat: content[1].value,
    updateTime: new Date()
  });
  // Fix: this branch previously returned undefined; return the id so callers
  // always receive the conversation id regardless of which branch ran.
  return chatId;
}

View File

@@ -7,13 +7,13 @@ import { jsonRes } from '@/service/response';
import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
import { pushChatBill, updateShareChatBill } from '@/service/events/pushBill';
import { resStreamResponse } from '@/service/utils/chat';
import { searchKb } from '@/service/plugins/searchKb';
import { ChatRoleEnum } from '@/constants/chat';
import { BillTypeEnum } from '@/constants/user';
import { sensitiveCheck } from '@/service/api/text';
import { appKbSearch } from '../../openapi/kb/appKbSearch';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
let step = 0; // step=1 时,表示开始了流响应
res.on('error', () => {
console.log('error: ', 'request error');
res.end();
@@ -41,31 +41,46 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const modelConstantsData = ChatModelMap[model.chat.chatModel];
// 使用了知识库搜索
if (model.chat.relatedKbs.length > 0) {
const { code, searchPrompts } = await searchKb({
userOpenAiKey,
prompts,
similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity,
model,
userId
});
// search result is empty
if (code === 201) {
return res.send(searchPrompts[0]?.value);
}
prompts.splice(prompts.length - 3, 0, ...searchPrompts);
} else {
// 没有用知识库搜索,仅用系统提示词
model.chat.systemPrompt &&
prompts.splice(prompts.length - 3, 0, {
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
const { code = 200, systemPrompts = [] } = await (async () => {
// 使用了知识库搜索
if (model.chat.relatedKbs.length > 0) {
const { code, searchPrompts } = await appKbSearch({
model,
userId,
prompts,
similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity
});
return {
code,
systemPrompts: searchPrompts
};
}
if (model.chat.systemPrompt) {
return {
systemPrompts: [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
};
}
return {};
})();
// search result is empty
if (code === 201) {
return res.send(systemPrompts[0]?.value);
}
prompts.splice(prompts.length - 3, 0, ...systemPrompts);
// content check
await sensitiveCheck({
input: [...systemPrompts, prompts[prompts.length - 1]].map((item) => item.value).join('')
});
// 计算温度
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
@@ -83,40 +98,40 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
step = 1;
if (res.closed) return res.end();
const { totalTokens, finishMessages } = await resStreamResponse({
model: model.chat.chatModel,
res,
chatResponse: streamResponse,
prompts,
systemPrompt: ''
});
/* bill */
pushChatBill({
isPay: !userOpenAiKey,
chatModel: model.chat.chatModel,
userId,
textLen: finishMessages.map((item) => item.value).join('').length,
tokens: totalTokens,
type: BillTypeEnum.chat
});
updateShareChatBill({
shareId,
tokens: totalTokens
});
} catch (err: any) {
if (step === 1) {
// 直接结束流
res.end();
console.log('error结束');
} else {
res.status(500);
jsonRes(res, {
code: 500,
error: err
try {
const { totalTokens, finishMessages } = await resStreamResponse({
model: model.chat.chatModel,
res,
chatResponse: streamResponse,
prompts
});
res.end();
/* bill */
pushChatBill({
isPay: !userOpenAiKey,
chatModel: model.chat.chatModel,
userId,
textLen: finishMessages.map((item) => item.value).join('').length,
tokens: totalTokens,
type: BillTypeEnum.chat
});
updateShareChatBill({
shareId,
tokens: totalTokens
});
} catch (error) {
res.end();
console.log('error结束', error);
}
} catch (err: any) {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}

View File

@@ -0,0 +1,51 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Chat } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { Types } from 'mongoose';
/**
 * Mark a single quote entry inside a chat history item as edited.
 * Query params: chatId, historyId (content item id) and quoteId (quote entry id).
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    const { chatId, historyId, quoteId } = req.query as {
      chatId: string;
      historyId: string;
      quoteId: string;
    };

    await connectToDatabase();
    const { userId } = await authUser({ req, authToken: true });

    if (!chatId || !historyId || !quoteId) {
      throw new Error('params is error');
    }

    // Locate the user's chat and the content item, then flip isEdit on the
    // matching quote element via an array filter.
    const filter = {
      _id: new Types.ObjectId(chatId),
      userId: new Types.ObjectId(userId),
      'content._id': new Types.ObjectId(historyId)
    };
    const update = {
      $set: {
        'content.$.quote.$[quoteElem].isEdit': true
      }
    };
    const options = {
      arrayFilters: [
        {
          'quoteElem.id': quoteId
        }
      ]
    };
    await Chat.updateOne(filter, update, options);

    jsonRes(res, {
      data: ''
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -1,37 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Collection } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import type { ShareModelItem } from '@/types/model';
/* Get the list of shared models the current user has collected. */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    // Credential check: resolve the user from the auth token.
    const { userId } = await authUser({ req, authToken: true });
    await connectToDatabase();
    // get my collections, joining the referenced model's display fields
    const collections = await Collection.find({
      userId
    }).populate('modelId', '_id avatar name userId share');
    jsonRes<ShareModelItem[]>(res, {
      data: collections
        .map((item: any) => ({
          _id: item.modelId?._id,
          avatar: item.modelId?.avatar || '/icon/logo.png',
          name: item.modelId?.name || '',
          userId: item.modelId?.userId || '',
          share: item.modelId?.share || {},
          isCollection: true
        }))
        // only expose models whose owner enabled sharing
        .filter((item) => item.share.isShare)
    });
  } catch (err) {
    // NOTE(review): errors are swallowed and an empty list is returned —
    // presumably intentional best-effort behavior; confirm before changing.
    jsonRes(res, {
      data: []
    });
  }
}

View File

@@ -3,6 +3,8 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase, Collection, Model } from '@/service/mongo';
import type { PagingData } from '@/types';
import type { ShareModelItem } from '@/types/model';
import { parseCookie } from '@/service/utils/auth';
import { Types } from 'mongoose';
/* 获取模型列表 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -15,6 +17,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
await connectToDatabase();
let userId = '';
try {
userId = await parseCookie(req.headers.cookie);
} catch (error) {
error;
}
const regex = new RegExp(searchText, 'i');
const where = {
@@ -23,15 +33,58 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
{ $or: [{ name: { $regex: regex } }, { 'share.intro': { $regex: regex } }] }
]
};
const pipeline = [
{
$match: where
},
{
$lookup: {
from: 'collections',
let: { modelId: '$_id' },
pipeline: [
{
$match: {
$expr: {
$and: [
{ $eq: ['$modelId', '$$modelId'] },
{
$eq: ['$userId', userId ? new Types.ObjectId(userId) : new Types.ObjectId()]
}
]
}
}
}
],
as: 'collections'
}
},
{
$project: {
_id: 1,
avatar: { $ifNull: ['$avatar', '/icon/logo.png'] },
name: 1,
userId: 1,
share: 1,
isCollection: {
$cond: { if: { $gt: [{ $size: '$collections' }, 0] }, then: true, else: false }
}
}
},
{
$sort: { 'share.collection': -1 }
},
{
$skip: (pageNum - 1) * pageSize
},
{
$limit: pageSize
}
];
// 获取被分享的模型
const [models, total] = await Promise.all([
Model.find(where, '_id avatar name userId share')
.sort({
'share.collection': -1
})
.limit(pageSize)
.skip((pageNum - 1) * pageSize),
// @ts-ignore
Model.aggregate(pipeline),
Model.countDocuments(where)
]);
@@ -39,14 +92,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
data: {
pageNum,
pageSize,
data: models.map((item) => ({
_id: item._id,
avatar: item.avatar || '/icon/logo.png',
name: item.name,
userId: item.userId,
share: item.share,
isCollection: false
})),
data: models,
total
}
});

View File

@@ -6,14 +6,19 @@ import { ChatItemSimpleType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { searchKb } from '@/service/plugins/searchKb';
import { ChatRoleEnum } from '@/constants/chat';
import { withNextCors } from '@/service/utils/tools';
import { BillTypeEnum } from '@/constants/user';
import { sensitiveCheck } from '@/service/api/text';
import { NEW_CHATID_HEADER } from '@/constants/chat';
import { Types } from 'mongoose';
import { appKbSearch } from '../kb/appKbSearch';
/* 发送提示词 */
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse) {
let step = 0; // step=1时表示开始了流响应
res.on('close', () => {
res.end();
});
res.on('error', () => {
console.log('error: ', 'request error');
res.end();
@@ -62,13 +67,16 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
const modelConstantsData = ChatModelMap[model.chat.chatModel];
let systemPrompts: {
obj: ChatRoleEnum;
value: string;
}[] = [];
// 使用了知识库搜索
if (model.chat.relatedKbs.length > 0) {
const similarity = ModelVectorSearchModeMap[model.chat.searchMode]?.similarity || 0.22;
const { code, searchPrompts } = await searchKb({
const { code, searchPrompts } = await appKbSearch({
prompts,
similarity,
similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity,
model,
userId
});
@@ -77,23 +85,38 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
if (code === 201) {
return isStream
? res.send(searchPrompts[0]?.value)
: jsonRes(res, { data: searchPrompts[0]?.value });
: jsonRes(res, {
data: searchPrompts[0]?.value,
message: searchPrompts[0]?.value
});
}
prompts.splice(prompts.length - 3, 0, ...searchPrompts);
} else {
// 没有用知识库搜索,仅用系统提示词
model.chat.systemPrompt &&
prompts.splice(prompts.length - 3, 0, {
systemPrompts = searchPrompts;
} else if (model.chat.systemPrompt) {
systemPrompts = [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
});
}
];
}
prompts.splice(prompts.length - 3, 0, ...systemPrompts);
// content check
await sensitiveCheck({
input: [...systemPrompts, prompts[prompts.length - 1]].map((item) => item.value).join('')
});
// 计算温度
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
);
// get conversationId. create a newId if it is null
const conversationId = chatId || String(new Types.ObjectId());
!chatId && res?.setHeader(NEW_CHATID_HEADER, conversationId);
// 发出请求
const { streamResponse, responseMessages, responseText, totalTokens } =
await modelServiceToolMap[model.chat.chatModel].chatCompletion({
@@ -102,30 +125,41 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
messages: prompts,
stream: isStream,
res,
chatId
chatId: conversationId
});
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
let textLen = 0;
let tokens = totalTokens;
if (res.closed) return res.end();
if (isStream) {
step = 1;
const { finishMessages, totalTokens } = await resStreamResponse({
model: model.chat.chatModel,
res,
chatResponse: streamResponse,
prompts
});
textLen = finishMessages.map((item) => item.value).join('').length;
tokens = totalTokens;
} else {
textLen = responseMessages.map((item) => item.value).join('').length;
jsonRes(res, {
data: responseText
});
}
const { textLen = 0, tokens = totalTokens } = await (async () => {
if (isStream) {
try {
const { finishMessages, totalTokens } = await resStreamResponse({
model: model.chat.chatModel,
res,
chatResponse: streamResponse,
prompts
});
res.end();
return {
textLen: finishMessages.map((item) => item.value).join('').length,
tokens: totalTokens
};
} catch (error) {
res.end();
console.log('error结束', error);
}
} else {
jsonRes(res, {
data: responseText
});
return {
textLen: responseMessages.map((item) => item.value).join('').length
};
}
return {};
})();
pushChatBill({
isPay: true,
@@ -136,16 +170,10 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
type: BillTypeEnum.openapiChat
});
} catch (err: any) {
if (step === 1) {
// 直接结束流
res.end();
console.log('error结束');
} else {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
});

View File

@@ -0,0 +1,221 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import type { ChatItemSimpleType } from '@/types/chat';
import type { ModelSchema } from '@/types/mongoSchema';
import { ModelVectorSearchModeEnum } from '@/constants/model';
import { authModel } from '@/service/utils/auth';
import { ChatModelMap } from '@/constants/model';
import { ChatRoleEnum } from '@/constants/chat';
import { openaiEmbedding } from '../plugin/openaiEmbedding';
import { ModelDataStatusEnum } from '@/constants/model';
import { modelToolMap } from '@/utils/plugin';
export type QuoteItemType = { id: string; q: string; a: string; isEdit: boolean };
type Props = {
prompts: ChatItemSimpleType[];
similarity: number;
appId: string;
};
type Response = {
code: 200 | 201;
rawSearch: QuoteItemType[];
guidePrompt: string;
searchPrompts: {
obj: ChatRoleEnum;
value: string;
}[];
};
/**
 * HTTP entry for knowledge-base search: authenticates the caller, validates
 * the payload, authorizes the target app and returns the search prompts.
 */
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    const { userId } = await authUser({ req });
    if (!userId) {
      throw new Error('userId is empty');
    }

    const body = req.body as Props;
    const { prompts, similarity, appId } = body;
    const paramsValid = Boolean(similarity) && Array.isArray(prompts) && Boolean(appId);
    if (!paramsValid) {
      throw new Error('params is error');
    }

    // The caller must have access to the app (model) being searched.
    const { model } = await authModel({
      modelId: appId,
      userId
    });

    jsonRes<Response>(res, {
      data: await appKbSearch({
        userId,
        prompts,
        similarity,
        model
      })
    });
  } catch (err) {
    console.log(err);
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
});
/**
 * Search the app's related knowledge bases for content relevant to the last
 * one or two user prompts and build the system prompts injected into the chat.
 *
 * Flow: embed the latest user question(s) -> vector-search each related KB in
 * pg -> de-duplicate across searches -> token-slice the hits to fit the
 * model's system token budget -> assemble the guide + knowledge prompts.
 *
 * @returns code 201 when high-similarity mode found nothing (caller should
 * reply with the canned refusal), otherwise code 200 with the prompts.
 */
export async function appKbSearch({
  model,
  userId,
  prompts,
  similarity
}: {
  userId: string;
  prompts: ChatItemSimpleType[];
  similarity: number;
  model: ModelSchema;
}): Promise<Response> {
  const modelConstantsData = ChatModelMap[model.chat.chatModel];

  // search two times: the last user prompt and, when present, the one before it.
  const userPrompts = prompts.filter((item) => item.obj === 'Human');
  // Robustness: previously an empty prompt list crashed with a TypeError on
  // `.value`; fail with an explicit error instead.
  if (userPrompts.length === 0) {
    throw new Error('user prompts is empty');
  }
  const input: string[] = [
    userPrompts[userPrompts.length - 1].value,
    userPrompts[userPrompts.length - 2]?.value
  ].filter((item) => item);

  // get vector
  const promptVectors = await openaiEmbedding({
    userId,
    input
  });

  // search kb: one vector search per prompt vector, restricted to ready rows
  // in the app's related KBs and within the similarity threshold.
  const searchRes = await Promise.all(
    promptVectors.map((promptVector) =>
      PgClient.select<QuoteItemType>('modelData', {
        fields: ['id', 'q', 'a'],
        where: [
          ['status', ModelDataStatusEnum.ready],
          'AND',
          `kb_id IN (${model.chat.relatedKbs.map((item) => `'${item}'`).join(',')})`,
          'AND',
          `vector <=> '[${promptVector}]' < ${similarity}`
        ],
        order: [{ field: 'vector', mode: `<=> '[${promptVector}]'` }],
        limit: promptVectors.length === 1 ? 15 : 10
      }).then((res) => res.rows)
    )
  );

  // filter same search result (dedupe by row id across both searches)
  const idSet = new Set<string>();
  const filterSearch = searchRes.map((search) =>
    search.filter((item) => {
      if (idSet.has(item.id)) {
        return false;
      }
      idSet.add(item.id);
      return true;
    })
  );

  // slice search result by rate: the latest question gets the larger share.
  const sliceRateMap: Record<number, number[]> = {
    1: [1],
    2: [0.7, 0.3]
  };
  // Fix: the fallback previously read sliceRateMap[0], which is undefined and
  // would crash on `.map`; fall back to the single-query rate instead.
  const sliceRate = sliceRateMap[searchRes.length] || sliceRateMap[1];

  // Count the tokens consumed by the fixed guide prompt so the knowledge
  // slices can use the remaining system-token budget.
  const guidePrompt = model.chat.systemPrompt // user system prompt
    ? {
        obj: ChatRoleEnum.System,
        value: model.chat.systemPrompt
      }
    : model.chat.searchMode === ModelVectorSearchModeEnum.noContext
    ? {
        obj: ChatRoleEnum.System,
        value: `知识库是关于"${model.name}"的内容,根据知识库内容回答问题.`
      }
    : {
        obj: ChatRoleEnum.System,
        value: `玩一个问答游戏,规则为:
1.你完全忘记你已有的知识
2.你只回答关于"${model.name}"的问题
3.你只从知识库中选择内容进行回答
4.如果问题不在知识库中,你会回答:"我不知道。"
请务必遵守规则`
      };
  const fixedSystemTokens = modelToolMap[model.chat.chatModel].countTokens({
    messages: [guidePrompt]
  });
  const maxTokens = modelConstantsData.systemMaxToken - fixedSystemTokens;

  // Token-slice each search's hits to its share of the budget; keep only the
  // text values for the system prompt.
  const sliceResult = sliceRate.map((rate, i) =>
    modelToolMap[model.chat.chatModel]
      .tokenSlice({
        maxToken: Math.round(maxTokens * rate),
        messages: filterSearch[i].map((item) => ({
          obj: ChatRoleEnum.System,
          value: `${item.q}\n${item.a}`
        }))
      })
      .map((item) => item.value)
  );

  // slice filterSearch to the rows that survived the token slicing
  const sliceSearch = filterSearch.map((item, i) => item.slice(0, sliceResult[i].length)).flat();

  // system prompt
  const systemPrompt = sliceResult.flat().join('\n').trim();

  /* High-similarity mode with no hits: refuse to answer (code 201). */
  if (!systemPrompt && model.chat.searchMode === ModelVectorSearchModeEnum.hightSimilarity) {
    return {
      code: 201,
      rawSearch: [],
      guidePrompt: '',
      searchPrompts: [
        {
          obj: ChatRoleEnum.System,
          value: '对不起,你的问题不在知识库中。'
        }
      ]
    };
  }
  /* No-context mode with no hits: add no knowledge, keep only the user's system prompt. */
  if (!systemPrompt && model.chat.searchMode === ModelVectorSearchModeEnum.noContext) {
    return {
      code: 200,
      rawSearch: [],
      guidePrompt: model.chat.systemPrompt || '',
      searchPrompts: model.chat.systemPrompt
        ? [
            {
              obj: ChatRoleEnum.System,
              value: model.chat.systemPrompt
            }
          ]
        : []
    };
  }

  return {
    code: 200,
    rawSearch: sliceSearch,
    guidePrompt: guidePrompt.value || '',
    searchPrompts: [
      {
        obj: ChatRoleEnum.System,
        value: `知识库:${systemPrompt}`
      },
      guidePrompt
    ]
  };
}

View File

@@ -4,7 +4,7 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { generateVector } from '@/service/events/generateVector';
import { PgClient } from '@/service/pg';
import { PgClient, insertKbItem } from '@/service/pg';
import { authKb } from '@/service/utils/auth';
import { withNextCors } from '@/service/utils/tools';
@@ -68,14 +68,10 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
.map<{ q: string; a: string }>((item: any) => item.value);
// 插入记录
const insertRes = await PgClient.insert('modelData', {
values: filterData.map((item) => [
{ key: 'user_id', value: userId },
{ key: 'kb_id', value: kbId },
{ key: 'q', value: item.q },
{ key: 'a', value: item.a },
{ key: 'status', value: 'waiting' }
])
const insertRes = await insertKbItem({
userId,
kbId,
data: filterData
});
generateVector();

View File

@@ -0,0 +1,77 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import { getApiKey } from '@/service/utils/auth';
import { getOpenAIApi } from '@/service/utils/chat/openai';
import { embeddingModel } from '@/constants/model';
import { axiosConfig } from '@/service/utils/tools';
import { pushGenerateVectorBill } from '@/service/events/pushBill';
type Props = {
input: string[];
};
type Response = number[][];
/**
 * API entry: embed the given texts with OpenAI and return the vectors.
 * Billing is always charged (mustPay: true) for calls through this endpoint.
 */
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    const { userId } = await authUser({ req });
    // NOTE(review): `input` is read from req.query but validated as an array —
    // that only holds when the param is repeated (?input=a&input=b); confirm
    // whether callers actually send it via query rather than the body.
    let { input } = req.query as Props;
    if (!Array.isArray(input)) {
      throw new Error('缺少参数');
    }
    jsonRes<Response>(res, {
      data: await openaiEmbedding({ userId, input, mustPay: true })
    });
  } catch (err) {
    console.log(err);
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
});
/**
 * Turn a batch of texts into embedding vectors via the OpenAI embeddings API.
 * Prefers the user's own API key; falls back to the system key. Records a
 * vector-generation bill (free when the user supplied their own key).
 */
export async function openaiEmbedding({
  userId,
  input,
  mustPay = false
}: { userId: string; mustPay?: boolean } & Props) {
  const { userOpenAiKey, systemAuthKey } = await getApiKey({
    model: 'gpt-3.5-turbo',
    userId,
    mustPay
  });

  // Build the API client and request embeddings for the whole batch at once.
  const chatAPI = getOpenAIApi();
  const response = await chatAPI.createEmbedding(
    {
      model: embeddingModel,
      input
    },
    {
      timeout: 60000,
      ...axiosConfig(userOpenAiKey || systemAuthKey)
    }
  );

  const tokenLen = response.data.usage.total_tokens || 0;
  const vectors = response.data.data.map((item) => item.embedding);

  // Bill the platform only when the system key was used.
  pushGenerateVectorBill({
    isPay: !userOpenAiKey,
    userId,
    text: input.join(''),
    tokenLen
  });

  return vectors;
}

View File

@@ -0,0 +1,66 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import type { ChatItemSimpleType } from '@/types/chat';
import { countOpenAIToken } from '@/utils/plugin/openai';
type ModelType = 'gpt-3.5-turbo' | 'gpt-4' | 'gpt-4-32k';
type Props = {
messages: ChatItemSimpleType[];
model: ModelType;
maxLen: number;
};
type Response = ChatItemSimpleType[];
/**
 * API entry: trim a message list so its token count stays under maxLen for
 * the given model, returning the kept prefix.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    await authUser({ req });

    const { messages, model, maxLen } = req.body as Props;
    const paramsValid = Array.isArray(messages) && Boolean(model) && Boolean(maxLen);
    if (!paramsValid) {
      throw new Error('params is error');
    }

    const data = gpt_chatItemTokenSlice({
      messages,
      model,
      maxToken: maxLen
    });
    return jsonRes<Response>(res, { data });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}
/**
 * Keep the longest prefix of `messages` whose token count (as measured by
 * countOpenAIToken for `model`) stays strictly below `maxToken`.
 */
export function gpt_chatItemTokenSlice({
  messages,
  model,
  maxToken
}: {
  messages: ChatItemSimpleType[];
  model: ModelType;
  maxToken: number;
}) {
  let kept: ChatItemSimpleType[] = [];
  for (const message of messages) {
    const candidate = kept.concat(message);
    // Stop at the first message that would push the running total to the limit.
    if (countOpenAIToken({ messages: candidate, model }) >= maxToken) {
      break;
    }
    kept = candidate;
  }
  return kept;
}

View File

@@ -0,0 +1,48 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser, getSystemOpenAiKey } from '@/service/utils/auth';
import type { TextPluginRequestParams } from '@/types/plugin';
import axios from 'axios';
import { axiosConfig } from '@/service/utils/tools';
/**
 * Content-moderation endpoint: forwards the text to the OpenAI moderation API
 * and rejects it when any category score exceeds the 0.2 threshold.
 * Skipped entirely unless the SENSITIVE_CHECK env flag is set to '1'.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    // Moderation disabled: accept everything.
    if (process.env.SENSITIVE_CHECK !== '1') {
      return jsonRes(res);
    }

    await authUser({ req });
    const { input } = req.body as TextPluginRequestParams;

    const moderation = await axios({
      ...axiosConfig(getSystemOpenAiKey()),
      method: 'POST',
      url: `/moderations`,
      data: {
        input
      }
    });

    const scores =
      (moderation.data.results?.[0]?.category_scores as Record<string, number>) || {};
    const flagged = Object.values(scores).some((score) => score > 0.2);
    if (flagged) {
      return jsonRes(res, {
        code: 500,
        message: '您的内容不合规'
      });
    }

    jsonRes(res);
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -4,7 +4,7 @@ import { connectToDatabase, SplitData } from '@/service/mongo';
import { authKb, authUser } from '@/service/utils/auth';
import { generateVector } from '@/service/events/generateVector';
import { generateQA } from '@/service/events/generateQA';
import { PgClient } from '@/service/pg';
import { insertKbItem } from '@/service/pg';
import { SplitTextTypEnum } from '@/constants/plugin';
import { withNextCors } from '@/service/utils/tools';
@@ -43,14 +43,13 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
} else if (mode === SplitTextTypEnum.subsection) {
// 待优化,直接调用另一个接口
// 插入记录
await PgClient.insert('modelData', {
values: chunks.map((item) => [
{ key: 'user_id', value: userId },
{ key: 'kb_id', value: kbId },
{ key: 'q', value: item },
{ key: 'a', value: '' },
{ key: 'status', value: 'waiting' }
])
await insertKbItem({
userId,
kbId,
data: chunks.map((item) => ({
q: item,
a: ''
}))
});
generateVector();

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { connectToDatabase, User } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
@@ -14,10 +14,28 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
throw new Error('缺少参数');
}
await connectToDatabase();
// 凭证校验
const { userId } = await authUser({ req, authToken: true });
await connectToDatabase();
const thirtyMinutesAgo = new Date(Date.now() - 30 * 60 * 1000);
// auth export times
const authTimes = await User.findOne(
{
_id: userId,
$or: [
{ 'limit.exportKbTime': { $exists: false } },
{ 'limit.exportKbTime': { $lte: thirtyMinutesAgo } }
]
},
'_id limit'
);
if (!authTimes) {
throw new Error('上次导出未到半小时,每半小时仅可导出一次。');
}
// 统计数据
const count = await PgClient.count('modelData', {
@@ -36,6 +54,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
item.a.replace(/\n/g, '\\n')
]);
// update export time
await User.findByIdAndUpdate(userId, {
'limit.exportKbTime': new Date()
});
jsonRes(res, {
data
});

View File

@@ -0,0 +1,39 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import type { PgKBDataItemType } from '@/types/pg';
/**
 * Fetch a single knowledge-base data row (q/a pair plus status) by id,
 * scoped to the requesting user.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    const { dataId } = req.query as {
      dataId: string;
    };
    if (!dataId) {
      throw new Error('缺少参数');
    }

    // Credential check: resolve the user from the auth token.
    const { userId } = await authUser({ req, authToken: true });
    await connectToDatabase();

    // Restrict the lookup to rows owned by this user.
    const condition: any = [['user_id', userId], 'AND', ['id', dataId]];
    const result = await PgClient.select<PgKBDataItemType>('modelData', {
      fields: ['id', 'q', 'a', 'status'],
      where: condition,
      limit: 1
    });

    jsonRes(res, {
      data: result.rows[0]
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -1,8 +1,9 @@
import React from 'react';
import { Card, Box, Image, Flex } from '@chakra-ui/react';
import { Card, Box, Flex } from '@chakra-ui/react';
import { useMarkdown } from '@/hooks/useMarkdown';
import Markdown from '@/components/Markdown';
import { LOGO_ICON } from '@/constants/chat';
import Avatar from '@/components/Avatar';
const Empty = ({
showChatProblem,
@@ -31,13 +32,7 @@ const Empty = ({
{name && (
<Card p={4} mb={10}>
<Flex mb={2} alignItems={'center'} justifyContent={'center'}>
<Image
src={avatar || LOGO_ICON}
w={'32px'}
maxH={'40px'}
objectFit={'contain'}
alt={''}
/>
<Avatar src={avatar} w={'32px'} h={'32px'} />
<Box ml={3} fontSize={'3xl'} fontWeight={'bold'}>
{name}
</Box>

View File

@@ -1,7 +1,8 @@
import React from 'react';
import { Box, Flex, Image } from '@chakra-ui/react';
import { Box, Flex } from '@chakra-ui/react';
import { useRouter } from 'next/router';
import { ModelListItemType } from '@/types/model';
import Avatar from '@/components/Avatar';
const ModelList = ({ models, modelId }: { models: ModelListItemType[]; modelId: string }) => {
const router = useRouter();
@@ -32,19 +33,13 @@ const ModelList = ({ models, modelId }: { models: ModelListItemType[]; modelId:
router.replace(`/chat?modelId=${item._id}`);
}}
>
<Image
src={item.avatar || '/icon/logo.png'}
alt=""
w={'34px'}
maxH={'50px'}
objectFit={'contain'}
/>
<Avatar src={item.avatar} w={'34px'} h={'34px'} />
<Box flex={'1 0 0'} w={0} ml={3}>
<Box className="textEllipsis" color={'myGray.1000'}>
{item.name}
</Box>
<Box className="textEllipsis" color={'myGray.400'} fontSize={'sm'}>
{item.systemPrompt || '这个AI助手没有设置提示词~'}
{item.systemPrompt || '这个 应用 没有设置提示词~'}
</Box>
</Box>
</Flex>

View File

@@ -7,8 +7,7 @@ import {
Divider,
useDisclosure,
useColorMode,
useColorModeValue,
Image
useColorModeValue
} from '@chakra-ui/react';
import { useUserStore } from '@/store/user';
import { useQuery } from '@tanstack/react-query';
@@ -17,6 +16,7 @@ import MyIcon from '@/components/Icon';
import WxConcat from '@/components/WxConcat';
import { delChatHistoryById } from '@/api/chat';
import { useChatStore } from '@/store/chat';
import Avatar from '@/components/Avatar';
const PhoneSliderBar = ({
chatId,
@@ -74,7 +74,7 @@ const PhoneSliderBar = ({
color={'white'}
>
<Flex alignItems={'center'} justifyContent={'space-between'} px={3}>
<Box flex={'0 0 50px'}>AI助手</Box>
<Box flex={'0 0 50px'}>AI应用</Box>
{/* 新对话 */}
<Button
w={'50%'}
@@ -115,7 +115,7 @@ const PhoneSliderBar = ({
onClose();
}}
>
<Image src={item.avatar || '/icon/logo.png'} mr={2} alt={''} w={'16px'} h={'16px'} />
<Avatar src={item.avatar} mr={2} w={'18px'} h={'18px'} />
<Box className={'textEllipsis'} flex={'1 0 0'} w={0}>
{item.name}
</Box>

View File

@@ -0,0 +1,175 @@
import React, { useCallback, useState } from 'react';
import {
Modal,
ModalOverlay,
ModalContent,
ModalBody,
ModalCloseButton,
ModalHeader,
Box,
useTheme
} from '@chakra-ui/react';
import { QuoteItemType } from '@/pages/api/openapi/kb/appKbSearch';
import MyIcon from '@/components/Icon';
import InputDataModal from '@/pages/kb/components/InputDataModal';
import { getKbDataItemById } from '@/api/plugins/kb';
import { useLoading } from '@/hooks/useLoading';
import { useQuery } from '@tanstack/react-query';
import { getHistoryQuote, updateHistoryQuote } from '@/api/chat';
import { useToast } from '@/hooks/useToast';
import { getErrText } from '@/utils/tools';
/**
 * Modal that lists the knowledge-base quotes attached to one chat answer.
 * The user can open a quote in InputDataModal, edit it, and the quote is then
 * marked as edited on the chat history record (updateHistoryQuote).
 *
 * Fix: the query key now includes chatId/historyId — with the bare
 * ['getHistoryQuote'] key, every chat/history shared one cache entry, so
 * opening the modal for a different answer briefly showed the previous
 * answer's quotes.
 */
const QuoteModal = ({
  historyId,
  chatId,
  onClose
}: {
  historyId: string;
  chatId: string;
  onClose: () => void;
}) => {
  const theme = useTheme();
  const { toast } = useToast();
  const { setIsLoading, Loading } = useLoading();
  // the quote currently being edited in InputDataModal (undefined = closed)
  const [editDataItem, setEditDataItem] = useState<{
    dataId: string;
    a: string;
    q: string;
  }>();
  const {
    data: quote = [],
    refetch,
    isLoading
  } = useQuery(
    // params belong in the key so each chat/history gets its own cache entry
    ['getHistoryQuote', chatId, historyId],
    () => getHistoryQuote({ historyId, chatId })
  );
  /**
   * click edit, get new kbDataItem
   */
  const onclickEdit = useCallback(
    async (item: QuoteItemType) => {
      try {
        setIsLoading(true);
        // re-fetch the kb item: the quoted copy may be stale or deleted
        const data = (await getKbDataItemById(item.id)) as QuoteItemType;
        if (!data) {
          throw new Error('该数据已被删除');
        }
        setEditDataItem({
          dataId: data.id,
          q: data.q,
          a: data.a
        });
      } catch (err) {
        toast({
          status: 'warning',
          title: getErrText(err)
        });
      }
      setIsLoading(false);
    },
    [setIsLoading, toast]
  );
  /**
   * update kbData, update mongo status and reload quotes
   */
  const updateQuoteStatus = useCallback(
    async (quoteId: string) => {
      setIsLoading(true);
      try {
        await updateHistoryQuote({
          chatId,
          historyId,
          quoteId
        });
        // reload quote
        refetch();
      } catch (err) {
        toast({
          status: 'warning',
          title: getErrText(err)
        });
      }
      setIsLoading(false);
    },
    [chatId, historyId, refetch, setIsLoading, toast]
  );
  return (
    <>
      <Modal isOpen={true} onClose={onClose}>
        <ModalOverlay />
        <ModalContent
          position={'relative'}
          maxW={'min(90vw, 700px)'}
          h={'80vh'}
          overflow={'overlay'}
        >
          <ModalHeader>
            ({quote.length})
            <Box fontSize={'sm'} fontWeight={'normal'}>
              注意: 修改知识库内容成功后
            </Box>
          </ModalHeader>
          <ModalCloseButton />
          <ModalBody pt={0} whiteSpace={'pre-wrap'} textAlign={'justify'} fontSize={'sm'}>
            {quote.map((item) => (
              <Box
                key={item.id}
                flex={'1 0 0'}
                p={2}
                borderRadius={'sm'}
                border={theme.borders.base}
                _notLast={{ mb: 2 }}
                position={'relative'}
                _hover={{ '& .edit': { display: 'flex' } }}
              >
                {item.isEdit && <Box color={'myGray.600'}>()</Box>}
                <Box>{item.q}</Box>
                <Box>{item.a}</Box>
                {/* hover-only edit affordance, anchored to the right edge */}
                <Box
                  className="edit"
                  display={'none'}
                  position={'absolute'}
                  right={0}
                  top={0}
                  bottom={0}
                  w={'40px'}
                  bg={'rgba(255,255,255,0.9)'}
                  alignItems={'center'}
                  justifyContent={'center'}
                  boxShadow={'-10px 0 10px rgba(255,255,255,1)'}
                >
                  <MyIcon
                    name={'edit'}
                    w={'18px'}
                    h={'18px'}
                    cursor={'pointer'}
                    color={'myGray.600'}
                    _hover={{
                      color: 'myBlue.700'
                    }}
                    onClick={() => onclickEdit(item)}
                  />
                </Box>
              </Box>
            ))}
          </ModalBody>
          <Loading loading={isLoading} fixed={false} />
        </ModalContent>
      </Modal>
      {editDataItem && (
        <InputDataModal
          onClose={() => setEditDataItem(undefined)}
          onSuccess={() => updateQuoteStatus(editDataItem.dataId)}
          kbId=""
          defaultValues={editDataItem}
        />
      )}
    </>
  );
};
export default QuoteModal;

View File

@@ -1,12 +1,7 @@
import React, { useCallback, useState, useRef, useMemo, useEffect, MouseEvent } from 'react';
import { useRouter } from 'next/router';
import {
getInitChatSiteInfo,
delChatRecordByIndex,
postSaveChat,
delChatHistoryById
} from '@/api/chat';
import type { ChatSiteItemType, ExportChatType } from '@/types/chat';
import { getInitChatSiteInfo, delChatRecordByIndex, delChatHistoryById } from '@/api/chat';
import type { ChatItemType, ChatSiteItemType, ExportChatType } from '@/types/chat';
import {
Textarea,
Box,
@@ -16,13 +11,13 @@ import {
MenuButton,
MenuList,
MenuItem,
Image,
Button,
Modal,
ModalOverlay,
ModalContent,
ModalBody,
ModalCloseButton,
ModalHeader,
useDisclosure,
Drawer,
DrawerOverlay,
@@ -36,7 +31,7 @@ import { useToast } from '@/hooks/useToast';
import { useGlobalStore } from '@/store/global';
import { useQuery } from '@tanstack/react-query';
import dynamic from 'next/dynamic';
import { useCopyData, voiceBroadcast, hasVoiceApi } from '@/utils/tools';
import { useCopyData, voiceBroadcast, hasVoiceApi, delay } from '@/utils/tools';
import { streamFetch } from '@/api/fetch';
import MyIcon from '@/components/Icon';
import { throttle } from 'lodash';
@@ -51,7 +46,9 @@ import { useUserStore } from '@/store/user';
import Loading from '@/components/Loading';
import Markdown from '@/components/Markdown';
import SideBar from '@/components/SideBar';
import Avatar from '@/components/Avatar';
import Empty from './components/Empty';
import QuoteModal from './components/QuoteModal';
const PhoneSliderBar = dynamic(() => import('./components/PhoneSliderBar'), {
ssr: false
@@ -78,6 +75,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
const controller = useRef(new AbortController());
const isLeavePage = useRef(false);
const [showHistoryQuote, setShowHistoryQuote] = useState<string>();
const [showSystemPrompt, setShowSystemPrompt] = useState('');
const [messageContextMenuData, setMessageContextMenuData] = useState<{
// message messageContextMenuData
@@ -173,13 +171,14 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
controller.current = abortSignal;
isLeavePage.current = false;
const prompt = {
obj: prompts[0].obj,
value: prompts[0].value
};
const prompt: ChatItemType[] = prompts.map((item) => ({
_id: item._id,
obj: item.obj,
value: item.value
}));
// 流请求,获取数据
let { responseText, systemPrompt, newChatId } = await streamFetch({
const { newChatId, quoteLen, systemPrompt } = await streamFetch({
url: '/api/chat/chat',
data: {
prompt,
@@ -207,39 +206,13 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
return;
}
// save chat record
try {
newChatId = await postSaveChat({
newChatId, // 如果有newChatId会自动以这个Id创建对话框
modelId,
chatId,
prompts: [
{
_id: prompts[0]._id,
obj: 'Human',
value: prompt.value
},
{
_id: prompts[1]._id,
obj: 'AI',
value: responseText,
systemPrompt
}
]
});
if (newChatId) {
setForbidLoadChatData(true);
router.replace(`/chat?modelId=${modelId}&chatId=${newChatId}`);
}
} catch (err) {
toast({
title: '对话出现异常, 继续对话会导致上下文丢失,请刷新页面',
status: 'warning',
duration: 3000,
isClosable: true
});
if (newChatId) {
setForbidLoadChatData(true);
router.replace(`/chat?modelId=${modelId}&chatId=${newChatId}`);
}
abortSignal.signal.aborted && (await delay(600));
// 设置聊天内容为完成状态
setChatData((state) => ({
...state,
@@ -249,27 +222,19 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
return {
...item,
status: 'finish',
quoteLen,
systemPrompt
};
})
}));
// refresh history
loadHistory({ pageNum: 1, init: true });
setTimeout(() => {
loadHistory({ pageNum: 1, init: true });
generatingMessage();
}, 100);
},
[
chatId,
setForbidLoadChatData,
generatingMessage,
loadHistory,
modelId,
router,
setChatData,
toast
]
[chatId, setForbidLoadChatData, generatingMessage, loadHistory, modelId, router, setChatData]
);
/**
@@ -595,7 +560,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
borderBottom={theme.borders.base}
onClick={() => router.push(`/model?modelId=${chatData.modelId}`)}
>
AI助手详
</MenuItem>
)}
{hasVoiceApi && (
@@ -654,9 +619,9 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
>
{!isPc && (
<MyIcon
name={'tabbarMore'}
w={'14px'}
h={'14px'}
name={'menu'}
w={'20px'}
h={'20px'}
color={useColorModeValue('blackAlpha.700', 'white')}
onClick={onOpenSlider}
/>
@@ -717,39 +682,36 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
{item.obj === 'Human' && <Box flex={1} />}
{/* avatar */}
<Menu autoSelect={false} isLazy>
<MenuButton
as={Box}
{...(item.obj === 'AI'
? {
order: 1,
mr: ['6px', 2],
cursor: 'pointer',
onClick: () =>
isPc &&
chatData.model.canUse &&
router.push(`/model?modelId=${chatData.modelId}`)
}
: {
order: 3,
ml: ['6px', 2]
})}
>
<Tooltip label={item.obj === 'AI' ? 'AI助手详情' : ''}>
<Image
<Tooltip label={item.obj === 'AI' ? '应用详情' : ''}>
<MenuButton
as={Box}
{...(item.obj === 'AI'
? {
order: 1,
mr: ['6px', 2],
cursor: 'pointer',
onClick: () =>
isPc &&
chatData.model.canUse &&
router.push(`/model?modelId=${chatData.modelId}`)
}
: {
order: 3,
ml: ['6px', 2]
})}
>
<Avatar
className="avatar"
src={
item.obj === 'Human'
? userInfo?.avatar || '/icon/human.png'
: chatData.model.avatar || LOGO_ICON
}
alt="avatar"
w={['20px', '34px']}
h={['20px', '34px']}
borderRadius={'50%'}
objectFit={'contain'}
/>
</Tooltip>
</MenuButton>
</MenuButton>
</Tooltip>
{!isPc && <RenderContextMenu history={item} index={index} AiDetail />}
</Menu>
{/* message */}
@@ -768,19 +730,35 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
isChatting={isChatting && index === chatData.history.length - 1}
formatLink
/>
{item.systemPrompt && (
<Button
size={'xs'}
mt={2}
fontWeight={'normal'}
colorScheme={'gray'}
variant={'outline'}
w={'90px'}
onClick={() => setShowSystemPrompt(item.systemPrompt || '')}
>
</Button>
)}
<Flex>
{!!item.systemPrompt && (
<Button
mt={2}
mr={3}
size={'xs'}
fontWeight={'normal'}
colorScheme={'gray'}
variant={'outline'}
px={[2, 4]}
onClick={() => setShowSystemPrompt(item.systemPrompt || '')}
>
</Button>
)}
{!!item.quoteLen && (
<Button
mt={2}
size={'xs'}
fontWeight={'normal'}
colorScheme={'gray'}
variant={'outline'}
px={[2, 4]}
onClick={() => setShowHistoryQuote(item._id)}
>
{item.quoteLen}
</Button>
)}
</Flex>
</Card>
</Box>
) : (
@@ -909,13 +887,22 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
</DrawerContent>
</Drawer>
)}
{/* quote modal*/}
{showHistoryQuote && chatId && (
<QuoteModal
historyId={showHistoryQuote}
chatId={chatId}
onClose={() => setShowHistoryQuote(undefined)}
/>
)}
{/* system prompt show modal */}
{
<Modal isOpen={!!showSystemPrompt} onClose={() => setShowSystemPrompt('')}>
<ModalOverlay />
<ModalContent pt={5} maxW={'min(90vw, 600px)'} h={'80vh'} overflow={'overlay'}>
<ModalContent maxW={'min(90vw, 600px)'} maxH={'80vh'} minH={'50vh'} overflow={'overlay'}>
<ModalCloseButton />
<ModalBody pt={5} whiteSpace={'pre-wrap'} textAlign={'justify'}>
<ModalHeader></ModalHeader>
<ModalBody pt={0} whiteSpace={'pre-wrap'} textAlign={'justify'} fontSize={'xs'}>
{showSystemPrompt}
</ModalBody>
</ModalContent>

View File

@@ -11,7 +11,6 @@ import {
MenuButton,
MenuList,
MenuItem,
Image,
Button,
Modal,
ModalOverlay,
@@ -48,6 +47,7 @@ import { useUserStore } from '@/store/user';
import Loading from '@/components/Loading';
import Markdown from '@/components/Markdown';
import SideBar from '@/components/SideBar';
import Avatar from '@/components/Avatar';
import Empty from './components/Empty';
const ShareHistory = dynamic(() => import('./components/ShareHistory'), {
@@ -73,7 +73,6 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
const isLeavePage = useRef(false);
const [inputVal, setInputVal] = useState(''); // user input prompt
const [showSystemPrompt, setShowSystemPrompt] = useState('');
const [messageContextMenuData, setMessageContextMenuData] = useState<{
// message messageContextMenuData
left: number;
@@ -178,7 +177,7 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
}));
// 流请求,获取数据
const { responseText, systemPrompt } = await streamFetch({
const { responseText } = await streamFetch({
url: '/api/chat/shareChat/chat',
data: {
prompts: formatPrompts.slice(-shareChatData.maxContext - 1, -1),
@@ -215,8 +214,7 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
if (index !== state.history.length - 1) return item;
return {
...item,
status: 'finish',
systemPrompt
status: 'finish'
};
});
@@ -564,9 +562,9 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
>
{!isPc && (
<MyIcon
name={'tabbarMore'}
w={'14px'}
h={'14px'}
name={'menu'}
w={'20px'}
h={'20px'}
color={useColorModeValue('blackAlpha.700', 'white')}
onClick={onOpenSlider}
/>
@@ -614,34 +612,30 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
{item.obj === 'Human' && <Box flex={1} />}
{/* avatar */}
<Menu autoSelect={false} isLazy>
<MenuButton
as={Box}
{...(item.obj === 'AI'
? {
order: 1,
mr: ['6px', 2]
}
: {
order: 3,
ml: ['6px', 2]
})}
>
<Tooltip label={item.obj === 'AI' ? 'AI助手详情' : ''}>
<Image
className="avatar"
<Tooltip label={item.obj === 'AI' ? '应用详情' : ''}>
<MenuButton
as={Box}
{...(item.obj === 'AI'
? {
order: 1,
mr: ['6px', 2]
}
: {
order: 3,
ml: ['6px', 2]
})}
>
<Avatar
src={
item.obj === 'Human'
? userInfo?.avatar || '/icon/human.png'
: shareChatData.model.avatar || LOGO_ICON
}
alt="avatar"
w={['20px', '34px']}
h={['20px', '34px']}
borderRadius={'50%'}
objectFit={'contain'}
/>
</Tooltip>
</MenuButton>
</MenuButton>
</Tooltip>
{!isPc && <RenderContextMenu history={item} index={index} />}
</Menu>
{/* message */}
@@ -660,19 +654,6 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
isChatting={isChatting && index === shareChatData.history.length - 1}
formatLink
/>
{item.systemPrompt && (
<Button
size={'xs'}
mt={2}
fontWeight={'normal'}
colorScheme={'gray'}
variant={'outline'}
w={'90px'}
onClick={() => setShowSystemPrompt(item.systemPrompt || '')}
>
</Button>
)}
</Card>
</Box>
) : (
@@ -800,18 +781,6 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
</DrawerContent>
</Drawer>
)}
{/* system prompt show modal */}
{
<Modal isOpen={!!showSystemPrompt} onClose={() => setShowSystemPrompt('')}>
<ModalOverlay />
<ModalContent maxW={'min(90vw, 600px)'} pr={2} maxH={'80vh'} overflowY={'auto'}>
<ModalCloseButton />
<ModalBody pt={5} whiteSpace={'pre-wrap'} textAlign={'justify'}>
{showSystemPrompt}
</ModalBody>
</ModalContent>
</Modal>
}
{/* context menu */}
{messageContextMenuData && (
<Box

View File

@@ -131,6 +131,10 @@ const DataCard = ({ kbId }: { kbId: string }) => {
type: 'text/csv',
filename: 'data.csv'
});
toast({
title: '导出成功,下次导出需要半小时后',
status: 'success'
});
} catch (error) {
error;
}
@@ -164,10 +168,10 @@ const DataCard = ({ kbId }: { kbId: string }) => {
mr={2}
size={'sm'}
isLoading={isLoadingExport}
title={'换行数据导出时,会进行格式转换'}
title={'半小时仅能导出1次'}
onClick={() => onclickExport()}
>
csv
</Button>
<Menu autoSelect={false}>
<MenuButton as={Button} size={'sm'}>

View File

@@ -6,7 +6,6 @@ import {
Flex,
Button,
Tooltip,
Image,
FormControl,
Input,
Tag,
@@ -18,13 +17,13 @@ import { useForm } from 'react-hook-form';
import { useQuery } from '@tanstack/react-query';
import { useUserStore } from '@/store/user';
import { delKbById, putKbById } from '@/api/plugins/kb';
import { useLoading } from '@/hooks/useLoading';
import { KbItemType } from '@/types/plugin';
import { useSelectFile } from '@/hooks/useSelectFile';
import { useConfirm } from '@/hooks/useConfirm';
import { compressImg } from '@/utils/file';
import DataCard from './DataCard';
import { getErrText } from '@/utils/tools';
import Avatar from '@/components/Avatar';
const Detail = ({ kbId }: { kbId: string }) => {
const { toast } = useToast();
@@ -58,7 +57,7 @@ const Detail = ({ kbId }: { kbId: string }) => {
},
onError(err: any) {
toast({
title: getErrText(err, '获取AI助手异常'),
title: getErrText(err, '获取知识库异常'),
status: 'error'
});
loadKbList(true);
@@ -95,6 +94,7 @@ const Detail = ({ kbId }: { kbId: string }) => {
id: kbId,
...data
});
await getKbDetail(kbId, true);
toast({
title: '更新成功',
status: 'success'
@@ -108,7 +108,7 @@ const Detail = ({ kbId }: { kbId: string }) => {
}
setBtnLoading(false);
},
[kbId, loadKbList, toast]
[getKbDetail, kbId, loadKbList, toast]
);
const saveSubmitError = useCallback(() => {
// deep search message
@@ -138,7 +138,7 @@ const Detail = ({ kbId }: { kbId: string }) => {
maxH: 100
});
setValue('avatar', base64);
loadKbList(true);
setRefresh((state) => !state);
} catch (err: any) {
toast({
title: typeof err === 'string' ? err : '头像选择异常',
@@ -146,7 +146,7 @@ const Detail = ({ kbId }: { kbId: string }) => {
});
}
},
[loadKbList, setValue, toast]
[setRefresh, setValue, toast]
);
return (
@@ -180,12 +180,10 @@ const Detail = ({ kbId }: { kbId: string }) => {
<Box flex={'0 0 60px'} w={0}>
</Box>
<Image
src={getValues('avatar') || '/icon/logo.png'}
alt={'avatar'}
<Avatar
src={getValues('avatar')}
w={['28px', '36px']}
h={['28px', '36px']}
objectFit={'cover'}
cursor={'pointer'}
title={'点击切换头像'}
onClick={onOpenSelectFile}

View File

@@ -30,7 +30,7 @@ const InputDataModal = ({
kbId: string;
defaultValues?: FormData;
}) => {
const [importing, setImporting] = useState(false);
const [loading, setLoading] = useState(false);
const { toast } = useToast();
const { register, handleSubmit, reset } = useForm<FormData>({
@@ -49,7 +49,7 @@ const InputDataModal = ({
});
return;
}
setImporting(true);
setLoading(true);
try {
const res = await postKbDataFromList({
@@ -78,7 +78,7 @@ const InputDataModal = ({
});
console.log(err);
}
setImporting(false);
setLoading(false);
},
[kbId, onSuccess, reset, toast]
);
@@ -88,16 +88,20 @@ const InputDataModal = ({
if (!e.dataId) return;
if (e.a !== defaultValues.a || e.q !== defaultValues.q) {
await putKbDataById({
dataId: e.dataId,
a: e.a,
q: e.q === defaultValues.q ? '' : e.q
});
onSuccess();
setLoading(true);
try {
await putKbDataById({
dataId: e.dataId,
a: e.a,
q: e.q === defaultValues.q ? '' : e.q
});
onSuccess();
} catch (error) {}
setLoading(false);
}
toast({
title: '修改回答成功',
title: '修改数据成功',
status: 'success'
});
onClose();
@@ -116,18 +120,18 @@ const InputDataModal = ({
maxW={'90vw'}
position={'relative'}
>
<ModalHeader></ModalHeader>
<ModalHeader>{defaultValues.dataId ? '变更数据' : '手动导入数据'}</ModalHeader>
<ModalCloseButton />
<Box
display={['block', 'flex']}
flex={'1 0 0'}
h={['100%', 0]}
overflowY={'auto'}
overflow={'overlay'}
px={6}
pb={2}
>
<Box flex={1} mr={[0, 4]} mb={[4, 0]} h={['230px', '100%']}>
<Box flex={1} mr={[0, 4]} mb={[4, 0]} h={['50%', '100%']}>
<Box h={'30px'}>{'匹配的知识点'}</Box>
<Textarea
placeholder={'匹配的知识点。这部分内容会被搜索,请把控内容的质量。总和最多 3000 字。'}
@@ -139,7 +143,7 @@ const InputDataModal = ({
})}
/>
</Box>
<Box flex={1} h={['330px', '100%']}>
<Box flex={1} h={['50%', '100%']}>
<Box h={'30px'}></Box>
<Textarea
placeholder={
@@ -159,10 +163,10 @@ const InputDataModal = ({
</Button>
<Button
isLoading={importing}
isLoading={loading}
onClick={handleSubmit(defaultValues.dataId ? updateData : sureImportData)}
>
{defaultValues.dataId ? '确认变更' : '确认导入'}
</Button>
</Flex>
</ModalContent>

View File

@@ -1,5 +1,5 @@
import React, { useCallback, useState, useMemo } from 'react';
import { Box, Flex, useTheme, Input, IconButton, Tooltip, Image, Tag } from '@chakra-ui/react';
import { Box, Flex, useTheme, Input, IconButton, Tooltip, Tag } from '@chakra-ui/react';
import { AddIcon } from '@chakra-ui/icons';
import { useRouter } from 'next/router';
import { postCreateKb } from '@/api/plugins/kb';
@@ -8,6 +8,7 @@ import { useToast } from '@/hooks/useToast';
import { useQuery } from '@tanstack/react-query';
import { useUserStore } from '@/store/user';
import MyIcon from '@/components/Icon';
import Avatar from '@/components/Avatar';
const KbList = ({ kbId }: { kbId: string }) => {
const theme = useTheme();
@@ -111,13 +112,7 @@ const KbList = ({ kbId }: { kbId: string }) => {
router.push(`/kb?kbId=${item._id}`);
}}
>
<Image
src={item.avatar || '/icon/logo.png'}
alt=""
w={'34px'}
maxH={'50px'}
objectFit={'contain'}
/>
<Avatar src={item.avatar} w={'34px'} h={'34px'} />
<Box flex={'1 0 0'} w={0} ml={3}>
<Box className="textEllipsis" color={'myGray.1000'}>
{item.name}

View File

@@ -65,7 +65,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
});
// aut register a model
postCreateModel({
name: 'AI助手1'
name: '应用1'
});
} catch (error: any) {
toast({

View File

@@ -1,5 +1,5 @@
import React, { useCallback, useMemo, useState } from 'react';
import { Box, Flex, useTheme, Input, IconButton, Tooltip, Image } from '@chakra-ui/react';
import { Box, Flex, useTheme, Input, IconButton, Tooltip } from '@chakra-ui/react';
import { AddIcon } from '@chakra-ui/icons';
import { useRouter } from 'next/router';
import MyIcon from '@/components/Icon';
@@ -8,6 +8,7 @@ import { useLoading } from '@/hooks/useLoading';
import { useToast } from '@/hooks/useToast';
import { useQuery } from '@tanstack/react-query';
import { useUserStore } from '@/store/user';
import Avatar from '@/components/Avatar';
const ModelList = ({ modelId }: { modelId: string }) => {
const theme = useTheme();
@@ -23,7 +24,7 @@ const ModelList = ({ modelId }: { modelId: string }) => {
const onclickCreateModel = useCallback(async () => {
setIsLoading(true);
try {
const id = await postCreateModel({ name: `AI助手${myModels.length + 1}` });
const id = await postCreateModel({ name: `AI应用${myModels.length + 1}` });
toast({
title: '创建成功',
status: 'success'
@@ -94,7 +95,7 @@ const ModelList = ({ modelId }: { modelId: string }) => {
/>
)}
</Flex>
<Tooltip label={'新建一个AI助手'}>
<Tooltip label={'新建一个AI应用'}>
<IconButton
h={'32px'}
icon={<AddIcon />}
@@ -134,19 +135,13 @@ const ModelList = ({ modelId }: { modelId: string }) => {
router.push(`/model?modelId=${item._id}`);
}}
>
<Image
src={item.avatar || '/icon/logo.png'}
alt=""
w={'34px'}
maxH={'50px'}
objectFit={'contain'}
/>
<Avatar src={item.avatar} w={'34px'} h={'34px'} />
<Box flex={'1 0 0'} w={0} ml={3}>
<Box className="textEllipsis" color={'myGray.1000'}>
{item.name}
</Box>
<Box className="textEllipsis" color={'myGray.400'} fontSize={'sm'}>
{item.systemPrompt || '这个AI助手没有设置提示词~'}
{item.systemPrompt || '这个 应用 没有设置提示词~'}
</Box>
</Box>
</Flex>

View File

@@ -15,7 +15,6 @@ import {
Button,
Select,
Switch,
Image,
Modal,
ModalOverlay,
ModalContent,
@@ -51,6 +50,7 @@ import { formatTimeToChatTime, useCopyData, getErrText } from '@/utils/tools';
import MyIcon from '@/components/Icon';
import { useGlobalStore } from '@/store/global';
import { useUserStore } from '@/store/user';
import Avatar from '@/components/Avatar';
const ModelEditForm = ({
formHooks,
@@ -71,7 +71,7 @@ const ModelEditForm = ({
const { loadKbList } = useUserStore();
const { openConfirm, ConfirmChild } = useConfirm({
content: '确认删除该AI助手?'
content: '确认删除该应用?'
});
const { copyData } = useCopyData();
const { register, setValue, getValues } = formHooks;
@@ -189,13 +189,7 @@ ${e.password ? `密码为: ${e.password}` : ''}`;
onClick={() => router.push(`/kb?kbId=${item._id}`)}
>
<Flex alignItems={'center'}>
<Image
src={item.avatar}
fallbackSrc="/icon/logo.png"
w={'20px'}
h={'20px'}
alt=""
></Image>
<Avatar src={item.avatar} w={'20px'} h={'20px'}></Avatar>
<Box ml={3} fontWeight={'bold'}>
{item.name}
</Box>
@@ -222,12 +216,10 @@ ${e.password ? `密码为: ${e.password}` : ''}`;
<Box flex={'0 0 80px'} w={0}>
</Box>
<Image
src={getValues('avatar') || '/icon/logo.png'}
alt={'avatar'}
<Avatar
src={getValues('avatar')}
w={['28px', '36px']}
h={['28px', '36px']}
objectFit={'cover'}
cursor={isOwner ? 'pointer' : 'default'}
title={'点击切换头像'}
onClick={() => isOwner && onOpenSelectFile()}
@@ -283,14 +275,14 @@ ${e.password ? `密码为: ${e.password}` : ''}`;
</Flex>
{isOwner && (
<Flex mt={5} alignItems={'center'}>
<Box flex={'0 0 100px'}>AI助手</Box>
<Box flex={'0 0 100px'}></Box>
<Button
colorScheme={'gray'}
variant={'outline'}
size={'sm'}
onClick={openConfirm(handleDelModel)}
>
AI助手
</Button>
</Flex>
)}
@@ -366,7 +358,9 @@ ${e.password ? `密码为: ${e.password}` : ''}`;
rows={8}
maxLength={-1}
isDisabled={!isOwner}
placeholder={'模型默认的 prompt 词,通过调整该内容,可以引导模型聊天方向。'}
placeholder={
'模型默认的 prompt 词,通过调整该内容,可以引导模型聊天方向。\n\n如果使用了知识库搜索没有填写该内容时系统会自动补充提示词如果填写了内容则以填写的内容为准。'
}
{...register('chat.systemPrompt')}
/>
</Box>
@@ -621,13 +615,7 @@ ${e.password ? `密码为: ${e.password}` : ''}`;
}}
>
<Flex alignItems={'center'}>
<Image
src={item.avatar}
fallbackSrc="/icon/logo.png"
w={'20px'}
h={'20px'}
alt=""
></Image>
<Avatar src={item.avatar} w={'20px'} h={'20px'} />
<Box ml={3} fontWeight={'bold'}>
{item.name}
</Box>

View File

@@ -29,7 +29,7 @@ const ModelDetail = ({ modelId, isPc }: { modelId: string; isPc: boolean }) => {
},
onError(err: any) {
toast({
title: err?.message || '获取AI助手异常',
title: err?.message || '获取应用异常',
status: 'error'
});
setLastModelId('');

View File

@@ -1,9 +1,10 @@
import React from 'react';
import { Box, Flex, Image, Button } from '@chakra-ui/react';
import { Box, Flex, Button, Tooltip } from '@chakra-ui/react';
import type { ShareModelItem } from '@/types/model';
import { useRouter } from 'next/router';
import MyIcon from '@/components/Icon';
import styles from '../index.module.scss';
import Avatar from '@/components/Avatar';
const ShareModelList = ({
models = [],
@@ -27,27 +28,29 @@ const ShareModelList = ({
borderRadius={'md'}
>
<Flex alignItems={'center'}>
<Image
<Avatar
src={model.avatar}
alt={'avatar'}
w={['28px', '36px']}
h={['28px', '36px']}
objectFit={'cover'}
borderRadius={'50%'}
/>
<Box fontWeight={'bold'} fontSize={'lg'} ml={5}>
{model.name}
</Box>
</Flex>
<Box
flex={1}
className={styles.intro}
my={4}
fontSize={'sm'}
wordBreak={'break-all'}
color={'blackAlpha.600'}
>
{model.share.intro || '这个AI助手还没有介绍~'}
</Box>
<Tooltip label={model.share.intro}>
<Box
className={styles.intro}
flex={1}
my={4}
fontSize={'sm'}
wordBreak={'break-all'}
color={'blackAlpha.600'}
>
{model.share.intro || '这个 应用 还没有介绍~'}
</Box>
</Tooltip>
<Flex justifyContent={'space-between'}>
<Flex
alignItems={'center'}
@@ -66,21 +69,11 @@ const ShareModelList = ({
<Button
size={'sm'}
variant={'outline'}
w={['60px', '80px']}
w={['60px', '70px']}
onClick={() => router.push(`/chat?modelId=${model._id}`)}
>
</Button>
{model.share.isShareDetail && (
<Button
ml={4}
size={'sm'}
w={['60px', '80px']}
onClick={() => router.push(`/model?modelId=${model._id}`)}
>
</Button>
)}
</Box>
</Flex>
</Flex>

View File

@@ -1,12 +1,11 @@
import React, { useState, useRef, useCallback, useMemo } from 'react';
import React, { useState, useRef, useCallback } from 'react';
import { Box, Flex, Card, Grid, Input } from '@chakra-ui/react';
import { useLoading } from '@/hooks/useLoading';
import { getShareModelList, triggerModelCollection, getCollectionModels } from '@/api/model';
import { getShareModelList, triggerModelCollection } from '@/api/model';
import { usePagination } from '@/hooks/usePagination';
import type { ShareModelItem } from '@/types/model';
import { useUserStore } from '@/store/user';
import ShareModelList from './components/list';
import { useQuery } from '@tanstack/react-query';
const modelList = () => {
const { Loading } = useLoading();
@@ -15,7 +14,13 @@ const modelList = () => {
const { refreshModel } = useUserStore();
/* 加载模型 */
const { data, isLoading, Pagination, getData, pageNum } = usePagination<ShareModelItem>({
const {
data: models,
isLoading,
Pagination,
getData,
pageNum
} = usePagination<ShareModelItem>({
api: getShareModelList,
pageSize: 24,
params: {
@@ -23,65 +28,32 @@ const modelList = () => {
}
});
const { data: collectionModels = [], refetch: refetchCollection } = useQuery(
['getCollectionModels'],
getCollectionModels
);
const models = useMemo(() => {
if (!collectionModels) return [];
return data.map((model) => ({
...model,
isCollection: !!collectionModels.find((item) => item._id === model._id)
}));
}, [collectionModels, data]);
const onclickCollection = useCallback(
async (modelId: string) => {
try {
await triggerModelCollection(modelId);
getData(pageNum);
refetchCollection();
refreshModel.removeModelDetail(modelId);
} catch (error) {
console.log(error);
}
},
[getData, pageNum, refetchCollection, refreshModel]
[getData, pageNum, refreshModel]
);
return (
<Box py={[5, 10]} px={'5vw'}>
<Card px={6} py={3}>
<Flex alignItems={'center'} justifyContent={'space-between'}>
<Box fontWeight={'bold'} fontSize={'xl'}>
AI助手
</Box>
</Flex>
{collectionModels.length == 0 && (
<Box textAlign={'center'} pt={3}>
AI助手~
</Box>
)}
<Grid templateColumns={['1fr', '1fr 1fr', '1fr 1fr 1fr']} gridGap={4} mt={4}>
<ShareModelList models={collectionModels} onclickCollection={onclickCollection} />
</Grid>
</Card>
<Card mt={5} px={6} py={3}>
<Box display={['block', 'flex']} alignItems={'center'} justifyContent={'space-between'}>
<Box fontWeight={'bold'} flex={1} fontSize={'xl'}>
AI助手市
<Box as={'span'} fontWeight={'normal'} fontSize={'md'}>
(Beta)
</Box>
</Box>
<Box mt={[2, 0]} textAlign={'right'}>
<Input
maxW={'240px'}
w={['200px', '250px']}
size={'sm'}
value={searchText}
placeholder="搜索AI助手,回车确认"
placeholder="搜索应用,回车确认"
onChange={(e) => setSearchText(e.target.value)}
onBlur={() => {
if (searchText === lastSearch.current) return;
@@ -98,7 +70,17 @@ const modelList = () => {
/>
</Box>
</Box>
<Grid templateColumns={['1fr', '1fr 1fr', '1fr 1fr 1fr']} gridGap={4} mt={4}>
<Grid
templateColumns={[
'repeat(1,1fr)',
'repeat(2,1fr)',
'repeat(3,1fr)',
'repeat(4,1fr)',
'repeat(5,1fr)'
]}
gridGap={4}
mt={4}
>
<ShareModelList models={models} onclickCollection={onclickCollection} />
</Grid>
<Flex mt={4} justifyContent={'flex-end'}>

View File

@@ -1,5 +1,5 @@
import React, { useCallback, useState } from 'react';
import { Card, Box, Flex, Button, Input, Image } from '@chakra-ui/react';
import { Card, Box, Flex, Button, Input } from '@chakra-ui/react';
import { useForm } from 'react-hook-form';
import { UserUpdateParams } from '@/types/user';
import { putUserInfo } from '@/api/user';
@@ -14,6 +14,7 @@ import dynamic from 'next/dynamic';
import { useSelectFile } from '@/hooks/useSelectFile';
import { compressImg } from '@/utils/file';
import Loading from '@/components/Loading';
import Avatar from '@/components/Avatar';
const PayRecordTable = dynamic(() => import('./components/PayRecordTable'), {
loading: () => <Loading fixed={false} />,
@@ -106,12 +107,10 @@ const NumberSetting = () => {
</Flex>
<Flex mt={6} alignItems={'center'}>
<Box flex={'0 0 50px'}>:</Box>
<Image
<Avatar
src={userInfo?.avatar}
alt={'avatar'}
w={['28px', '36px']}
maxH={'40px'}
objectFit={'contain'}
h={['28px', '36px']}
cursor={'pointer'}
title={'点击切换头像'}
onClick={onOpenSelectFile}

View File

@@ -11,8 +11,8 @@ const list = [
link: '/kb'
},
{
icon: 'shareMarket',
label: 'AI助手市场',
icon: 'appStore',
label: 'AI应用市场',
link: '/model/share'
},
{

110
src/service/api/request.ts Normal file
View File

@@ -0,0 +1,110 @@
import axios, { Method, InternalAxiosRequestConfig, AxiosResponse } from 'axios';
// Per-request options callers may pass through to the internal axios helpers.
interface ConfigType {
  headers?: { [key: string]: string };
  // NOTE(review): `hold` is forwarded to axios via `...config` but never read
  // in this file — presumably consumed elsewhere; confirm against callers.
  hold?: boolean;
}
// Standard envelope returned by our own API routes: { code, message, data }.
interface ResponseDataType {
  code: number;
  message: string;
  data: any;
}
/**
 * Request interceptor: stamp every outgoing server-to-server call with the
 * root key (from the ROOT_KEY env var) so internal API routes accept it.
 */
function requestStart(config: InternalAxiosRequestConfig): InternalAxiosRequestConfig {
  const { headers } = config;
  if (headers) {
    headers.rootkey = process.env.ROOT_KEY;
  }
  return config;
}
/**
 * Response interceptor (success path): pass the raw axios response through
 * untouched — payload validation happens later in checkRes().
 */
const responseSuccess = (response: AxiosResponse<ResponseDataType>) => response;
/**
 * Validate the server envelope: return the inner `data` when the business
 * code is in [200, 400), otherwise reject so callers hit their catch path.
 *
 * Fix: guard with `data == null` — the old `data === undefined` check let a
 * `null` payload slip through and crash on `data.code` with a TypeError.
 */
function checkRes(data: ResponseDataType) {
  if (data == null) {
    // covers both null and undefined
    return Promise.reject('服务器异常');
  } else if (data.code < 200 || data.code >= 400) {
    return Promise.reject(data);
  }
  return data.data;
}
/**
 * Response/transport failure path: normalise every failure into a rejected
 * promise whose value carries at least a `message` field.
 */
function responseError(err: any) {
  if (!err) {
    return Promise.reject({ message: '未知错误' });
  }
  return Promise.reject(typeof err === 'string' ? { message: err } : err);
}
/* Shared axios instance for server-to-server calls against our own API routes. */
const instance = axios.create({
  timeout: 60000, // request timeout in ms
  headers: {
    'content-type': 'application/json'
  }
});
/* Request interceptor: attach the root key (see requestStart above). */
instance.interceptors.request.use(requestStart, (err) => Promise.reject(err));
/* Response interceptor: pass responses through; forward transport errors as rejections. */
instance.interceptors.response.use(responseSuccess, (err) => Promise.reject(err));
/**
 * Core helper behind GET/POST/PUT/DELETE for internal API calls.
 *
 * @param url    path under `/api`
 * @param data   payload; null/undefined fields are stripped before sending
 * @param config per-request overrides, spread last so they win
 * @param method HTTP verb — GET sends `data` as query params, others as body
 * @returns the unwrapped `data` field of the envelope (via checkRes), or a
 *          normalised rejection (via responseError)
 *
 * Fix: strip empty fields into a fresh object instead of `delete`-ing keys
 * off the caller's argument — the old loop mutated the caller's object.
 */
function request(url: string, data: any, config: ConfigType, method: Method): any {
  /* 去空 — build a sanitized copy, leaving the caller's object untouched */
  const payload: Record<string, any> = {};
  for (const key in data) {
    if (data[key] !== null && data[key] !== undefined) {
      payload[key] = data[key];
    }
  }
  return instance
    .request({
      baseURL: `http://localhost:${process.env.PORT || 3000}/api`,
      url,
      method,
      data: method === 'GET' ? null : payload,
      params: method === 'GET' ? payload : null, // GET carries the data in the query string
      ...config // 用户自定义配置,可以覆盖前面的配置
    })
    .then((res) => checkRes(res.data))
    .catch((err) => responseError(err));
}
/**
 * Verb-specific wrappers around request().
 * `T` is the expected shape of the envelope's unwrapped `data` field.
 * @param url    path under `/api`
 * @param params payload — query string for GET, JSON body for the others
 * @param config per-request overrides (extra headers, etc.)
 * @returns promise resolving to the response's inner `data`, typed as T
 */
export function GET<T>(url: string, params = {}, config: ConfigType = {}): Promise<T> {
  return request(url, params, config, 'GET');
}
export function POST<T>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
  return request(url, data, config, 'POST');
}
export function PUT<T>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
  return request(url, data, config, 'PUT');
}
// DELETE takes no payload — only optional config overrides.
export function DELETE<T>(url: string, config: ConfigType = {}): Promise<T> {
  return request(url, {}, config, 'DELETE');
}

5
src/service/api/text.ts Normal file
View File

@@ -0,0 +1,5 @@
import { POST } from './request';
import type { TextPluginRequestParams } from '@/types/plugin';
/* Server-side sensitive-content check: proxies the text to the internal
   openapi endpoint via the rootkey-authenticated request helper. */
export const sensitiveCheck = (data: TextPluginRequestParams) =>
  POST('/openapi/text/sensitiveCheck', data);

View File

@@ -4,7 +4,7 @@ import { OpenAiChatEnum } from '@/constants/model';
import { pushSplitDataBill } from '@/service/events/pushBill';
import { generateVector } from './generateVector';
import { openaiError2 } from '../errorCode';
import { PgClient } from '@/service/pg';
import { insertKbItem } from '@/service/pg';
import { SplitDataSchema } from '@/types/mongoSchema';
import { modelServiceToolMap } from '../utils/chat';
import { ChatRoleEnum } from '@/constants/chat';
@@ -133,14 +133,10 @@ A2:
textList: dataItem.textList.slice(0, -5)
}),
// 生成的内容插入 pg
PgClient.insert('modelData', {
values: resultList.map((item) => [
{ key: 'user_id', value: dataItem.userId },
{ key: 'kb_id', value: dataItem.kbId },
{ key: 'q', value: item.q },
{ key: 'a', value: item.a },
{ key: 'status', value: 'waiting' }
])
insertKbItem({
userId: dataItem.userId,
kbId: dataItem.kbId,
data: resultList
})
]);
console.log('生成QA成功time:', `${(Date.now() - startTime) / 1000}s`);

View File

@@ -1,8 +1,8 @@
import { openaiCreateEmbedding } from '../utils/chat/openai';
import { getApiKey } from '../utils/auth';
import { openaiError2 } from '../errorCode';
import { PgClient } from '@/service/pg';
import { getErrText } from '@/utils/tools';
import { openaiEmbedding } from '@/pages/api/openapi/plugin/openaiEmbedding';
export async function generateVector(next = false): Promise<any> {
if (process.env.queueTask !== '1') {
@@ -42,24 +42,20 @@ export async function generateVector(next = false): Promise<any> {
dataId = dataItem.id;
// 获取 openapi Key
let userOpenAiKey;
try {
const res = await getApiKey({ model: 'gpt-3.5-turbo', userId: dataItem.userId });
userOpenAiKey = res.userOpenAiKey;
await getApiKey({ model: 'gpt-3.5-turbo', userId: dataItem.userId });
} catch (err: any) {
await PgClient.delete('modelData', {
where: [['id', dataId]]
});
generateVector(true);
getErrText(err, '获取 OpenAi Key 失败');
return;
return generateVector(true);
}
// 生成词向量
const { vectors } = await openaiCreateEmbedding({
textArr: [dataItem.q],
userId: dataItem.userId,
userOpenAiKey
const vectors = await openaiEmbedding({
input: [dataItem.q],
userId: dataItem.userId
});
// 更新 pg 向量和状态数据

View File

@@ -47,6 +47,29 @@ const ChatSchema = new Schema({
type: String,
required: true
},
quote: {
type: [
{
id: {
type: String,
required: true
},
q: {
type: String,
default: ''
},
a: {
type: String,
default: ''
},
isEdit: {
type: String,
default: false
}
}
],
default: []
},
systemPrompt: {
type: String,
default: ''

View File

@@ -16,6 +16,10 @@ const UserSchema = new Schema({
get: (val: string) => hashPassword(val),
select: false
},
createTime: {
type: Date,
default: () => new Date()
},
avatar: {
type: String,
default: '/icon/human.png'
@@ -41,9 +45,11 @@ const UserSchema = new Schema({
type: String,
default: ''
},
createTime: {
type: Date,
default: () => new Date()
limit: {
exportKbTime: {
// Every half hour
type: Date
}
}
});

View File

@@ -156,3 +156,29 @@ class Pg {
}
export const PgClient = new Pg();
/**
 * data insert kb
 * Bulk-insert Q/A pairs into the `modelData` table; every row starts in `waiting` status.
 */
export const insertKbItem = ({
  userId,
  kbId,
  data
}: {
  userId: string;
  kbId: string;
  data: {
    q: string;
    a: string;
  }[];
}) => {
  // One row of column descriptors per Q/A pair
  const values = data.map(({ q, a }) => [
    { key: 'user_id', value: userId },
    { key: 'kb_id', value: kbId },
    { key: 'q', value: q },
    { key: 'a', value: a },
    { key: 'status', value: 'waiting' }
  ]);
  return PgClient.insert('modelData', { values });
};

View File

@@ -1,175 +0,0 @@
import { PgClient } from '@/service/pg';
import { ModelDataStatusEnum, ModelVectorSearchModeEnum, ChatModelMap } from '@/constants/model';
import { ModelSchema } from '@/types/mongoSchema';
import { openaiCreateEmbedding } from '../utils/chat/openai';
import { ChatRoleEnum } from '@/constants/chat';
import { modelToolMap } from '@/utils/chat';
import { ChatItemSimpleType } from '@/types/chat';
/**
* use openai embedding search kb
*/
export const searchKb = async ({
userOpenAiKey,
prompts,
similarity = 0.2,
model,
userId
}: {
userOpenAiKey?: string;
prompts: ChatItemSimpleType[];
model: ModelSchema;
userId: string;
similarity?: number;
}): Promise<{
code: 200 | 201;
searchPrompts: {
obj: ChatRoleEnum;
value: string;
}[];
}> => {
async function search(textArr: string[] = []) {
const limitMap: Record<ModelVectorSearchModeEnum, number> = {
[ModelVectorSearchModeEnum.hightSimilarity]: 15,
[ModelVectorSearchModeEnum.noContext]: 15,
[ModelVectorSearchModeEnum.lowSimilarity]: 20
};
// 获取提示词的向量
const { vectors: promptVectors } = await openaiCreateEmbedding({
userOpenAiKey,
userId,
textArr
});
const searchRes = await Promise.all(
promptVectors.map((promptVector) =>
PgClient.select<{ id: string; q: string; a: string }>('modelData', {
fields: ['id', 'q', 'a'],
where: [
['status', ModelDataStatusEnum.ready],
'AND',
`kb_id IN (${model.chat.relatedKbs.map((item) => `'${item}'`).join(',')})`,
'AND',
`vector <=> '[${promptVector}]' < ${similarity}`
],
order: [{ field: 'vector', mode: `<=> '[${promptVector}]'` }],
limit: limitMap[model.chat.searchMode]
}).then((res) => res.rows)
)
);
// Remove repeat record
const idSet = new Set<string>();
const filterSearch = searchRes.map((search) =>
search.filter((item) => {
if (idSet.has(item.id)) {
return false;
}
idSet.add(item.id);
return true;
})
);
return filterSearch.map((item) => item.map((item) => `${item.q}\n${item.a}`).join('\n'));
}
const modelConstantsData = ChatModelMap[model.chat.chatModel];
// search three times
const userPrompts = prompts.filter((item) => item.obj === 'Human');
const searchArr: string[] = [
userPrompts[userPrompts.length - 1].value,
userPrompts[userPrompts.length - 2]?.value
].filter((item) => item);
const systemPrompts = await search(searchArr);
// filter system prompts.
const filterRateMap: Record<number, number[]> = {
1: [1],
2: [0.7, 0.3]
};
const filterRate = filterRateMap[systemPrompts.length] || filterRateMap[0];
// 计算固定提示词的 token 数量
const fixedPrompts = [
...(model.chat.systemPrompt
? [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
: []),
...(model.chat.searchMode === ModelVectorSearchModeEnum.noContext
? [
{
obj: ChatRoleEnum.System,
value: `知识库是关于"${model.name}"的内容,根据知识库内容回答问题.`
}
]
: [
{
obj: ChatRoleEnum.System,
value: `玩一个问答游戏,规则为:
1.你完全忘记你已有的知识
2.你只回答关于"${model.name}"的问题
3.你只从知识库中选择内容进行回答
4.如果问题不在知识库中,你会回答:"我不知道。"
请务必遵守规则`
}
])
];
const fixedSystemTokens = modelToolMap[model.chat.chatModel].countTokens({
messages: fixedPrompts
});
const maxTokens = modelConstantsData.systemMaxToken - fixedSystemTokens;
const filterSystemPrompt = filterRate
.map((rate, i) =>
modelToolMap[model.chat.chatModel].sliceText({
text: systemPrompts[i],
length: Math.floor(maxTokens * rate)
})
)
.join('\n')
.trim();
/* 高相似度+不回复 */
if (!filterSystemPrompt && model.chat.searchMode === ModelVectorSearchModeEnum.hightSimilarity) {
return {
code: 201,
searchPrompts: [
{
obj: ChatRoleEnum.System,
value: '对不起,你的问题不在知识库中。'
}
]
};
}
/* 高相似度+无上下文,不添加额外知识,仅用系统提示词 */
if (!filterSystemPrompt && model.chat.searchMode === ModelVectorSearchModeEnum.noContext) {
return {
code: 200,
searchPrompts: model.chat.systemPrompt
? [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
: []
};
}
/* 有匹配 */
return {
code: 200,
searchPrompts: [
{
obj: ChatRoleEnum.System,
value: `知识库:${filterSystemPrompt}`
},
...fixedPrompts
]
};
};

View File

@@ -11,39 +11,42 @@ import { ERROR_ENUM } from '../errorCode';
import { ChatModelType, OpenAiChatEnum } from '@/constants/model';
import { hashPassword } from '@/service/utils/tools';
export const parseCookie = (cookie?: string): Promise<string> => {
return new Promise((resolve, reject) => {
// 获取 cookie
const cookies = Cookie.parse(cookie || '');
const token = cookies.token;
if (!token) {
return reject(ERROR_ENUM.unAuthorization);
}
const key = process.env.TOKEN_KEY as string;
jwt.verify(token, key, function (err, decoded: any) {
if (err || !decoded?.userId) {
reject(ERROR_ENUM.unAuthorization);
return;
}
resolve(decoded.userId);
});
});
};
/* uniform auth user */
export const authUser = async ({
req,
authToken = false,
authOpenApi = false,
authRoot = false
authRoot = false,
authBalance = false
}: {
req: NextApiRequest;
authToken?: boolean;
authOpenApi?: boolean;
authRoot?: boolean;
authBalance?: boolean;
}) => {
const parseCookie = (cookie?: string): Promise<string> => {
return new Promise((resolve, reject) => {
// 获取 cookie
const cookies = Cookie.parse(cookie || '');
const token = cookies.token;
if (!token) {
return reject(ERROR_ENUM.unAuthorization);
}
const key = process.env.TOKEN_KEY as string;
jwt.verify(token, key, function (err, decoded: any) {
if (err || !decoded?.userId) {
reject(ERROR_ENUM.unAuthorization);
return;
}
resolve(decoded.userId);
});
});
};
const parseOpenApiKey = async (apiKey?: string) => {
if (!apiKey) {
return Promise.reject(ERROR_ENUM.unAuthorization);
@@ -66,8 +69,8 @@ export const authUser = async ({
return Promise.reject(error);
}
};
const parseRootKey = async (rootKey?: string, userId?: string) => {
if (!rootKey || !userId || !process.env.ROOT_KEY || rootKey !== process.env.ROOT_KEY) {
const parseRootKey = async (rootKey?: string, userId = '') => {
if (!rootKey || !process.env.ROOT_KEY || rootKey !== process.env.ROOT_KEY) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
return userId;
@@ -98,13 +101,24 @@ export const authUser = async ({
return Promise.reject(ERROR_ENUM.unAuthorization);
}
if (authBalance) {
const user = await User.findById(uid);
if (!user) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
if (!user.openaiKey && formatPrice(user.balance) <= 0) {
return Promise.reject(ERROR_ENUM.insufficientQuota);
}
}
return {
userId: uid
};
};
/* random get openai api key */
export const getOpenAiKey = () => {
export const getSystemOpenAiKey = () => {
// 纯字符串类型
const keys = process.env.OPENAIKEY?.split(',') || [];
const i = Math.floor(Math.random() * keys.length);
@@ -129,7 +143,7 @@ export const getApiKey = async ({
const keyMap = {
[OpenAiChatEnum.GPT35]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: getOpenAiKey() as string
systemAuthKey: getSystemOpenAiKey() as string
},
[OpenAiChatEnum.GPT4]: {
userOpenAiKey: user.openaiKey || '',
@@ -225,7 +239,7 @@ export const authChat = async ({
req
}: {
modelId: string;
chatId: '' | string;
chatId?: string;
req: NextApiRequest;
}) => {
const { userId } = await authUser({ req, authToken: true });

View File

@@ -1,17 +1,9 @@
import { ChatCompletionType, StreamResponseType } from './index';
import { ChatRoleEnum } from '@/constants/chat';
import axios from 'axios';
import mongoose from 'mongoose';
import { NEW_CHATID_HEADER } from '@/constants/chat';
/* 模型对话 */
export const claudChat = async ({ apiKey, messages, stream, chatId, res }: ChatCompletionType) => {
const conversationId = chatId || String(new mongoose.Types.ObjectId());
// create a new chat
!chatId &&
messages.filter((item) => item.obj === 'Human').length === 1 &&
res?.setHeader(NEW_CHATID_HEADER, conversationId);
export const claudChat = async ({ apiKey, messages, stream, chatId }: ChatCompletionType) => {
// get system prompt
const systemPrompt = messages
.filter((item) => item.obj === 'System')
@@ -26,7 +18,7 @@ export const claudChat = async ({ apiKey, messages, stream, chatId, res }: ChatC
{
prompt,
stream,
conversationId
conversationId: chatId
},
{
headers: {
@@ -55,8 +47,7 @@ export const claudStreamResponse = async ({ res, chatResponse, prompts }: Stream
try {
const decoder = new TextDecoder();
for await (const chunk of chatResponse.data as any) {
if (!res.writable) {
// 流被中断了,直接忽略后面的内容
if (res.closed) {
break;
}
const content = decoder.decode(chunk);

View File

@@ -1,7 +1,7 @@
import { ChatItemSimpleType } from '@/types/chat';
import { modelToolMap } from '@/utils/chat';
import { modelToolMap } from '@/utils/plugin';
import type { ChatModelType } from '@/constants/model';
import { ChatRoleEnum, SYSTEM_PROMPT_HEADER } from '@/constants/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { OpenAiChatEnum, ClaudeEnum } from '@/constants/model';
import { chatResponse, openAiStreamResponse } from './openai';
import { claudChat, claudStreamResponse } from './claude';
@@ -11,6 +11,7 @@ export type ChatCompletionType = {
apiKey: string;
temperature: number;
messages: ChatItemSimpleType[];
chatId?: string;
[key: string]: any;
};
export type ChatCompletionResponseType = {
@@ -23,7 +24,6 @@ export type StreamResponseType = {
chatResponse: any;
prompts: ChatItemSimpleType[];
res: NextApiResponse;
systemPrompt?: string;
[key: string]: any;
};
export type StreamResponseReturnType = {
@@ -129,7 +129,6 @@ export const resStreamResponse = async ({
model,
res,
chatResponse,
systemPrompt,
prompts
}: StreamResponseType & {
model: ChatModelType;
@@ -139,18 +138,14 @@ export const resStreamResponse = async ({
res.setHeader('Access-Control-Allow-Origin', '*');
res.setHeader('X-Accel-Buffering', 'no');
res.setHeader('Cache-Control', 'no-cache, no-transform');
systemPrompt && res.setHeader(SYSTEM_PROMPT_HEADER, encodeURIComponent(systemPrompt));
const { responseContent, totalTokens, finishMessages } = await modelServiceToolMap[
model
].streamResponse({
chatResponse,
prompts,
res,
systemPrompt
res
});
res.end();
return { responseContent, totalTokens, finishMessages };
};

View File

@@ -1,67 +1,18 @@
import { Configuration, OpenAIApi } from 'openai';
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import { axiosConfig } from '../tools';
import { ChatModelMap, embeddingModel, OpenAiChatEnum } from '@/constants/model';
import { pushGenerateVectorBill } from '../../events/pushBill';
import { adaptChatItem_openAI } from '@/utils/chat/openai';
import { modelToolMap } from '@/utils/chat';
import { ChatModelMap, OpenAiChatEnum } from '@/constants/model';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
import { modelToolMap } from '@/utils/plugin';
import { ChatCompletionType, ChatContextFilter, StreamResponseType } from './index';
import { ChatRoleEnum } from '@/constants/chat';
import { getOpenAiKey } from '../auth';
export const getOpenAIApi = (apiKey: string) => {
const configuration = new Configuration({
apiKey,
basePath: process.env.OPENAI_BASE_URL
});
return new OpenAIApi(configuration);
};
/* 获取向量 */
export const openaiCreateEmbedding = async ({
userOpenAiKey,
userId,
textArr
}: {
userOpenAiKey?: string;
userId: string;
textArr: string[];
}) => {
const systemAuthKey = getOpenAiKey();
// 获取 chatAPI
const chatAPI = getOpenAIApi(userOpenAiKey || systemAuthKey);
// 把输入的内容转成向量
const res = await chatAPI
.createEmbedding(
{
model: embeddingModel,
input: textArr
},
{
timeout: 60000,
...axiosConfig()
}
)
.then((res) => ({
tokenLen: res.data.usage.total_tokens || 0,
vectors: res.data.data.map((item) => item.embedding)
}));
pushGenerateVectorBill({
isPay: !userOpenAiKey,
userId,
text: textArr.join(''),
tokenLen: res.tokenLen
});
return {
vectors: res.vectors,
chatAPI
};
};
export const getOpenAIApi = () =>
new OpenAIApi(
new Configuration({
basePath: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1'
})
);
/* 模型对话 */
export const chatResponse = async ({
@@ -78,7 +29,7 @@ export const chatResponse = async ({
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages });
const chatAPI = getOpenAIApi(apiKey);
const chatAPI = getOpenAIApi();
const response = await chatAPI.createChatCompletion(
{
@@ -93,7 +44,7 @@ export const chatResponse = async ({
{
timeout: stream ? 60000 : 240000,
responseType: stream ? 'stream' : 'json',
...axiosConfig()
...axiosConfig(apiKey)
}
);
@@ -129,7 +80,7 @@ export const openAiStreamResponse = async ({
const content: string = json?.choices?.[0].delta.content || '';
responseContent += content;
res.writable && content && res.write(content);
!res.closed && content && res.write(content);
} catch (error) {
error;
}
@@ -139,8 +90,7 @@ export const openAiStreamResponse = async ({
const decoder = new TextDecoder();
const parser = createParser(onParse);
for await (const chunk of chatResponse.data as any) {
if (!res.writable) {
// 流被中断了,直接忽略后面的内容
if (res.closed) {
break;
}
parser.feed(decoder.decode(chunk, { stream: true }));

View File

@@ -31,9 +31,11 @@ export const clearCookie = (res: NextApiResponse) => {
};
/* openai axios config */
export const axiosConfig = () => ({
export const axiosConfig = (apikey: string) => ({
baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
httpsAgent: global.httpsAgent,
headers: {
Authorization: `Bearer ${apikey}`,
auth: process.env.OPENAI_BASE_URL_AUTH || ''
}
});

3
src/types/chat.d.ts vendored
View File

@@ -1,11 +1,14 @@
import { ChatRoleEnum } from '@/constants/chat';
import type { InitChatResponse, InitShareChatResponse } from '@/api/response/chat';
import { QuoteItemType } from '@/pages/api/openapi/kb/appKbSearch';
export type ExportChatType = 'md' | 'pdf' | 'html';
export type ChatItemSimpleType = {
obj: `${ChatRoleEnum}`;
value: string;
quoteLen?: number;
quote?: QuoteItemType[];
systemPrompt?: string;
};
export type ChatItemType = {

View File

@@ -22,6 +22,9 @@ export interface UserModelSchema {
promotion: {
rate: number;
};
limit: {
exportKbTime?: Date;
};
}
export interface AuthCodeSchema {

20
src/types/plugin.d.ts vendored
View File

@@ -1,10 +1,12 @@
import type { kbSchema } from './mongoSchema';
import { PluginTypeEnum } from '@/constants/plugin';
/* kb type */
export interface KbItemType extends kbSchema {
totalData: number;
tags: string;
}
export interface KbDataItemType {
id: string;
status: 'waiting' | 'ready';
@@ -13,3 +15,21 @@ export interface KbDataItemType {
kbId: string;
userId: string;
}
/* plugin */
export interface PluginConfig {
name: string;
desc: string;
url: string;
category: `${PluginTypeEnum}`;
uniPrice: 22; // 1k token
params: [
{
type: '';
}
];
}
export type TextPluginRequestParams = {
input: string;
};

View File

@@ -1,3 +0,0 @@
export const ClaudeSliceTextByToken = ({ text, length }: { text: string; length: number }) => {
return text.slice(0, length);
};

View File

@@ -1,6 +1,6 @@
import mammoth from 'mammoth';
import Papa from 'papaparse';
import { getOpenAiEncMap } from './chat/openai';
import { getOpenAiEncMap } from './plugin/openai';
/**
* 读取 txt 文件内容

View File

@@ -2,29 +2,37 @@ import { ClaudeEnum, OpenAiChatEnum } from '@/constants/model';
import type { ChatModelType } from '@/constants/model';
import type { ChatItemSimpleType } from '@/types/chat';
import { countOpenAIToken, openAiSliceTextByToken } from './openai';
import { ClaudeSliceTextByToken } from './claude';
import { gpt_chatItemTokenSlice } from '@/pages/api/openapi/text/gptMessagesSlice';
export const modelToolMap: Record<
ChatModelType,
{
countTokens: (data: { messages: ChatItemSimpleType[] }) => number;
sliceText: (data: { text: string; length: number }) => string;
tokenSlice: (data: {
messages: ChatItemSimpleType[];
maxToken: number;
}) => ChatItemSimpleType[];
}
> = {
[OpenAiChatEnum.GPT35]: {
countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT35, messages }),
sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT35, ...data })
sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT35, ...data }),
tokenSlice: (data) => gpt_chatItemTokenSlice({ model: OpenAiChatEnum.GPT35, ...data })
},
[OpenAiChatEnum.GPT4]: {
countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT4, messages }),
sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT4, ...data })
sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT4, ...data }),
tokenSlice: (data) => gpt_chatItemTokenSlice({ model: OpenAiChatEnum.GPT4, ...data })
},
[OpenAiChatEnum.GPT432k]: {
countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT432k, messages }),
sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT432k, ...data })
sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT432k, ...data }),
tokenSlice: (data) => gpt_chatItemTokenSlice({ model: OpenAiChatEnum.GPT432k, ...data })
},
[ClaudeEnum.Claude]: {
countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT35, messages }),
sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT35, ...data })
sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT35, ...data }),
tokenSlice: (data) => gpt_chatItemTokenSlice({ model: OpenAiChatEnum.GPT35, ...data })
}
};

View File

@@ -126,3 +126,10 @@ export const getErrText = (err: any, def = '') => {
msg && console.log('error =>', msg);
return msg;
};
/* Resolve (with an empty string) after `ms` milliseconds — a promisified setTimeout */
export const delay = (ms: number) =>
  new Promise((resolve) => {
    setTimeout(() => resolve(''), ms);
  });